diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py b/dev/breeze/src/airflow_breeze/utils/packages.py index 9e2fbecbfc507..0dab20d53fcc0 100644 --- a/dev/breeze/src/airflow_breeze/utils/packages.py +++ b/dev/breeze/src/airflow_breeze/utils/packages.py @@ -630,7 +630,7 @@ def format_version_suffix(version_suffix: str) -> str: """ if version_suffix: - if "." == version_suffix[0] or "+" == version_suffix[0]: + if version_suffix[0] == "." or version_suffix[0] == "+": return version_suffix else: return f".{version_suffix}" diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 53a53a5015c53..14abb95376e85 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -1326,10 +1326,10 @@ def is_amd_runner(self) -> bool: """ return any( [ - "amd" == label.lower() - or "amd64" == label.lower() - or "x64" == label.lower() - or "asf-runner" == label + label.lower() == "amd" + or label.lower() == "amd64" + or label.lower() == "x64" + or label == "asf-runner" or ("ubuntu" in label and "arm" not in label.lower()) for label in json.loads(self.runs_on_as_json_public) ] @@ -1346,7 +1346,7 @@ def is_arm_runner(self) -> bool: """ return any( [ - "arm" == label.lower() or "arm64" == label.lower() or "asf-arm" == label + label.lower() == "arm" or label.lower() == "arm64" or label == "asf-arm" for label in json.loads(self.runs_on_as_json_public) ] ) diff --git a/dev/breeze/tests/test_packages.py b/dev/breeze/tests/test_packages.py index d1eea1c4acd1e..8442e59f9cc3a 100644 --- a/dev/breeze/tests/test_packages.py +++ b/dev/breeze/tests/test_packages.py @@ -109,17 +109,17 @@ def test_get_provider_requirements(): def test_get_removed_providers(): # Modify it every time we schedule provider for removal or remove it - assert [] == get_removed_provider_ids() + assert get_removed_provider_ids() == [] def test_get_suspended_provider_ids(): # Modify it every time we suspend/resume provider - assert [] == get_suspended_provider_ids() + assert get_suspended_provider_ids() == [] def test_get_suspended_provider_folders(): # Modify it every time we suspend/resume provider - assert [] == get_suspended_provider_folders() + assert get_suspended_provider_folders() == [] @pytest.mark.parametrize( diff --git a/docker_tests/test_prod_image.py b/docker_tests/test_prod_image.py index 95c284dec1fe7..f896f2445077a 100644 --- a/docker_tests/test_prod_image.py +++ b/docker_tests/test_prod_image.py @@ -57,13 +57,13 @@ def test_without_command(self, default_docker_image): """Checking the image without a command. It should return non-zero exit code.""" with pytest.raises(DockerException) as ctx: run_cmd_in_docker(image=default_docker_image) - assert 2 == ctx.value.return_code + assert ctx.value.return_code == 2 def test_airflow_command(self, default_docker_image): """Checking 'airflow' command. It should return non-zero exit code.""" with pytest.raises(DockerException) as ctx: run_airflow_cmd_in_docker(image=default_docker_image) - assert 2 == ctx.value.return_code + assert ctx.value.return_code == 2 def test_airflow_version(self, default_docker_image): """Checking 'airflow version' command. 
It should return zero exit code.""" diff --git a/helm_tests/airflow_aux/test_airflow_common.py b/helm_tests/airflow_aux/test_airflow_common.py index 13a72c2b3d76b..d56d648f19af4 100644 --- a/helm_tests/airflow_aux/test_airflow_common.py +++ b/helm_tests/airflow_aux/test_airflow_common.py @@ -99,7 +99,7 @@ def test_dags_mount(self, dag_values, expected_mount): ], ) - assert 3 == len(docs) + assert len(docs) == 3 for doc in docs: assert expected_mount in jmespath.search("spec.template.spec.containers[0].volumeMounts", doc) @@ -164,7 +164,7 @@ def test_annotations(self): ], ) - assert 7 == len(k8s_objects) + assert len(k8s_objects) == 7 for k8s_object in k8s_objects: if k8s_object["kind"] == "CronJob": @@ -225,7 +225,7 @@ def test_global_affinity_tolerations_topology_spread_constraints_and_node_select ], ) - assert 12 == len(k8s_objects) + assert len(k8s_objects) == 12 for k8s_object in k8s_objects: if k8s_object["kind"] == "CronJob": @@ -233,17 +233,20 @@ def test_global_affinity_tolerations_topology_spread_constraints_and_node_select else: podSpec = jmespath.search("spec.template.spec", k8s_object) - assert "foo" == jmespath.search( - "affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - podSpec, + assert ( + jmespath.search( + "affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + podSpec, + ) + == "foo" ) - assert "user-node" == jmespath.search("nodeSelector.type", podSpec) - assert "static-pods" == jmespath.search("tolerations[0].key", podSpec) - assert "foo" == jmespath.search("topologySpreadConstraints[0].topologyKey", podSpec) + assert jmespath.search("nodeSelector.type", podSpec) == "user-node" + assert jmespath.search("tolerations[0].key", podSpec) == "static-pods" + assert jmespath.search("topologySpreadConstraints[0].topologyKey", podSpec) == "foo" @pytest.mark.parametrize( "expected_image,tag,digest", @@ -384,7 +387,7 @@ def test_have_all_config_mounts_on_init_containers(self): "templates/dag-processor/dag-processor-deployment.yaml", ], ) - assert 5 == len(docs) + assert len(docs) == 5 expected_mount = { "subPath": "airflow.cfg", "name": "config", @@ -424,7 +427,7 @@ def test_priority_class_name(self): ], ) - assert 10 == len(docs) + assert len(docs) == 10 for doc in docs: component = doc["metadata"]["labels"]["component"] if component == "airflow-cleanup-pods": diff --git a/helm_tests/airflow_aux/test_basic_helm_chart.py b/helm_tests/airflow_aux/test_basic_helm_chart.py index 3bdee86df0a8f..7d72a3205eeb4 100644 --- a/helm_tests/airflow_aux/test_basic_helm_chart.py +++ b/helm_tests/airflow_aux/test_basic_helm_chart.py @@ -153,8 +153,8 @@ def test_basic_deployments(self, version): if chart_name and "postgresql" in chart_name: continue k8s_name = k8s_object["kind"] + ":" + k8s_object["metadata"]["name"] - assert "TEST-VALUE" == labels.get( - "test-label" + assert ( + labels.get("test-label") == "TEST-VALUE" ), f"Missing label test-label on {k8s_name}. Current labels: {labels}" def test_basic_deployments_with_standard_naming(self): @@ -254,8 +254,8 @@ def test_basic_deployment_with_standalone_dag_processor(self, version): if chart_name and "postgresql" in chart_name: continue k8s_name = k8s_object["kind"] + ":" + k8s_object["metadata"]["name"] - assert "TEST-VALUE" == labels.get( - "test-label" + assert ( + labels.get("test-label") == "TEST-VALUE" ), f"Missing label test-label on {k8s_name}. 
Current labels: {labels}" @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) @@ -493,7 +493,7 @@ def test_annotations_on_airflow_pods_in_deployment(self): ) # pod_template_file is tested separately as it has extra setup steps - assert 8 == len(k8s_objects) + assert len(k8s_objects) == 8 for k8s_object in k8s_objects: annotations = k8s_object["spec"]["template"]["metadata"]["annotations"] @@ -600,8 +600,8 @@ def test_postgres_connection_url_no_override(self): show_only=["templates/secrets/metadata-connection-secret.yaml"], )[0] assert ( - "postgresql://postgres:postgres@my-release-postgresql.default:5432/postgres?sslmode=disable" - == base64.b64decode(doc["data"]["connection"]).decode("utf-8") + base64.b64decode(doc["data"]["connection"]).decode("utf-8") + == "postgresql://postgres:postgres@my-release-postgresql.default:5432/postgres?sslmode=disable" ) def test_postgres_connection_url_pgbouncer(self): @@ -612,9 +612,9 @@ def test_postgres_connection_url_pgbouncer(self): values={"pgbouncer": {"enabled": True}}, )[0] assert ( - "postgresql://postgres:postgres@my-release-pgbouncer.default:6543/" + base64.b64decode(doc["data"]["connection"]).decode("utf-8") + == "postgresql://postgres:postgres@my-release-pgbouncer.default:6543/" "my-release-metadata?sslmode=disable" - == base64.b64decode(doc["data"]["connection"]).decode("utf-8") ) def test_postgres_connection_url_pgbouncer_use_standard_naming(self): @@ -625,9 +625,9 @@ def test_postgres_connection_url_pgbouncer_use_standard_naming(self): values={"useStandardNaming": True, "pgbouncer": {"enabled": True}}, )[0] assert ( - "postgresql://postgres:postgres@my-release-airflow-pgbouncer.default:6543/" + base64.b64decode(doc["data"]["connection"]).decode("utf-8") + == "postgresql://postgres:postgres@my-release-airflow-pgbouncer.default:6543/" "my-release-metadata?sslmode=disable" - == base64.b64decode(doc["data"]["connection"]).decode("utf-8") ) def test_postgres_connection_url_name_override(self): @@ -639,8 +639,8 @@ def test_postgres_connection_url_name_override(self): )[0] assert ( - "postgresql://postgres:postgres@overrideName:5432/postgres?sslmode=disable" - == base64.b64decode(doc["data"]["connection"]).decode("utf-8") + base64.b64decode(doc["data"]["connection"]).decode("utf-8") + == "postgresql://postgres:postgres@overrideName:5432/postgres?sslmode=disable" ) def test_priority_classes(self): @@ -685,9 +685,10 @@ def test_redis_broker_connection_url(self): show_only=["templates/secrets/redis-secrets.yaml"], values={"redis": {"enabled": True, "password": "test1234"}}, )[1] - assert "redis://:test1234@my-release-redis:6379/0" == base64.b64decode( - doc["data"]["connection"] - ).decode("utf-8") + assert ( + base64.b64decode(doc["data"]["connection"]).decode("utf-8") + == "redis://:test1234@my-release-redis:6379/0" + ) def test_redis_broker_connection_url_use_standard_naming(self): # no nameoverride, redis and useStandardNaming @@ -696,9 +697,10 @@ def test_redis_broker_connection_url_use_standard_naming(self): show_only=["templates/secrets/redis-secrets.yaml"], values={"useStandardNaming": True, "redis": {"enabled": True, "password": "test1234"}}, )[1] - assert "redis://:test1234@my-release-airflow-redis:6379/0" == base64.b64decode( - doc["data"]["connection"] - ).decode("utf-8") + assert ( + base64.b64decode(doc["data"]["connection"]).decode("utf-8") + == "redis://:test1234@my-release-airflow-redis:6379/0" + ) @staticmethod def default_trigger_obj(version): diff --git a/helm_tests/airflow_aux/test_celery_kubernetes_executor.py 
b/helm_tests/airflow_aux/test_celery_kubernetes_executor.py index 4c34cf59411b8..bb69e798b5dc8 100644 --- a/helm_tests/airflow_aux/test_celery_kubernetes_executor.py +++ b/helm_tests/airflow_aux/test_celery_kubernetes_executor.py @@ -33,8 +33,8 @@ def test_should_create_a_worker_deployment_with_the_celery_executor(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert "config" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) - assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) + assert jmespath.search("spec.template.spec.volumes[0].name", docs[0]) == "config" + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "dags" def test_should_create_a_worker_deployment_with_the_celery_kubernetes_executor(self): docs = render_chart( @@ -45,5 +45,5 @@ def test_should_create_a_worker_deployment_with_the_celery_kubernetes_executor(s show_only=["templates/workers/worker-deployment.yaml"], ) - assert "config" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) - assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) + assert jmespath.search("spec.template.spec.volumes[0].name", docs[0]) == "config" + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "dags" diff --git a/helm_tests/airflow_aux/test_cleanup_pods.py b/helm_tests/airflow_aux/test_cleanup_pods.py index d63132f50bd51..843606dca3214 100644 --- a/helm_tests/airflow_aux/test_cleanup_pods.py +++ b/helm_tests/airflow_aux/test_cleanup_pods.py @@ -81,8 +81,9 @@ def test_should_create_cronjob_for_enabled_cleanup(self): show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) - assert "airflow-cleanup-pods" == jmespath.search( - "spec.jobTemplate.spec.template.spec.containers[0].name", docs[0] + assert ( + jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].name", docs[0]) + == "airflow-cleanup-pods" ) assert jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].image", docs[0]).startswith( "apache/airflow" @@ -115,8 +116,9 @@ def test_should_change_image_when_set_airflow_image(self): show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) - assert "airflow:test" == jmespath.search( - "spec.jobTemplate.spec.template.spec.containers[0].image", docs[0] + assert ( + jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].image", docs[0]) + == "airflow:test" ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): @@ -146,22 +148,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) - assert "CronJob" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.jobTemplate.spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "CronJob" + assert ( + jmespath.search( + "spec.jobTemplate.spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." 
+ "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.jobTemplate.spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.jobTemplate.spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.jobTemplate.spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.jobTemplate.spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_scheduler_name(self): @@ -170,9 +181,12 @@ def test_scheduler_name(self): show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.jobTemplate.spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.jobTemplate.spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_default_command_and_args(self): @@ -181,9 +195,11 @@ def test_default_command_and_args(self): ) assert jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].command", docs[0]) is None - assert ["bash", "-c", "exec airflow kubernetes cleanup-pods --namespace=default"] == jmespath.search( - "spec.jobTemplate.spec.template.spec.containers[0].args", docs[0] - ) + assert jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].args", docs[0]) == [ + "bash", + "-c", + "exec airflow kubernetes cleanup-pods --namespace=default", + ] def test_should_add_extraEnvs(self): docs = render_chart( @@ -225,10 +241,10 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) - assert ["release-name"] == jmespath.search( - "spec.jobTemplate.spec.template.spec.containers[0].command", docs[0] - ) - assert ["Helm"] == jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].command", docs[0]) == [ + "release-name" + ] + assert jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_should_set_labels_to_jobs_from_cronjob(self): docs = render_chart( @@ -239,12 +255,12 @@ def test_should_set_labels_to_jobs_from_cronjob(self): show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) - assert { + assert jmespath.search("spec.jobTemplate.spec.template.metadata.labels", docs[0]) == { "tier": "airflow", "component": "airflow-cleanup-pods", "release": "release-name", "project": "airflow", - } == jmespath.search("spec.jobTemplate.spec.template.metadata.labels", docs[0]) + } def test_should_add_component_specific_labels(self): docs = render_chart( @@ -277,17 +293,17 @@ def test_should_add_component_specific_annotations(self): assert "test_cronjob_annotation" in jmespath.search("metadata.annotations", docs[0]) assert ( - "test_cronjob_annotation_value" - == jmespath.search("metadata.annotations", docs[0])["test_cronjob_annotation"] + jmespath.search("metadata.annotations", docs[0])["test_cronjob_annotation"] + == "test_cronjob_annotation_value" ) assert "test_pod_annotation" in jmespath.search( "spec.jobTemplate.spec.template.metadata.annotations", docs[0] ) assert ( - "test_pod_annotation_value" - == jmespath.search("spec.jobTemplate.spec.template.metadata.annotations", docs[0])[ + jmespath.search("spec.jobTemplate.spec.template.metadata.annotations", docs[0])[ "test_pod_annotation" ] + == "test_pod_annotation_value" ) def test_cleanup_resources_are_configurable(self): @@ -326,8 +342,8 @@ def 
test_should_set_job_history_limits(self): }, show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) - assert 2 == jmespath.search("spec.failedJobsHistoryLimit", docs[0]) - assert 4 == jmespath.search("spec.successfulJobsHistoryLimit", docs[0]) + assert jmespath.search("spec.failedJobsHistoryLimit", docs[0]) == 2 + assert jmespath.search("spec.successfulJobsHistoryLimit", docs[0]) == 4 def test_should_set_zero_job_history_limits(self): docs = render_chart( @@ -340,8 +356,8 @@ def test_should_set_zero_job_history_limits(self): }, show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) - assert 0 == jmespath.search("spec.failedJobsHistoryLimit", docs[0]) - assert 0 == jmespath.search("spec.successfulJobsHistoryLimit", docs[0]) + assert jmespath.search("spec.failedJobsHistoryLimit", docs[0]) == 0 + assert jmespath.search("spec.successfulJobsHistoryLimit", docs[0]) == 0 def test_no_airflow_local_settings(self): docs = render_chart( diff --git a/helm_tests/airflow_aux/test_configmap.py b/helm_tests/airflow_aux/test_configmap.py index 623f3f1067e10..8cb88dc184ca9 100644 --- a/helm_tests/airflow_aux/test_configmap.py +++ b/helm_tests/airflow_aux/test_configmap.py @@ -34,7 +34,7 @@ def test_single_annotation(self): ) annotations = jmespath.search("metadata.annotations", docs[0]) - assert "value" == annotations.get("key") + assert annotations.get("key") == "value" def test_multiple_annotations(self): docs = render_chart( @@ -45,8 +45,8 @@ def test_multiple_annotations(self): ) annotations = jmespath.search("metadata.annotations", docs[0]) - assert "value" == annotations.get("key") - assert "value-two" == annotations.get("key-two") + assert annotations.get("key") == "value" + assert annotations.get("key-two") == "value-two" @pytest.mark.parametrize( "af_version, secret_key, secret_key_name, expected", @@ -73,7 +73,7 @@ def test_default_airflow_local_settings(self, af_version, secret_key, secret_key in jmespath.search('data."airflow_local_settings.py"', docs[0]).strip() ) else: - assert "" == jmespath.search('data."airflow_local_settings.py"', docs[0]).strip() + assert jmespath.search('data."airflow_local_settings.py"', docs[0]).strip() == "" def test_airflow_local_settings(self): docs = render_chart( @@ -82,8 +82,8 @@ def test_airflow_local_settings(self): ) assert ( - "# Well hello release-name!" - == jmespath.search('data."airflow_local_settings.py"', docs[0]).strip() + jmespath.search('data."airflow_local_settings.py"', docs[0]).strip() + == "# Well hello release-name!" 
) def test_kerberos_config_available_with_celery_executor(self): diff --git a/helm_tests/airflow_aux/test_create_user_job.py b/helm_tests/airflow_aux/test_create_user_job.py index dcce570cff3b3..ad3abd6584af4 100644 --- a/helm_tests/airflow_aux/test_create_user_job.py +++ b/helm_tests/airflow_aux/test_create_user_job.py @@ -27,9 +27,9 @@ class TestCreateUserJob: def test_should_run_by_default(self): docs = render_chart(show_only=["templates/jobs/create-user-job.yaml"]) - assert "Job" == docs[0]["kind"] - assert "create-user" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) - assert 50000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) + assert docs[0]["kind"] == "Job" + assert jmespath.search("spec.template.spec.containers[0].name", docs[0]) == "create-user" + assert jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) == 50000 def test_should_support_annotations(self): docs = render_chart( @@ -38,10 +38,10 @@ def test_should_support_annotations(self): ) annotations = jmespath.search("spec.template.metadata.annotations", docs[0]) assert "foo" in annotations - assert "bar" == annotations["foo"] + assert annotations["foo"] == "bar" job_annotations = jmespath.search("metadata.annotations", docs[0]) assert "fiz" in job_annotations - assert "fuz" == job_annotations["fiz"] + assert job_annotations["fiz"] == "fuz" def test_should_add_component_specific_labels(self): docs = render_chart( @@ -81,22 +81,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert "Job" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], - ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], - ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert jmespath.search("kind", docs[0]) == "Job" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." 
+ "key", + docs[0], + ) + == "foo" + ) + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" + ) + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_scheduler_name(self): @@ -105,9 +114,12 @@ def test_scheduler_name(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_create_user_job_resources_are_configurable(self): @@ -161,10 +173,10 @@ def test_should_add_extra_containers(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_should_add_extra_init_containers(self): docs = render_chart( @@ -178,10 +190,10 @@ def test_should_add_extra_init_containers(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[0]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.initContainers[0]", docs[0]) + } def test_should_template_extra_containers(self): docs = render_chart( @@ -193,9 +205,9 @@ def test_should_template_extra_containers(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert {"name": "release-name-test-container"} == jmespath.search( - "spec.template.spec.containers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { + "name": "release-name-test-container" + } def test_should_add_extra_volumes(self): docs = render_chart( @@ -207,9 +219,10 @@ def test_should_add_extra_volumes(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert {"name": "myvolume-airflow", "emptyDir": {}} == jmespath.search( - "spec.template.spec.volumes[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.volumes[-1]", docs[0]) == { + "name": "myvolume-airflow", + "emptyDir": {}, + } def test_should_add_extra_volume_mounts(self): docs = render_chart( @@ -221,9 +234,10 @@ def test_should_add_extra_volume_mounts(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert {"name": "foobar-airflow", "mountPath": "foo/bar"} == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].volumeMounts[-1]", docs[0]) == { + "name": "foobar-airflow", + "mountPath": "foo/bar", + } def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( @@ -234,12 +248,14 @@ def test_should_add_global_volume_and_global_volume_mount(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert {"name": "myvolume", "emptyDir": {}} == jmespath.search( - "spec.template.spec.volumes[-1]", docs[0] - ) - assert {"name": "foobar", "mountPath": "foo/bar"} == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.volumes[-1]", docs[0]) == { + "name": "myvolume", + "emptyDir": {}, + } + assert jmespath.search("spec.template.spec.containers[0].volumeMounts[-1]", docs[0]) == { + "name": "foobar", + "mountPath": "foo/bar", + } def test_should_add_extraEnvs(self): docs = 
render_chart( @@ -381,8 +397,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/jobs/create-user-job.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_default_user_overrides(self): docs = render_chart( @@ -402,7 +418,7 @@ def test_default_user_overrides(self): ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None - assert [ + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == [ "bash", "-c", 'exec \\\nairflow users create "$@"', @@ -419,7 +435,7 @@ def test_default_user_overrides(self): "Doe", "-p", "whereisjane?", - ] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + ] def test_no_airflow_local_settings(self): docs = render_chart( diff --git a/helm_tests/airflow_aux/test_logs_persistent_volume_claim.py b/helm_tests/airflow_aux/test_logs_persistent_volume_claim.py index c3217ee741ff9..86175e39b989c 100644 --- a/helm_tests/airflow_aux/test_logs_persistent_volume_claim.py +++ b/helm_tests/airflow_aux/test_logs_persistent_volume_claim.py @@ -30,7 +30,7 @@ def test_should_not_generate_a_document_if_persistence_is_disabled(self): show_only=["templates/logs-persistent-volume-claim.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_should_not_generate_a_document_when_using_an_existing_claim(self): docs = render_chart( @@ -38,7 +38,7 @@ def test_should_not_generate_a_document_when_using_an_existing_claim(self): show_only=["templates/logs-persistent-volume-claim.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_should_generate_a_document_if_persistence_is_enabled_and_not_using_an_existing_claim(self): docs = render_chart( @@ -46,7 +46,7 @@ def test_should_generate_a_document_if_persistence_is_enabled_and_not_using_an_e show_only=["templates/logs-persistent-volume-claim.yaml"], ) - assert 1 == len(docs) + assert len(docs) == 1 def test_should_set_pvc_details_correctly(self): docs = render_chart( @@ -63,11 +63,11 @@ def test_should_set_pvc_details_correctly(self): show_only=["templates/logs-persistent-volume-claim.yaml"], ) - assert { + assert jmespath.search("spec", docs[0]) == { "accessModes": ["ReadWriteMany"], "resources": {"requests": {"storage": "1G"}}, "storageClassName": "MyStorageClass", - } == jmespath.search("spec", docs[0]) + } def test_logs_persistent_volume_claim_template_storage_class_name(self): docs = render_chart( @@ -82,4 +82,4 @@ def test_logs_persistent_volume_claim_template_storage_class_name(self): }, show_only=["templates/logs-persistent-volume-claim.yaml"], ) - assert "release-name-storage-class" == jmespath.search("spec.storageClassName", docs[0]) + assert jmespath.search("spec.storageClassName", docs[0]) == "release-name-storage-class" diff --git a/helm_tests/airflow_aux/test_migrate_database_job.py b/helm_tests/airflow_aux/test_migrate_database_job.py index 426a35edc424e..3db199293f217 100644 --- a/helm_tests/airflow_aux/test_migrate_database_job.py +++ b/helm_tests/airflow_aux/test_migrate_database_job.py @@ -27,9 +27,9 @@ class TestMigrateDatabaseJob: def test_should_run_by_default(self): docs = render_chart(show_only=["templates/jobs/migrate-database-job.yaml"]) - assert "Job" == docs[0]["kind"] 
- assert "run-airflow-migrations" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) - assert 50000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) + assert docs[0]["kind"] == "Job" + assert jmespath.search("spec.template.spec.containers[0].name", docs[0]) == "run-airflow-migrations" + assert jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) == 50000 @pytest.mark.parametrize( "migrate_database_job_enabled,created", @@ -55,10 +55,10 @@ def test_should_support_annotations(self): ) annotations = jmespath.search("spec.template.metadata.annotations", docs[0]) assert "foo" in annotations - assert "bar" == annotations["foo"] + assert annotations["foo"] == "bar" job_annotations = jmespath.search("metadata.annotations", docs[0]) assert "fiz" in job_annotations - assert "fuz" == job_annotations["fiz"] + assert job_annotations["fiz"] == "fuz" def test_should_add_component_specific_labels(self): docs = render_chart( @@ -119,22 +119,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert "Job" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "Job" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_scheduler_name(self): @@ -143,9 +152,12 @@ def test_scheduler_name(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) @pytest.mark.parametrize( @@ -185,10 +197,10 @@ def test_should_add_extra_containers(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_should_add_extra_init_containers(self): docs = render_chart( @@ -202,10 +214,10 @@ def test_should_add_extra_init_containers(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[0]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.initContainers[0]", docs[0]) + } def test_should_template_extra_containers(self): docs = render_chart( @@ -217,9 +229,9 @@ def test_should_template_extra_containers(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert {"name": "release-name-test-container"} == jmespath.search( - "spec.template.spec.containers[-1]", docs[0] - ) + assert 
jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { + "name": "release-name-test-container" + } def test_set_resources(self): docs = render_chart( @@ -240,7 +252,7 @@ def test_set_resources(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == { "requests": { "cpu": "1000mi", "memory": "512Mi", @@ -249,7 +261,7 @@ def test_set_resources(self): "cpu": "1000mi", "memory": "512Mi", }, - } == jmespath.search("spec.template.spec.containers[0].resources", docs[0]) + } def test_should_disable_default_helm_hooks(self): docs = render_chart( @@ -278,9 +290,10 @@ def test_should_add_extra_volumes(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert {"name": "myvolume-airflow", "emptyDir": {}} == jmespath.search( - "spec.template.spec.volumes[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.volumes[-1]", docs[0]) == { + "name": "myvolume-airflow", + "emptyDir": {}, + } def test_should_add_extra_volume_mounts(self): docs = render_chart( @@ -292,9 +305,10 @@ def test_should_add_extra_volume_mounts(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert {"name": "foobar-airflow", "mountPath": "foo/bar"} == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].volumeMounts[-1]", docs[0]) == { + "name": "foobar-airflow", + "mountPath": "foo/bar", + } def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( @@ -305,12 +319,14 @@ def test_should_add_global_volume_and_global_volume_mount(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert {"name": "myvolume", "emptyDir": {}} == jmespath.search( - "spec.template.spec.volumes[-1]", docs[0] - ) - assert {"name": "foobar", "mountPath": "foo/bar"} == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.volumes[-1]", docs[0]) == { + "name": "myvolume", + "emptyDir": {}, + } + assert jmespath.search("spec.template.spec.containers[0].volumeMounts[-1]", docs[0]) == { + "name": "foobar", + "mountPath": "foo/bar", + } def test_job_ttl_after_finished(self): docs = render_chart( @@ -378,8 +394,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/jobs/migrate-database-job.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_no_airflow_local_settings(self): docs = render_chart( diff --git a/helm_tests/airflow_aux/test_pod_launcher_role.py b/helm_tests/airflow_aux/test_pod_launcher_role.py index c1336268824c6..595f5ed591bcf 100644 --- a/helm_tests/airflow_aux/test_pod_launcher_role.py +++ b/helm_tests/airflow_aux/test_pod_launcher_role.py @@ -48,7 +48,7 @@ def test_pod_launcher_role(self, executor, rbac, allow, expected_accounts): for idx, suffix in enumerate(expected_accounts): assert f"release-name-airflow-{suffix}" == jmespath.search(f"subjects[{idx}].name", docs[0]) else: - assert [] == docs + assert docs == [] @pytest.mark.parametrize( "multiNamespaceMode, namespace, expectedRole, expectedRoleBinding", diff --git 
a/helm_tests/airflow_aux/test_pod_template_file.py b/helm_tests/airflow_aux/test_pod_template_file.py index 5a507b7ee6aae..ddd35e63030f8 100644 --- a/helm_tests/airflow_aux/test_pod_template_file.py +++ b/helm_tests/airflow_aux/test_pod_template_file.py @@ -58,7 +58,7 @@ def test_should_work(self): assert re.search("Pod", docs[0]["kind"]) assert jmespath.search("spec.containers[0].image", docs[0]) is not None - assert "base" == jmespath.search("spec.containers[0].name", docs[0]) + assert jmespath.search("spec.containers[0].name", docs[0]) == "base" def test_should_add_an_init_container_if_git_sync_is_true(self): docs = render_chart( @@ -95,7 +95,7 @@ def test_should_add_an_init_container_if_git_sync_is_true(self): ) assert re.search("Pod", docs[0]["kind"]) - assert { + assert jmespath.search("spec.initContainers[0]", docs[0]) == { "name": "git-sync-test-init", "securityContext": {"runAsUser": 65533}, "image": "test-registry/test-repo:test-tag", @@ -123,7 +123,7 @@ def test_should_add_an_init_container_if_git_sync_is_true(self): ], "volumeMounts": [{"mountPath": "/git", "name": "dags"}], "resources": {}, - } == jmespath.search("spec.initContainers[0]", docs[0]) + } def test_should_not_add_init_container_if_dag_persistence_is_true(self): docs = render_chart( @@ -356,9 +356,9 @@ def test_should_set_a_custom_image_in_pod_template(self): ) assert re.search("Pod", docs[0]["kind"]) - assert "dummy_image:latest" == jmespath.search("spec.containers[0].image", docs[0]) - assert "Always" == jmespath.search("spec.containers[0].imagePullPolicy", docs[0]) - assert "base" == jmespath.search("spec.containers[0].name", docs[0]) + assert jmespath.search("spec.containers[0].image", docs[0]) == "dummy_image:latest" + assert jmespath.search("spec.containers[0].imagePullPolicy", docs[0]) == "Always" + assert jmespath.search("spec.containers[0].name", docs[0]) == "base" def test_mount_airflow_cfg(self): docs = render_chart( @@ -405,21 +405,30 @@ def test_should_use_global_affinity_tolerations_and_node_selector(self): ) assert re.search("Pod", docs[0]["kind"]) - assert "foo" == jmespath.search( - "spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], - ) - assert "ssd" == jmespath.search( - "spec.nodeSelector.diskType", - docs[0], - ) - assert "dynamic-pods" == jmespath.search( - "spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" + ) + assert ( + jmespath.search( + "spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" + ) + assert ( + jmespath.search( + "spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_should_create_valid_affinity_tolerations_topology_spread_constraints_and_node_selector(self): @@ -458,26 +467,38 @@ def test_should_create_valid_affinity_tolerations_topology_spread_constraints_an chart_dir=self.temp_chart_dir, ) - assert "Pod" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." 
- "key", - docs[0], - ) - assert "ssd" == jmespath.search( - "spec.nodeSelector.diskType", - docs[0], - ) - assert "dynamic-pods" == jmespath.search( - "spec.tolerations[0].key", - docs[0], - ) - assert "foo" == jmespath.search( - "spec.topologySpreadConstraints[0].topologyKey", - docs[0], + assert jmespath.search("kind", docs[0]) == "Pod" + assert ( + jmespath.search( + "spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" + ) + assert ( + jmespath.search( + "spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" + ) + assert ( + jmespath.search( + "spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" + ) + assert ( + jmespath.search( + "spec.topologySpreadConstraints[0].topologyKey", + docs[0], + ) + == "foo" ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): @@ -543,13 +564,16 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec ) assert expected_affinity == jmespath.search("spec.affinity", docs[0]) - assert "ssd" == jmespath.search( - "spec.nodeSelector.type", - docs[0], + assert ( + jmespath.search( + "spec.nodeSelector.type", + docs[0], + ) + == "ssd" ) tolerations = jmespath.search("spec.tolerations", docs[0]) - assert 1 == len(tolerations) - assert "dynamic-pods" == tolerations[0]["key"] + assert len(tolerations) == 1 + assert tolerations[0]["key"] == "dynamic-pods" assert expected_topology_spread_constraints == jmespath.search( "spec.topologySpreadConstraints[0]", docs[0] ) @@ -561,15 +585,18 @@ def test_scheduler_name(self): chart_dir=self.temp_chart_dir, ) - assert "airflow-scheduler" == jmespath.search( - "spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_not_create_default_affinity(self): docs = render_chart(show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir) - assert {} == jmespath.search("spec.affinity", docs[0]) + assert jmespath.search("spec.affinity", docs[0]) == {} def test_should_add_fsgroup_to_the_pod_template(self): docs = render_chart( @@ -721,10 +748,10 @@ def test_should_add_extra_init_containers(self): chart_dir=self.temp_chart_dir, ) - assert { + assert jmespath.search("spec.initContainers[-1]", docs[0]) == { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.initContainers[-1]", docs[0]) + } def test_should_template_extra_init_containers(self): docs = render_chart( @@ -737,9 +764,9 @@ def test_should_template_extra_init_containers(self): chart_dir=self.temp_chart_dir, ) - assert { + assert jmespath.search("spec.initContainers[-1]", docs[0]) == { "name": "release-name-test-init-container", - } == jmespath.search("spec.initContainers[-1]", docs[0]) + } def test_should_add_extra_containers(self): docs = render_chart( @@ -754,10 +781,10 @@ def test_should_add_extra_containers(self): chart_dir=self.temp_chart_dir, ) - assert { + assert jmespath.search("spec.containers[-1]", docs[0]) == { "name": "test-container", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.containers[-1]", docs[0]) + } def test_should_template_extra_containers(self): docs = render_chart( @@ -770,9 +797,9 @@ def test_should_template_extra_containers(self): chart_dir=self.temp_chart_dir, ) - assert { + assert jmespath.search("spec.containers[-1]", docs[0]) == { "name": "release-name-test-container", - } == 
jmespath.search("spec.containers[-1]", docs[0]) + } def test_should_add_pod_labels(self): docs = render_chart( @@ -781,13 +808,13 @@ def test_should_add_pod_labels(self): chart_dir=self.temp_chart_dir, ) - assert { + assert jmespath.search("metadata.labels", docs[0]) == { "label1": "value1", "label2": "value2", "release": "release-name", "component": "worker", "tier": "airflow", - } == jmespath.search("metadata.labels", docs[0]) + } def test_should_add_extraEnvs(self): docs = render_chart( @@ -851,7 +878,7 @@ def test_should_add_resources(self): chart_dir=self.temp_chart_dir, ) - assert { + assert jmespath.search("spec.containers[0].resources", docs[0]) == { "limits": { "cpu": "2", "memory": "3Gi", @@ -860,7 +887,7 @@ def test_should_add_resources(self): "cpu": "1", "memory": "2Gi", }, - } == jmespath.search("spec.containers[0].resources", docs[0]) + } def test_empty_resources(self): docs = render_chart( @@ -868,7 +895,7 @@ def test_empty_resources(self): show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) - assert {} == jmespath.search("spec.containers[0].resources", docs[0]) + assert jmespath.search("spec.containers[0].resources", docs[0]) == {} def test_workers_host_aliases(self): docs = render_chart( @@ -881,8 +908,8 @@ def test_workers_host_aliases(self): chart_dir=self.temp_chart_dir, ) - assert "127.0.0.2" == jmespath.search("spec.hostAliases[0].ip", docs[0]) - assert "test.hostname" == jmespath.search("spec.hostAliases[0].hostnames[0]", docs[0]) + assert jmespath.search("spec.hostAliases[0].ip", docs[0]) == "127.0.0.2" + assert jmespath.search("spec.hostAliases[0].hostnames[0]", docs[0]) == "test.hostname" def test_workers_priority_class_name(self): docs = render_chart( @@ -895,7 +922,7 @@ def test_workers_priority_class_name(self): chart_dir=self.temp_chart_dir, ) - assert "test-priority" == jmespath.search("spec.priorityClassName", docs[0]) + assert jmespath.search("spec.priorityClassName", docs[0]) == "test-priority" def test_workers_container_lifecycle_webhooks_are_configurable(self, hook_type="preStop"): lifecycle_hook_params = CONTAINER_LIFECYCLE_PARAMETERS[hook_type] @@ -924,7 +951,7 @@ def test_termination_grace_period_seconds(self): chart_dir=self.temp_chart_dir, ) - assert 123 == jmespath.search("spec.terminationGracePeriodSeconds", docs[0]) + assert jmespath.search("spec.terminationGracePeriodSeconds", docs[0]) == 123 def test_runtime_class_name_values_are_configurable(self): docs = render_chart( diff --git a/helm_tests/airflow_core/test_dag_processor.py b/helm_tests/airflow_core/test_dag_processor.py index 287896369fd71..6e19b68ae286d 100644 --- a/helm_tests/airflow_core/test_dag_processor.py +++ b/helm_tests/airflow_core/test_dag_processor.py @@ -52,7 +52,7 @@ def test_can_be_disabled(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_disable_wait_for_migration(self): docs = render_chart( @@ -88,9 +88,10 @@ def test_wait_for_migration_security_contexts_are_configurable(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert {"allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True} == jmespath.search( - "spec.template.spec.initContainers[0].securityContext", docs[0] - ) + assert jmespath.search("spec.template.spec.initContainers[0].securityContext", docs[0]) == { + "allowPrivilegeEscalation": False, + "readOnlyRootFilesystem": True, + } def test_should_add_extra_containers(self): docs = render_chart( @@ -105,10 +106,10 
@@ def test_should_add_extra_containers(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_should_template_extra_containers(self): docs = render_chart( @@ -121,9 +122,9 @@ def test_should_template_extra_containers(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert {"name": "release-name-test-container"} == jmespath.search( - "spec.template.spec.containers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { + "name": "release-name-test-container" + } def test_should_add_extra_init_containers(self): docs = render_chart( @@ -138,10 +139,10 @@ def test_should_add_extra_init_containers(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) + } def test_should_template_extra_init_containers(self): docs = render_chart( @@ -154,9 +155,9 @@ def test_should_template_extra_init_containers(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert {"name": "release-name-test-init-container"} == jmespath.search( - "spec.template.spec.initContainers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { + "name": "release-name-test-init-container" + } def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( @@ -172,12 +173,14 @@ def test_should_add_extra_volume_and_extra_volume_mount(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "test-volume-airflow" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[0].name", docs[0]) + == "test-volume-airflow" ) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.initContainers[0].volumeMounts[0].name", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].volumeMounts[0].name", docs[0]) + == "test-volume-airflow" ) def test_should_add_global_volume_and_global_volume_mount(self): @@ -190,12 +193,13 @@ def test_should_add_global_volume_and_global_volume_mount(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert "test-volume" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) - assert "test-volume" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "test-volume" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[0].name", docs[0]) == "test-volume" ) - assert "test-volume" == jmespath.search( - "spec.template.spec.initContainers[0].volumeMounts[0].name", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].volumeMounts[0].name", docs[0]) + == "test-volume" ) def test_should_add_extraEnvs(self): @@ 
-254,9 +258,12 @@ def test_scheduler_name(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): @@ -286,22 +293,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert "Deployment" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "Deployment" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): @@ -367,13 +383,16 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.type", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.type", + docs[0], + ) + == "ssd" ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) - assert 1 == len(tolerations) - assert "dynamic-pods" == tolerations[0]["key"] + assert len(tolerations) == 1 + assert tolerations[0]["key"] == "dynamic-pods" assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) @@ -381,12 +400,12 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/scheduler/scheduler-deployment.yaml"]) - assert {"component": "scheduler"} == jmespath.search( + assert jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], - ) + ) == {"component": "scheduler"} def test_livenessprobe_values_are_configurable(self): docs = render_chart( @@ -405,20 +424,24 @@ def test_livenessprobe_values_are_configurable(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert 111 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0]) + == 111 ) - assert 222 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0]) == 222 ) - assert 333 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0]) == 333 ) - assert 444 == jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) == 444 - assert ["sh", "-c", "echo", "wow such test"] == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].livenessProbe.exec.command", docs[0]) == [ + "sh", + "-c", + "echo", + "wow such test", + ] @pytest.mark.parametrize( "airflow_version, probe_command", @@ -465,9 +488,10 @@ def test_logs_persistence_changes_volume(self, log_values, expected_volume): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert {"name": "logs", **expected_volume} == jmespath.search( - "spec.template.spec.volumes[1]", docs[0] - ) + assert jmespath.search("spec.template.spec.volumes[1]", docs[0]) == { + "name": "logs", + **expected_volume, + } def test_resources_are_configurable(self): docs = render_chart( @@ -482,22 +506,24 @@ def test_resources_are_configurable(self): }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) == "200m" + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" - assert "128Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.limits.memory", docs[0]) + == "128Mi" ) - assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) == "200m" + assert 
( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.memory", docs[0]) + == "169Mi" ) - assert "300m" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.cpu", docs[0]) == "300m" ) def test_resources_are_not_added_by_default(self): @@ -535,9 +561,11 @@ def test_default_command_and_args(self): ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None - assert ["bash", "-c", "exec airflow dag-processor"] == jmespath.search( - "spec.template.spec.containers[0].args", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == [ + "bash", + "-c", + "exec airflow dag-processor", + ] @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", @@ -589,8 +617,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_dags_volume_mount_with_persistence_true(self): docs = render_chart( diff --git a/helm_tests/airflow_core/test_rpc_server.py b/helm_tests/airflow_core/test_rpc_server.py index 106b2d38e8152..7bbc944cfd6c9 100644 --- a/helm_tests/airflow_core/test_rpc_server.py +++ b/helm_tests/airflow_core/test_rpc_server.py @@ -153,14 +153,17 @@ def test_should_use_templated_base_url_for_probes(self): assert {"name": "Host", "value": "release-name.com"} in jmespath.search( "startupProbe.httpGet.httpHeaders", container ) - assert "/mypath/release-name/path/internal_api/v1/health" == jmespath.search( - "livenessProbe.httpGet.path", container + assert ( + jmespath.search("livenessProbe.httpGet.path", container) + == "/mypath/release-name/path/internal_api/v1/health" ) - assert "/mypath/release-name/path/internal_api/v1/health" == jmespath.search( - "readinessProbe.httpGet.path", container + assert ( + jmespath.search("readinessProbe.httpGet.path", container) + == "/mypath/release-name/path/internal_api/v1/health" ) - assert "/mypath/release-name/path/internal_api/v1/health" == jmespath.search( - "startupProbe.httpGet.path", container + assert ( + jmespath.search("startupProbe.httpGet.path", container) + == "/mypath/release-name/path/internal_api/v1/health" ) def test_should_add_scheme_to_liveness_and_readiness_and_startup_probes(self): @@ -234,12 +237,14 @@ def test_should_add_extra_volume_and_extra_volume_mount(self): show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[-1].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) == "test-volume-airflow" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[-1].name", docs[0]) + == "test-volume-airflow" ) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0]) 
+ == "test-volume-airflow" ) def test_should_add_global_volume_and_global_volume_mount(self): @@ -252,9 +257,10 @@ def test_should_add_global_volume_and_global_volume_mount(self): show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert "test-volume" == jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) - assert "test-volume" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[-1].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) == "test-volume" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[-1].name", docs[0]) + == "test-volume" ) def test_should_add_extraEnvs_to_wait_for_migration_container(self): @@ -322,10 +328,10 @@ def test_should_add_extra_init_containers(self): show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) + } def test_should_add_component_specific_labels(self): docs = render_chart( @@ -368,22 +374,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert "Deployment" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "Deployment" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_should_create_default_affinity(self): @@ -392,12 +407,12 @@ def test_should_create_default_affinity(self): show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert {"component": "rpc-server"} == jmespath.search( + assert jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], - ) + ) == {"component": "rpc-server"} def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): """When given both global and rpc-server affinity etc, rpc-server affinity etc is used.""" @@ -462,13 +477,16 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.type", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.type", + docs[0], + ) + == "ssd" ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) - assert 1 == len(tolerations) - assert "dynamic-pods" == tolerations[0]["key"] + assert len(tolerations) == 1 + assert tolerations[0]["key"] == "dynamic-pods" assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) @@ -479,9 +497,12 @@ def test_scheduler_name(self): show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) @pytest.mark.parametrize( @@ -540,25 +561,27 @@ def test_rpc_server_resources_are_configurable(self): }, show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) == "200m" - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" # initContainer wait-for-airflow-migrations - assert "128Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.limits.memory", docs[0]) + == "128Mi" ) - assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) + assert jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) == "200m" - assert "169Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.memory", docs[0]) + == "169Mi" ) - assert "300m" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.cpu", docs[0]) == "300m" ) def test_rpc_server_security_contexts_are_configurable(self): @@ -582,16 +605,17 @@ def test_rpc_server_security_contexts_are_configurable(self): }, show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert {"allowPrivilegeEscalation": False, 
"readOnlyRootFilesystem": True} == jmespath.search( - "spec.template.spec.containers[0].securityContext", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].securityContext", docs[0]) == { + "allowPrivilegeEscalation": False, + "readOnlyRootFilesystem": True, + } - assert { + assert jmespath.search("spec.template.spec.securityContext", docs[0]) == { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, - } == jmespath.search("spec.template.spec.securityContext", docs[0]) + } def test_rpc_server_security_context_legacy(self): with pytest.raises(CalledProcessError, match="Additional property securityContext is not allowed"): @@ -651,9 +675,10 @@ def test_default_command_and_args(self): ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["bash"] - assert ["-c", "exec airflow internal-api"] == jmespath.search( - "spec.template.spec.containers[0].args", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == [ + "-c", + "exec airflow internal-api", + ] @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) @@ -678,8 +703,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_should_add_component_specific_annotations(self): docs = render_chart( @@ -705,8 +730,8 @@ def test_rpc_server_pod_hostaliases(self): show_only=["templates/rpc-server/rpc-server-deployment.yaml"], ) - assert "127.0.0.1" == jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) - assert "foo.local" == jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) + assert jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) == "127.0.0.1" + assert jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) == "foo.local" class TestRPCServerService: @@ -718,12 +743,14 @@ def test_default_service(self): show_only=["templates/rpc-server/rpc-server-service.yaml"], ) - assert "release-name-rpc-server" == jmespath.search("metadata.name", docs[0]) + assert jmespath.search("metadata.name", docs[0]) == "release-name-rpc-server" assert jmespath.search("metadata.annotations", docs[0]) is None - assert {"tier": "airflow", "component": "rpc-server", "release": "release-name"} == jmespath.search( - "spec.selector", docs[0] - ) - assert "ClusterIP" == jmespath.search("spec.type", docs[0]) + assert jmespath.search("spec.selector", docs[0]) == { + "tier": "airflow", + "component": "rpc-server", + "release": "release-name", + } + assert jmespath.search("spec.type", docs[0]) == "ClusterIP" assert {"name": "rpc-server", "port": 9080} in jmespath.search("spec.ports", docs[0]) def test_overrides(self): @@ -743,11 +770,11 @@ def test_overrides(self): show_only=["templates/rpc-server/rpc-server-service.yaml"], ) - assert {"foo": "bar"} == jmespath.search("metadata.annotations", docs[0]) - assert "LoadBalancer" == jmespath.search("spec.type", docs[0]) + assert jmespath.search("metadata.annotations", docs[0]) == {"foo": "bar"} + assert jmespath.search("spec.type", docs[0]) == 
"LoadBalancer" assert {"name": "rpc-server", "port": 9000} in jmespath.search("spec.ports", docs[0]) - assert "127.0.0.1" == jmespath.search("spec.loadBalancerIP", docs[0]) - assert ["10.123.0.0/16"] == jmespath.search("spec.loadBalancerSourceRanges", docs[0]) + assert jmespath.search("spec.loadBalancerIP", docs[0]) == "127.0.0.1" + assert jmespath.search("spec.loadBalancerSourceRanges", docs[0]) == ["10.123.0.0/16"] @pytest.mark.parametrize( "ports, expected_ports", @@ -826,7 +853,7 @@ def test_nodeport_service(self, ports, expected_ports): show_only=["templates/rpc-server/rpc-server-service.yaml"], ) - assert "NodePort" == jmespath.search("spec.type", docs[0]) + assert jmespath.search("spec.type", docs[0]) == "NodePort" assert expected_ports == jmespath.search("spec.ports", docs[0]) @@ -837,7 +864,7 @@ def test_off_by_default(self): docs = render_chart( show_only=["templates/rpc-server/rpc-server-networkpolicy.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_defaults(self): docs = render_chart( @@ -855,11 +882,11 @@ def test_defaults(self): show_only=["templates/rpc-server/rpc-server-networkpolicy.yaml"], ) - assert 1 == len(docs) - assert "NetworkPolicy" == docs[0]["kind"] - assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( - "spec.ingress[0].from", docs[0] - ) + assert len(docs) == 1 + assert docs[0]["kind"] == "NetworkPolicy" + assert jmespath.search("spec.ingress[0].from", docs[0]) == [ + {"namespaceSelector": {"matchLabels": {"release": "myrelease"}}} + ] assert jmespath.search("spec.ingress[0].ports", docs[0]) == [{"port": 9080}] @pytest.mark.parametrize( diff --git a/helm_tests/airflow_core/test_scheduler.py b/helm_tests/airflow_core/test_scheduler.py index 8968cd7c2c59f..0bef3e7e1322b 100644 --- a/helm_tests/airflow_core/test_scheduler.py +++ b/helm_tests/airflow_core/test_scheduler.py @@ -64,10 +64,10 @@ def test_should_add_extra_containers(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_should_template_extra_containers(self): docs = render_chart( @@ -80,9 +80,9 @@ def test_should_template_extra_containers(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert {"name": "release-name-test-container"} == jmespath.search( - "spec.template.spec.containers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { + "name": "release-name-test-container" + } def test_disable_wait_for_migration(self): docs = render_chart( @@ -110,10 +110,10 @@ def test_should_add_extra_init_containers(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) + } def test_should_template_extra_init_containers(self): docs = render_chart( @@ -125,9 +125,9 @@ def test_should_template_extra_init_containers(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert {"name": "release-name-test-init-container"} == jmespath.search( - "spec.template.spec.initContainers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { + "name": 
"release-name-test-init-container" + } def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( @@ -147,8 +147,9 @@ def test_should_add_extra_volume_and_extra_volume_mount(self): assert "test-volume-airflow" in jmespath.search( "spec.template.spec.containers[0].volumeMounts[*].name", docs[0] ) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0]) + == "test-volume-airflow" ) def test_should_add_global_volume_and_global_volume_mount(self): @@ -270,22 +271,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert "Deployment" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "Deployment" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): @@ -350,13 +360,16 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.type", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.type", + docs[0], + ) + == "ssd" ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) - assert 1 == len(tolerations) - assert "dynamic-pods" == tolerations[0]["key"] + assert len(tolerations) == 1 + assert tolerations[0]["key"] == "dynamic-pods" assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) @@ -367,20 +380,23 @@ def test_scheduler_name(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/scheduler/scheduler-deployment.yaml"]) - assert {"component": "scheduler"} == jmespath.search( + assert jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], - ) + ) == {"component": "scheduler"} def test_livenessprobe_values_are_configurable(self): docs = render_chart( @@ -398,19 +414,23 @@ def test_livenessprobe_values_are_configurable(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert 111 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0] - ) - assert 222 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0] - ) - assert 333 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0]) + == 111 ) - assert 444 == jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) - assert ["sh", "-c", "echo", "wow such test"] == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0]) == 222 ) + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0]) == 333 + ) + assert jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) == 444 + assert jmespath.search("spec.template.spec.containers[0].livenessProbe.exec.command", docs[0]) == [ + "sh", + "-c", + "echo", + "wow such test", + ] def test_startupprobe_values_are_configurable(self): docs = render_chart( @@ -427,14 +447,17 @@ def test_startupprobe_values_are_configurable(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert 111 == jmespath.search("spec.template.spec.containers[0].startupProbe.timeoutSeconds", docs[0]) - assert 222 == jmespath.search( - "spec.template.spec.containers[0].startupProbe.failureThreshold", docs[0] - ) - assert 333 == jmespath.search("spec.template.spec.containers[0].startupProbe.periodSeconds", docs[0]) - assert ["sh", "-c", "echo", "wow such test"] == jmespath.search( - "spec.template.spec.containers[0].startupProbe.exec.command", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].startupProbe.timeoutSeconds", docs[0]) == 111 + assert ( + jmespath.search("spec.template.spec.containers[0].startupProbe.failureThreshold", docs[0]) == 222 + ) + assert jmespath.search("spec.template.spec.containers[0].startupProbe.periodSeconds", docs[0]) == 333 + assert jmespath.search("spec.template.spec.containers[0].startupProbe.exec.command", docs[0]) == [ + "sh", + "-c", + "echo", + "wow such test", + ] @pytest.mark.parametrize( "airflow_version, probe_command", @@ -518,16 +541,17 @@ def test_scheduler_security_contexts_are_configurable(self): }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert {"allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True} == jmespath.search( - "spec.template.spec.containers[0].securityContext", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].securityContext", docs[0]) == { + "allowPrivilegeEscalation": False, + "readOnlyRootFilesystem": True, + } - assert { + assert jmespath.search("spec.template.spec.securityContext", docs[0]) == { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, - } == jmespath.search("spec.template.spec.securityContext", docs[0]) + } def test_scheduler_security_context_legacy(self): docs = render_chart( @@ -544,12 +568,12 @@ def 
test_scheduler_security_context_legacy(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.securityContext", docs[0]) == { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, - } == jmespath.search("spec.template.spec.securityContext", docs[0]) + } def test_scheduler_resources_are_configurable(self): docs = render_chart( @@ -563,22 +587,24 @@ def test_scheduler_resources_are_configurable(self): }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) == "200m" + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" - assert "128Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.limits.memory", docs[0]) + == "128Mi" ) - assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) == "200m" + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.memory", docs[0]) + == "169Mi" ) - assert "300m" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.cpu", docs[0]) == "300m" ) def test_scheduler_resources_are_not_added_by_default(self): @@ -678,9 +704,11 @@ def test_default_command_and_args(self): docs = render_chart(show_only=["templates/scheduler/scheduler-deployment.yaml"]) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None - assert ["bash", "-c", "exec airflow scheduler"] == jmespath.search( - "spec.template.spec.containers[0].args", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == [ + "bash", + "-c", + "exec airflow scheduler", + ] @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) @@ -699,8 +727,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] @pytest.mark.parametrize( "dags_values", @@ -757,7 +785,7 @@ def 
test_dags_mount_and_gitsync_expected_with_dag_processor( vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) ] assert "dags" not in [vm["name"] for vm in jmespath.search("spec.template.spec.volumes", docs[0])] - assert 1 == len(jmespath.search("spec.template.spec.containers", docs[0])) + assert len(jmespath.search("spec.template.spec.containers", docs[0])) == 1 else: assert "dags" in [ vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) @@ -775,7 +803,7 @@ def test_persistence_volume_annotations(self): values={"executor": "LocalExecutor", "workers": {"persistence": {"annotations": {"foo": "bar"}}}}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert {"foo": "bar"} == jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0]) + assert jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0]) == {"foo": "bar"} @pytest.mark.parametrize( "executor", @@ -793,7 +821,7 @@ def test_scheduler_deployment_has_executor_label(self, executor): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert 1 == len(docs) + assert len(docs) == 1 assert executor == docs[0]["metadata"]["labels"].get("executor") def test_should_add_component_specific_annotations(self): @@ -818,8 +846,8 @@ def test_scheduler_pod_hostaliases(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert "127.0.0.1" == jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) - assert "foo.local" == jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) + assert jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) == "127.0.0.1" + assert jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) == "foo.local" def test_scheduler_template_storage_class_name(self): docs = render_chart( @@ -835,8 +863,9 @@ def test_scheduler_template_storage_class_name(self): }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert "release-name-storage-class" == jmespath.search( - "spec.volumeClaimTemplates[0].spec.storageClassName", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[0].spec.storageClassName", docs[0]) + == "release-name-storage-class" ) def test_persistent_volume_claim_retention_policy(self): @@ -853,9 +882,9 @@ def test_persistent_volume_claim_retention_policy(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert { + assert jmespath.search("spec.persistentVolumeClaimRetentionPolicy", docs[0]) == { "whenDeleted": "Delete", - } == jmespath.search("spec.persistentVolumeClaimRetentionPolicy", docs[0]) + } @pytest.mark.parametrize( "scheduler_values, expected", @@ -996,4 +1025,4 @@ def test_can_be_disabled(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 diff --git a/helm_tests/airflow_core/test_triggerer.py b/helm_tests/airflow_core/test_triggerer.py index f5e7f8d7b8d4b..54ff4d266a182 100644 --- a/helm_tests/airflow_core/test_triggerer.py +++ b/helm_tests/airflow_core/test_triggerer.py @@ -53,7 +53,7 @@ def test_can_be_disabled(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", @@ -102,10 +102,10 @@ def test_should_add_extra_containers(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert { + assert 
jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_should_template_extra_containers(self): docs = render_chart( @@ -117,9 +117,9 @@ def test_should_template_extra_containers(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert {"name": "release-name-test-container"} == jmespath.search( - "spec.template.spec.containers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { + "name": "release-name-test-container" + } def test_should_add_extra_init_containers(self): docs = render_chart( @@ -133,10 +133,10 @@ def test_should_add_extra_init_containers(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) + } def test_should_template_extra_init_containers(self): docs = render_chart( @@ -148,9 +148,9 @@ def test_should_template_extra_init_containers(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert {"name": "release-name-test-init-container"} == jmespath.search( - "spec.template.spec.initContainers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { + "name": "release-name-test-init-container" + } def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( @@ -165,12 +165,14 @@ def test_should_add_extra_volume_and_extra_volume_mount(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "test-volume-airflow" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[0].name", docs[0]) + == "test-volume-airflow" ) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0]) + == "test-volume-airflow" ) def test_should_add_global_volume_and_global_volume_mount(self): @@ -182,9 +184,9 @@ def test_should_add_global_volume_and_global_volume_mount(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert "test-volume" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) - assert "test-volume" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "test-volume" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[0].name", docs[0]) == "test-volume" ) def test_should_add_extraEnvs(self): @@ -254,9 +256,12 @@ def test_scheduler_name(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): @@ -285,22 +290,31 
@@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert "StatefulSet" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "StatefulSet" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): @@ -365,13 +379,16 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.type", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.type", + docs[0], + ) + == "ssd" ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) - assert 1 == len(tolerations) - assert "dynamic-pods" == tolerations[0]["key"] + assert len(tolerations) == 1 + assert tolerations[0]["key"] == "dynamic-pods" assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) @@ -379,12 +396,12 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/scheduler/scheduler-deployment.yaml"]) - assert {"component": "scheduler"} == jmespath.search( + assert jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], - ) + ) == {"component": "scheduler"} def test_livenessprobe_values_are_configurable(self): docs = render_chart( @@ -402,20 +419,24 @@ def test_livenessprobe_values_are_configurable(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert 111 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0]) + == 111 ) - assert 222 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0]) == 222 ) - assert 333 == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0]) == 333 ) - assert 444 == jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) == 444 - assert ["sh", "-c", "echo", "wow such test"] == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].livenessProbe.exec.command", docs[0]) == [ + "sh", + "-c", + "echo", + "wow such test", + ] @pytest.mark.parametrize( "airflow_version, probe_command", @@ -461,9 +482,10 @@ def test_logs_persistence_changes_volume(self, log_values, expected_volume): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert {"name": "logs", **expected_volume} == jmespath.search( - "spec.template.spec.volumes[1]", docs[0] - ) + assert jmespath.search("spec.template.spec.volumes[1]", docs[0]) == { + "name": "logs", + **expected_volume, + } def test_resources_are_configurable(self): docs = render_chart( @@ -477,22 +499,24 @@ def test_resources_are_configurable(self): }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) == "200m" + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" - assert "128Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.limits.memory", docs[0]) + == "128Mi" ) - assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) == "200m" + assert ( + 
jmespath.search("spec.template.spec.initContainers[0].resources.requests.memory", docs[0]) + == "169Mi" ) - assert "300m" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.cpu", docs[0]) == "300m" ) def test_resources_are_not_added_by_default(self): @@ -552,9 +576,11 @@ def test_default_command_and_args(self): ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None - assert ["bash", "-c", "exec airflow triggerer"] == jmespath.search( - "spec.template.spec.containers[0].args", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == [ + "bash", + "-c", + "exec airflow triggerer", + ] @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) @@ -575,8 +601,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_dags_gitsync_sidecar_and_init_container(self): docs = render_chart( @@ -648,16 +674,17 @@ def test_triggerer_pod_hostaliases(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert "127.0.0.1" == jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) - assert "foo.local" == jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) + assert jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) == "127.0.0.1" + assert jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) == "foo.local" def test_triggerer_template_storage_class_name(self): docs = render_chart( values={"triggerer": {"persistence": {"storageClassName": "{{ .Release.Name }}-storage-class"}}}, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert "release-name-storage-class" == jmespath.search( - "spec.volumeClaimTemplates[0].spec.storageClassName", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[0].spec.storageClassName", docs[0]) + == "release-name-storage-class" ) def test_persistent_volume_claim_retention_policy(self): @@ -674,9 +701,9 @@ def test_persistent_volume_claim_retention_policy(self): show_only=["templates/triggerer/triggerer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.persistentVolumeClaimRetentionPolicy", docs[0]) == { "whenDeleted": "Delete", - } == jmespath.search("spec.persistentVolumeClaimRetentionPolicy", docs[0]) + } class TestTriggererServiceAccount: @@ -813,8 +840,8 @@ def test_mysql_db_backend_keda(self): }, show_only=["templates/triggerer/triggerer-kedaautoscaler.yaml"], ) - assert "1" == jmespath.search("spec.triggers[0].metadata.queryValue", docs[0]) + assert jmespath.search("spec.triggers[0].metadata.queryValue", docs[0]) == "1" assert jmespath.search("spec.triggers[0].metadata.targetQueryValue", docs[0]) is None - assert "KEDA_DB_CONN" == jmespath.search("spec.triggers[0].metadata.connectionStringFromEnv", docs[0]) + assert jmespath.search("spec.triggers[0].metadata.connectionStringFromEnv", docs[0]) == "KEDA_DB_CONN" assert 
jmespath.search("spec.triggers[0].metadata.connectionFromEnv", docs[0]) is None diff --git a/helm_tests/airflow_core/test_worker.py b/helm_tests/airflow_core/test_worker.py index de0d66a55b942..4a630b2b1562f 100644 --- a/helm_tests/airflow_core/test_worker.py +++ b/helm_tests/airflow_core/test_worker.py @@ -77,10 +77,10 @@ def test_should_add_extra_containers(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_persistent_volume_claim_retention_policy(self): docs = render_chart( @@ -96,9 +96,9 @@ def test_persistent_volume_claim_retention_policy(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert { + assert jmespath.search("spec.persistentVolumeClaimRetentionPolicy", docs[0]) == { "whenDeleted": "Delete", - } == jmespath.search("spec.persistentVolumeClaimRetentionPolicy", docs[0]) + } def test_should_template_extra_containers(self): docs = render_chart( @@ -111,9 +111,9 @@ def test_should_template_extra_containers(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert {"name": "release-name-test-container"} == jmespath.search( - "spec.template.spec.containers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { + "name": "release-name-test-container" + } def test_disable_wait_for_migration(self): docs = render_chart( @@ -141,10 +141,10 @@ def test_should_add_extra_init_containers(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) + } def test_should_template_extra_init_containers(self): docs = render_chart( @@ -156,9 +156,9 @@ def test_should_template_extra_init_containers(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert {"name": "release-name-test-init-container"} == jmespath.search( - "spec.template.spec.initContainers[-1]", docs[0] - ) + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { + "name": "release-name-test-init-container" + } def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( @@ -174,12 +174,14 @@ def test_should_add_extra_volume_and_extra_volume_mount(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[0].name", docs[0]) == "test-volume-airflow" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[0].name", docs[0]) + == "test-volume-airflow" ) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0]) + == "test-volume-airflow" ) def test_should_add_global_volume_and_global_volume_mount(self): @@ -191,9 +193,9 @@ def test_should_add_global_volume_and_global_volume_mount(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert "test-volume" == 
jmespath.search("spec.template.spec.volumes[0].name", docs[0]) - assert "test-volume" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[0].name", docs[0]) == "test-volume" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[0].name", docs[0]) == "test-volume" ) def test_should_add_extraEnvs(self): @@ -267,8 +269,8 @@ def test_workers_host_aliases(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert "127.0.0.2" == jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) - assert "test.hostname" == jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) + assert jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) == "127.0.0.2" + assert jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) == "test.hostname" @pytest.mark.parametrize( "persistence, update_strategy, expected_update_strategy", @@ -342,22 +344,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert "StatefulSet" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "StatefulSet" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): @@ -422,13 +433,16 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.type", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.type", + docs[0], + ) + == "ssd" ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) - assert 1 == len(tolerations) - assert "dynamic-pods" == tolerations[0]["key"] + assert len(tolerations) == 1 + assert tolerations[0]["key"] == "dynamic-pods" assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) @@ -439,20 +453,23 @@ def test_scheduler_name(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/workers/worker-deployment.yaml"]) - assert {"component": "worker"} == jmespath.search( + assert jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], - ) + ) == {"component": "worker"} def test_runtime_class_name_values_are_configurable(self): docs = render_chart( @@ -536,7 +553,7 @@ def test_extra_init_container_restart_policy_is_configurable(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert "Always" == jmespath.search("spec.template.spec.initContainers[1].restartPolicy", docs[0]) + assert jmespath.search("spec.template.spec.initContainers[1].restartPolicy", docs[0]) == "Always" @pytest.mark.parametrize( "log_values, expected_volume", @@ -581,25 +598,27 @@ def test_worker_resources_are_configurable(self): show_only=["templates/workers/worker-deployment.yaml"], ) # main container - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) == "200m" - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" # initContainer wait-for-airflow-configurations - assert "128Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.limits.memory", docs[0]) + == "128Mi" ) - assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) + assert jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) == "200m" - assert "169Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.memory", docs[0]) + == "169Mi" ) - assert "300m" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.cpu", docs[0]) == "300m" ) def test_worker_resources_are_not_added_by_default(self): @@ -723,8 +742,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_dags_gitsync_sidecar_and_init_container(self): docs = render_chart( @@ -756,7 +775,7 @@ def test_persistence_volume_annotations(self): values={"workers": {"persistence": {"annotations": {"foo": "bar"}}}}, show_only=["templates/workers/worker-deployment.yaml"], ) - assert {"foo": "bar"} == jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0]) + assert jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0]) == {"foo": "bar"} def 
test_should_add_component_specific_annotations(self): docs = render_chart( @@ -934,25 +953,27 @@ def test_should_add_extra_volume_claim_templates(self): show_only=["templates/workers/worker-deployment.yaml"], ) - assert "test-volume-airflow-1" == jmespath.search( - "spec.volumeClaimTemplates[1].metadata.name", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[1].metadata.name", docs[0]) == "test-volume-airflow-1" ) - assert "test-volume-airflow-2" == jmespath.search( - "spec.volumeClaimTemplates[2].metadata.name", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[2].metadata.name", docs[0]) == "test-volume-airflow-2" ) - assert "storage-class-1" == jmespath.search( - "spec.volumeClaimTemplates[1].spec.storageClassName", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[1].spec.storageClassName", docs[0]) + == "storage-class-1" ) - assert "storage-class-2" == jmespath.search( - "spec.volumeClaimTemplates[2].spec.storageClassName", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[2].spec.storageClassName", docs[0]) + == "storage-class-2" ) - assert ["ReadWriteOnce"] == jmespath.search("spec.volumeClaimTemplates[1].spec.accessModes", docs[0]) - assert ["ReadWriteOnce"] == jmespath.search("spec.volumeClaimTemplates[2].spec.accessModes", docs[0]) - assert "10Gi" == jmespath.search( - "spec.volumeClaimTemplates[1].spec.resources.requests.storage", docs[0] + assert jmespath.search("spec.volumeClaimTemplates[1].spec.accessModes", docs[0]) == ["ReadWriteOnce"] + assert jmespath.search("spec.volumeClaimTemplates[2].spec.accessModes", docs[0]) == ["ReadWriteOnce"] + assert ( + jmespath.search("spec.volumeClaimTemplates[1].spec.resources.requests.storage", docs[0]) == "10Gi" ) - assert "20Gi" == jmespath.search( - "spec.volumeClaimTemplates[2].spec.resources.requests.storage", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[2].spec.resources.requests.storage", docs[0]) == "20Gi" ) @pytest.mark.parametrize( @@ -979,8 +1000,9 @@ def test_worker_template_storage_class_name(self): values={"workers": {"persistence": {"storageClassName": "{{ .Release.Name }}-storage-class"}}}, show_only=["templates/workers/worker-deployment.yaml"], ) - assert "release-name-storage-class" == jmespath.search( - "spec.volumeClaimTemplates[0].spec.storageClassName", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[0].spec.storageClassName", docs[0]) + == "release-name-storage-class" ) @@ -1076,10 +1098,10 @@ def test_mysql_db_backend_keda_worker(self): }, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) - assert "1" == jmespath.search("spec.triggers[0].metadata.queryValue", docs[0]) + assert jmespath.search("spec.triggers[0].metadata.queryValue", docs[0]) == "1" assert jmespath.search("spec.triggers[0].metadata.targetQueryValue", docs[0]) is None - assert "KEDA_DB_CONN" == jmespath.search("spec.triggers[0].metadata.connectionStringFromEnv", docs[0]) + assert jmespath.search("spec.triggers[0].metadata.connectionStringFromEnv", docs[0]) == "KEDA_DB_CONN" assert jmespath.search("spec.triggers[0].metadata.connectionFromEnv", docs[0]) is None diff --git a/helm_tests/other/test_dags_persistent_volume_claim.py b/helm_tests/other/test_dags_persistent_volume_claim.py index 8667bcc4c8eb0..c150d89168f8c 100644 --- a/helm_tests/other/test_dags_persistent_volume_claim.py +++ b/helm_tests/other/test_dags_persistent_volume_claim.py @@ -30,7 +30,7 @@ def test_should_not_generate_a_document_if_persistence_is_disabled(self): 
show_only=["templates/dags-persistent-volume-claim.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_should_not_generate_a_document_when_using_an_existing_claim(self): docs = render_chart( @@ -38,7 +38,7 @@ def test_should_not_generate_a_document_when_using_an_existing_claim(self): show_only=["templates/dags-persistent-volume-claim.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_should_generate_a_document_if_persistence_is_enabled_and_not_using_an_existing_claim(self): docs = render_chart( @@ -46,7 +46,7 @@ def test_should_generate_a_document_if_persistence_is_enabled_and_not_using_an_e show_only=["templates/dags-persistent-volume-claim.yaml"], ) - assert 1 == len(docs) + assert len(docs) == 1 def test_should_set_pvc_details_correctly(self): docs = render_chart( @@ -64,11 +64,11 @@ def test_should_set_pvc_details_correctly(self): show_only=["templates/dags-persistent-volume-claim.yaml"], ) - assert { + assert jmespath.search("spec", docs[0]) == { "accessModes": ["ReadWriteMany"], "resources": {"requests": {"storage": "1G"}}, "storageClassName": "MyStorageClass", - } == jmespath.search("spec", docs[0]) + } def test_single_annotation(self): docs = render_chart( @@ -88,7 +88,7 @@ def test_single_annotation(self): ) annotations = jmespath.search("metadata.annotations", docs[0]) - assert "value" == annotations.get("key") + assert annotations.get("key") == "value" def test_multiple_annotations(self): docs = render_chart( @@ -108,8 +108,8 @@ def test_multiple_annotations(self): ) annotations = jmespath.search("metadata.annotations", docs[0]) - assert "value" == annotations.get("key") - assert "value-two" == annotations.get("key-two") + assert annotations.get("key") == "value" + assert annotations.get("key-two") == "value-two" def test_dags_persistent_volume_claim_template_storage_class_name(self): docs = render_chart( @@ -124,4 +124,4 @@ def test_dags_persistent_volume_claim_template_storage_class_name(self): }, show_only=["templates/dags-persistent-volume-claim.yaml"], ) - assert "release-name-storage-class" == jmespath.search("spec.storageClassName", docs[0]) + assert jmespath.search("spec.storageClassName", docs[0]) == "release-name-storage-class" diff --git a/helm_tests/other/test_flower.py b/helm_tests/other/test_flower.py index 0f2f2dd66b76e..af4eaaa101a70 100644 --- a/helm_tests/other/test_flower.py +++ b/helm_tests/other/test_flower.py @@ -44,8 +44,8 @@ def test_create_flower(self, executor, flower_enabled, created): assert bool(docs) is created if created: - assert "release-name-flower" == jmespath.search("metadata.name", docs[0]) - assert "flower" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) + assert jmespath.search("metadata.name", docs[0]) == "release-name-flower" + assert jmespath.search("spec.template.spec.containers[0].name", docs[0]) == "flower" @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", @@ -123,8 +123,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/flower/flower-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_should_create_flower_deployment_with_authorization(self): docs = render_chart( @@ -135,15 
+135,22 @@ def test_should_create_flower_deployment_with_authorization(self): show_only=["templates/flower/flower-deployment.yaml"], ) - assert "AIRFLOW__CELERY__FLOWER_BASIC_AUTH" == jmespath.search( - "spec.template.spec.containers[0].env[0].name", docs[0] - ) - assert ["curl", "--user", "$AIRFLOW__CELERY__FLOWER_BASIC_AUTH", "localhost:7777"] == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] - ) - assert ["curl", "--user", "$AIRFLOW__CELERY__FLOWER_BASIC_AUTH", "localhost:7777"] == jmespath.search( - "spec.template.spec.containers[0].readinessProbe.exec.command", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].env[0].name", docs[0]) + == "AIRFLOW__CELERY__FLOWER_BASIC_AUTH" ) + assert jmespath.search("spec.template.spec.containers[0].livenessProbe.exec.command", docs[0]) == [ + "curl", + "--user", + "$AIRFLOW__CELERY__FLOWER_BASIC_AUTH", + "localhost:7777", + ] + assert jmespath.search("spec.template.spec.containers[0].readinessProbe.exec.command", docs[0]) == [ + "curl", + "--user", + "$AIRFLOW__CELERY__FLOWER_BASIC_AUTH", + "localhost:7777", + ] def test_should_create_flower_deployment_without_authorization(self): docs = render_chart( @@ -154,15 +161,18 @@ def test_should_create_flower_deployment_without_authorization(self): show_only=["templates/flower/flower-deployment.yaml"], ) - assert "AIRFLOW__CORE__FERNET_KEY" == jmespath.search( - "spec.template.spec.containers[0].env[0].name", docs[0] - ) - assert ["curl", "localhost:7777"] == jmespath.search( - "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] - ) - assert ["curl", "localhost:7777"] == jmespath.search( - "spec.template.spec.containers[0].readinessProbe.exec.command", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].env[0].name", docs[0]) + == "AIRFLOW__CORE__FERNET_KEY" ) + assert jmespath.search("spec.template.spec.containers[0].livenessProbe.exec.command", docs[0]) == [ + "curl", + "localhost:7777", + ] + assert jmespath.search("spec.template.spec.containers[0].readinessProbe.exec.command", docs[0]) == [ + "curl", + "localhost:7777", + ] def test_scheduler_name(self): docs = render_chart( @@ -170,9 +180,12 @@ def test_scheduler_name(self): show_only=["templates/flower/flower-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): @@ -202,22 +215,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/flower/flower-deployment.yaml"], ) - assert "Deployment" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], - ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], - ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert jmespath.search("kind", docs[0]) == "Deployment" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." 
+ "key", + docs[0], + ) + == "foo" + ) + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" + ) + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_flower_resources_are_configurable(self): @@ -233,11 +255,11 @@ def test_flower_resources_are_configurable(self): }, show_only=["templates/flower/flower-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" def test_flower_resources_are_not_added_by_default(self): docs = render_chart( @@ -259,10 +281,10 @@ def test_should_add_extra_containers(self): show_only=["templates/flower/flower-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( @@ -398,12 +420,12 @@ def test_probe_values_are_configurable(self, probe): show_only=["templates/flower/flower-deployment.yaml"], ) - assert 111 == jmespath.search( - f"spec.template.spec.containers[0].{probe}.initialDelaySeconds", docs[0] + assert ( + jmespath.search(f"spec.template.spec.containers[0].{probe}.initialDelaySeconds", docs[0]) == 111 ) - assert 222 == jmespath.search(f"spec.template.spec.containers[0].{probe}.timeoutSeconds", docs[0]) - assert 333 == jmespath.search(f"spec.template.spec.containers[0].{probe}.failureThreshold", docs[0]) - assert 444 == jmespath.search(f"spec.template.spec.containers[0].{probe}.periodSeconds", docs[0]) + assert jmespath.search(f"spec.template.spec.containers[0].{probe}.timeoutSeconds", docs[0]) == 222 + assert jmespath.search(f"spec.template.spec.containers[0].{probe}.failureThreshold", docs[0]) == 333 + assert jmespath.search(f"spec.template.spec.containers[0].{probe}.periodSeconds", docs[0]) == 444 class TestFlowerService: @@ -428,7 +450,7 @@ def test_create_flower(self, executor, flower_enabled, created): assert bool(docs) is created if created: - assert "release-name-flower" == jmespath.search("metadata.name", docs[0]) + assert jmespath.search("metadata.name", docs[0]) == "release-name-flower" def test_default_service(self): docs = render_chart( @@ -436,12 +458,14 @@ def test_default_service(self): show_only=["templates/flower/flower-service.yaml"], ) - assert "release-name-flower" == jmespath.search("metadata.name", docs[0]) + assert jmespath.search("metadata.name", docs[0]) == "release-name-flower" assert jmespath.search("metadata.annotations", docs[0]) is None - assert {"tier": "airflow", "component": "flower", "release": "release-name"} == jmespath.search( - "spec.selector", docs[0] - ) - assert "ClusterIP" == jmespath.search("spec.type", docs[0]) + assert jmespath.search("spec.selector", docs[0]) == { + "tier": "airflow", + "component": "flower", + "release": "release-name", + } + 
assert jmespath.search("spec.type", docs[0]) == "ClusterIP" assert {"name": "flower-ui", "port": 5555} in jmespath.search("spec.ports", docs[0]) def test_overrides(self): @@ -461,11 +485,11 @@ def test_overrides(self): show_only=["templates/flower/flower-service.yaml"], ) - assert {"foo": "bar"} == jmespath.search("metadata.annotations", docs[0]) - assert "LoadBalancer" == jmespath.search("spec.type", docs[0]) + assert jmespath.search("metadata.annotations", docs[0]) == {"foo": "bar"} + assert jmespath.search("spec.type", docs[0]) == "LoadBalancer" assert {"name": "flower-ui", "port": 9000} in jmespath.search("spec.ports", docs[0]) - assert "127.0.0.1" == jmespath.search("spec.loadBalancerIP", docs[0]) - assert ["10.123.0.0/16"] == jmespath.search("spec.loadBalancerSourceRanges", docs[0]) + assert jmespath.search("spec.loadBalancerIP", docs[0]) == "127.0.0.1" + assert jmespath.search("spec.loadBalancerSourceRanges", docs[0]) == ["10.123.0.0/16"] @pytest.mark.parametrize( "ports, expected_ports", @@ -520,7 +544,7 @@ def test_off_by_default(self): docs = render_chart( show_only=["templates/flower/flower-networkpolicy.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_defaults(self): docs = render_chart( @@ -538,12 +562,12 @@ def test_defaults(self): show_only=["templates/flower/flower-networkpolicy.yaml"], ) - assert 1 == len(docs) - assert "NetworkPolicy" == docs[0]["kind"] - assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( - "spec.ingress[0].from", docs[0] - ) - assert [{"port": 5555}] == jmespath.search("spec.ingress[0].ports", docs[0]) + assert len(docs) == 1 + assert docs[0]["kind"] == "NetworkPolicy" + assert jmespath.search("spec.ingress[0].from", docs[0]) == [ + {"namespaceSelector": {"matchLabels": {"release": "myrelease"}}} + ] + assert jmespath.search("spec.ingress[0].ports", docs[0]) == [{"port": 5555}] @pytest.mark.parametrize( "ports, expected_ports", @@ -594,9 +618,9 @@ def test_deprecated_from_param(self): show_only=["templates/flower/flower-networkpolicy.yaml"], ) - assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( - "spec.ingress[0].from", docs[0] - ) + assert jmespath.search("spec.ingress[0].from", docs[0]) == [ + {"namespaceSelector": {"matchLabels": {"release": "myrelease"}}} + ] def test_should_add_component_specific_labels(self): docs = render_chart( diff --git a/helm_tests/other/test_git_ssh_key_secret.py b/helm_tests/other/test_git_ssh_key_secret.py index 73f796a05c0c8..d612e018eab82 100644 --- a/helm_tests/other/test_git_ssh_key_secret.py +++ b/helm_tests/other/test_git_ssh_key_secret.py @@ -38,5 +38,5 @@ def test_create_git_ssh_key_secret(self): show_only=["templates/secrets/git-ssh-key-secret.yaml"], ) - assert "release-name-ssh-secret" == jmespath.search("metadata.name", docs[0]) - assert "Y205dElHbHpJSFJvWlNCcmFXNW4=" == jmespath.search("data.gitSshKey", docs[0]) + assert jmespath.search("metadata.name", docs[0]) == "release-name-ssh-secret" + assert jmespath.search("data.gitSshKey", docs[0]) == "Y205dElHbHpJSFJvWlNCcmFXNW4=" diff --git a/helm_tests/other/test_git_sync_scheduler.py b/helm_tests/other/test_git_sync_scheduler.py index 4610888a3779d..95f038ba32252 100644 --- a/helm_tests/other/test_git_sync_scheduler.py +++ b/helm_tests/other/test_git_sync_scheduler.py @@ -85,7 +85,7 @@ def test_validate_the_git_sync_container_spec(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert { + assert 
jmespath.search("spec.template.spec.containers[1]", docs[0]) == { "name": "git-sync-test", "securityContext": {"runAsUser": 65533}, "image": "test-registry/test-repo:test-tag", @@ -111,7 +111,7 @@ def test_validate_the_git_sync_container_spec(self): ], "volumeMounts": [{"mountPath": "/git", "name": "dags"}], "resources": {}, - } == jmespath.search("spec.template.spec.containers[1]", docs[0]) + } def test_validate_the_git_sync_container_spec_if_wait_specified(self): docs = render_chart( @@ -147,7 +147,7 @@ def test_validate_the_git_sync_container_spec_if_wait_specified(self): show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[1]", docs[0]) == { "name": "git-sync-test", "securityContext": {"runAsUser": 65533}, "image": "test-registry/test-repo:test-tag", @@ -174,7 +174,7 @@ def test_validate_the_git_sync_container_spec_if_wait_specified(self): ], "volumeMounts": [{"mountPath": "/git", "name": "dags"}], "resources": {}, - } == jmespath.search("spec.template.spec.containers[1]", docs[0]) + } def test_validate_if_ssh_params_are_added(self): docs = render_chart( @@ -393,8 +393,8 @@ def test_resources_are_configurable(self): }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[1].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0]) == "128Mi" + assert ( + jmespath.search("spec.template.spec.containers[1].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0]) == "300m" diff --git a/helm_tests/other/test_git_sync_webserver.py b/helm_tests/other/test_git_sync_webserver.py index 8ecde2348cee4..f53a56913e249 100644 --- a/helm_tests/other/test_git_sync_webserver.py +++ b/helm_tests/other/test_git_sync_webserver.py @@ -34,7 +34,7 @@ def test_should_add_dags_volume_to_the_webserver_if_git_sync_and_persistence_is_ show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "dags" def test_should_add_dags_volume_to_the_webserver_if_git_sync_is_enabled_and_persistence_is_disabled(self): docs = render_chart( @@ -45,7 +45,7 @@ def test_should_add_dags_volume_to_the_webserver_if_git_sync_is_enabled_and_pers show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "dags" def test_should_add_git_sync_container_to_webserver_if_persistence_is_not_enabled_but_git_sync_is(self): docs = render_chart( @@ -59,7 +59,7 @@ def test_should_add_git_sync_container_to_webserver_if_persistence_is_not_enable show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "git-sync" == jmespath.search("spec.template.spec.containers[1].name", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].name", docs[0]) == "git-sync" def test_should_have_service_account_defined(self): docs = render_chart( @@ -67,8 +67,9 @@ def test_should_have_service_account_defined(self): 
show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "release-name-airflow-webserver" == jmespath.search( - "spec.template.spec.serviceAccountName", docs[0] + assert ( + jmespath.search("spec.template.spec.serviceAccountName", docs[0]) + == "release-name-airflow-webserver" ) @pytest.mark.parametrize( @@ -149,11 +150,11 @@ def test_resources_are_configurable(self): }, show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[1].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0]) == "128Mi" + assert ( + jmespath.search("spec.template.spec.containers[1].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0]) == "300m" def test_validate_sshkeysecret_not_added_when_persistence_is_enabled(self): docs = render_chart( diff --git a/helm_tests/other/test_git_sync_worker.py b/helm_tests/other/test_git_sync_worker.py index 44613f5e8ea27..3397e52319068 100644 --- a/helm_tests/other/test_git_sync_worker.py +++ b/helm_tests/other/test_git_sync_worker.py @@ -33,8 +33,8 @@ def test_should_add_dags_volume_to_the_worker_if_git_sync_and_persistence_is_ena show_only=["templates/workers/worker-deployment.yaml"], ) - assert "config" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) - assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) + assert jmespath.search("spec.template.spec.volumes[0].name", docs[0]) == "config" + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "dags" def test_should_add_dags_volume_to_the_worker_if_git_sync_is_enabled_and_persistence_is_disabled(self): docs = render_chart( @@ -45,8 +45,8 @@ def test_should_add_dags_volume_to_the_worker_if_git_sync_is_enabled_and_persist show_only=["templates/workers/worker-deployment.yaml"], ) - assert "config" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) - assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) + assert jmespath.search("spec.template.spec.volumes[0].name", docs[0]) == "config" + assert jmespath.search("spec.template.spec.volumes[1].name", docs[0]) == "dags" def test_should_add_git_sync_container_to_worker_if_persistence_is_not_enabled_but_git_sync_is(self): docs = render_chart( @@ -60,7 +60,7 @@ def test_should_add_git_sync_container_to_worker_if_persistence_is_not_enabled_b show_only=["templates/workers/worker-deployment.yaml"], ) - assert "git-sync" == jmespath.search("spec.template.spec.containers[1].name", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].name", docs[0]) == "git-sync" def test_should_not_add_sync_container_to_worker_if_git_sync_and_persistence_are_enabled(self): docs = render_chart( @@ -74,7 +74,7 @@ def test_should_not_add_sync_container_to_worker_if_git_sync_and_persistence_are show_only=["templates/workers/worker-deployment.yaml"], ) - assert "git-sync" != jmespath.search("spec.template.spec.containers[1].name", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].name", docs[0]) != "git-sync" def test_should_add_env(self): docs = render_chart( @@ -108,11 +108,11 @@ def test_resources_are_configurable(self): }, 
show_only=["templates/workers/worker-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[1].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0]) == "128Mi" + assert ( + jmespath.search("spec.template.spec.containers[1].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0]) == "300m" def test_validate_sshkeysecret_not_added_when_persistence_is_enabled(self): docs = render_chart( @@ -196,9 +196,9 @@ def test_container_lifecycle_hooks(self): }, show_only=["templates/workers/worker-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[1].lifecycle", docs[0]) == { "postStart": { "exec": {"command": ["/bin/sh", "-c", "echo postStart handler > /git/message_start"]} }, "preStop": {"exec": {"command": ["/bin/sh", "-c", "echo preStop handler > /git/message_start"]}}, - } == jmespath.search("spec.template.spec.containers[1].lifecycle", docs[0]) + } diff --git a/helm_tests/other/test_limit_ranges.py b/helm_tests/other/test_limit_ranges.py index 92b7b862a1802..39c6fe4459299 100644 --- a/helm_tests/other/test_limit_ranges.py +++ b/helm_tests/other/test_limit_ranges.py @@ -29,8 +29,8 @@ def test_limit_ranges_template(self): values={"limits": [{"max": {"cpu": "500m"}, "min": {"min": "200m"}, "type": "Container"}]}, show_only=["templates/limitrange.yaml"], ) - assert "LimitRange" == jmespath.search("kind", docs[0]) - assert "500m" == jmespath.search("spec.limits[0].max.cpu", docs[0]) + assert jmespath.search("kind", docs[0]) == "LimitRange" + assert jmespath.search("spec.limits[0].max.cpu", docs[0]) == "500m" def test_limit_ranges_are_not_added_by_default(self): docs = render_chart(show_only=["templates/limitrange.yaml"]) diff --git a/helm_tests/other/test_pgbouncer.py b/helm_tests/other/test_pgbouncer.py index c6b15826f36cd..04b84d2325b09 100644 --- a/helm_tests/other/test_pgbouncer.py +++ b/helm_tests/other/test_pgbouncer.py @@ -40,9 +40,9 @@ def test_should_create_pgbouncer(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert "Deployment" == jmespath.search("kind", docs[0]) - assert "release-name-pgbouncer" == jmespath.search("metadata.name", docs[0]) - assert "pgbouncer" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) + assert jmespath.search("kind", docs[0]) == "Deployment" + assert jmespath.search("metadata.name", docs[0]) == "release-name-pgbouncer" + assert jmespath.search("spec.template.spec.containers[0].name", docs[0]) == "pgbouncer" def test_should_create_pgbouncer_service(self): docs = render_chart( @@ -50,14 +50,15 @@ def test_should_create_pgbouncer_service(self): show_only=["templates/pgbouncer/pgbouncer-service.yaml"], ) - assert "Service" == jmespath.search("kind", docs[0]) - assert "release-name-pgbouncer" == jmespath.search("metadata.name", docs[0]) - assert "true" == jmespath.search('metadata.annotations."prometheus.io/scrape"', docs[0]) - assert "9127" == jmespath.search('metadata.annotations."prometheus.io/port"', docs[0]) + assert jmespath.search("kind", docs[0]) == "Service" + assert jmespath.search("metadata.name", docs[0]) == "release-name-pgbouncer" + assert 
jmespath.search('metadata.annotations."prometheus.io/scrape"', docs[0]) == "true" + assert jmespath.search('metadata.annotations."prometheus.io/port"', docs[0]) == "9127" - assert {"prometheus.io/scrape": "true", "prometheus.io/port": "9127"} == jmespath.search( - "metadata.annotations", docs[0] - ) + assert jmespath.search("metadata.annotations", docs[0]) == { + "prometheus.io/scrape": "true", + "prometheus.io/port": "9127", + } assert {"name": "pgbouncer", "protocol": "TCP", "port": 6543} in jmespath.search( "spec.ports", docs[0] @@ -75,8 +76,8 @@ def test_pgbouncer_service_with_custom_ports(self): show_only=["templates/pgbouncer/pgbouncer-service.yaml"], ) - assert "true" == jmespath.search('metadata.annotations."prometheus.io/scrape"', docs[0]) - assert "2222" == jmespath.search('metadata.annotations."prometheus.io/port"', docs[0]) + assert jmespath.search('metadata.annotations."prometheus.io/scrape"', docs[0]) == "true" + assert jmespath.search('metadata.annotations."prometheus.io/port"', docs[0]) == "2222" assert {"name": "pgbouncer", "protocol": "TCP", "port": 1111} in jmespath.search( "spec.ports", docs[0] ) @@ -92,11 +93,11 @@ def test_pgbouncer_service_extra_annotations(self): show_only=["templates/pgbouncer/pgbouncer-service.yaml"], ) - assert { + assert jmespath.search("metadata.annotations", docs[0]) == { "prometheus.io/scrape": "true", "prometheus.io/port": "9127", "foo": "bar", - } == jmespath.search("metadata.annotations", docs[0]) + } def test_pgbouncer_service_static_cluster_ip(self): docs = render_chart( @@ -106,7 +107,7 @@ def test_pgbouncer_service_static_cluster_ip(self): show_only=["templates/pgbouncer/pgbouncer-service.yaml"], ) - assert "10.10.10.10" == jmespath.search("spec.clusterIP", docs[0]) + assert jmespath.search("spec.clusterIP", docs[0]) == "10.10.10.10" @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", @@ -135,9 +136,12 @@ def test_scheduler_name(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): @@ -167,21 +171,30 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." 
+ "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_no_existing_secret(self): @@ -193,10 +206,10 @@ def test_no_existing_secret(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.volumes[0]", docs[0]) == { "name": "pgbouncer-config", "secret": {"secretName": "test-pgbouncer-config-pgbouncer-config"}, - } == jmespath.search("spec.template.spec.volumes[0]", docs[0]) + } def test_existing_secret(self): docs = render_chart( @@ -207,10 +220,10 @@ def test_existing_secret(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.volumes[0]", docs[0]) == { "name": "pgbouncer-config", "secret": {"secretName": "pgbouncer-config-secret"}, - } == jmespath.search("spec.template.spec.volumes[0]", docs[0]) + } def test_pgbouncer_resources_are_configurable(self): docs = render_chart( @@ -225,11 +238,11 @@ def test_pgbouncer_resources_are_configurable(self): }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" def test_pgbouncer_resources_are_not_added_by_default(self): docs = render_chart( @@ -256,7 +269,7 @@ def test_metrics_exporter_resources(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[1].resources", docs[0]) == { "limits": { "cpu": "2", "memory": "3Gi", @@ -265,7 +278,7 @@ def test_metrics_exporter_resources(self): "cpu": "1", "memory": "2Gi", }, - } == jmespath.search("spec.template.spec.containers[1].resources", docs[0]) + } def test_default_command_and_args(self): docs = render_chart( @@ -273,9 +286,12 @@ def test_default_command_and_args(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert ["pgbouncer", "-u", "nobody", "/etc/pgbouncer/pgbouncer.ini"] == jmespath.search( - "spec.template.spec.containers[0].command", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == [ + "pgbouncer", + "-u", + "nobody", + "/etc/pgbouncer/pgbouncer.ini", + ] assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) is None @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @@ -301,8 +317,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == 
jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( @@ -369,7 +385,7 @@ def test_pgbouncer_replicas_are_configurable(self): }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert 2 == jmespath.search("spec.replicas", docs[0]) + assert jmespath.search("spec.replicas", docs[0]) == 2 def test_should_add_component_specific_annotations(self): docs = render_chart( @@ -574,10 +590,10 @@ def test_should_add_extra_containers(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "airflow", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } class TestPgbouncerExporter: @@ -587,7 +603,7 @@ def test_secret_not_created_by_default(self): docs = render_chart( show_only=["templates/secrets/pgbouncer-stats-secret.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def _get_connection(self, values: dict) -> str: docs = render_chart( @@ -599,7 +615,7 @@ def _get_connection(self, values: dict) -> str: def test_default_exporter_secret(self): connection = self._get_connection({"pgbouncer": {"enabled": True}}) - assert "postgresql://postgres:postgres@127.0.0.1:6543/pgbouncer?sslmode=disable" == connection + assert connection == "postgresql://postgres:postgres@127.0.0.1:6543/pgbouncer?sslmode=disable" def test_exporter_secret_with_overrides(self): connection = self._get_connection( @@ -618,8 +634,8 @@ def test_exporter_secret_with_overrides(self): } ) assert ( - "postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%29@127.0.0.1:1111" - "/pgbouncer?sslmode=require" == connection + connection == "postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%29@127.0.0.1:1111" + "/pgbouncer?sslmode=require" ) def test_no_existing_secret(self): @@ -631,10 +647,10 @@ def test_no_existing_secret(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[1].env[0].valueFrom.secretKeyRef", docs[0]) == { "name": "test-pgbouncer-stats-pgbouncer-stats", "key": "connection", - } == jmespath.search("spec.template.spec.containers[1].env[0].valueFrom.secretKeyRef", docs[0]) + } def test_existing_secret(self): docs = render_chart( @@ -650,10 +666,10 @@ def test_existing_secret(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[1].env[0].valueFrom.secretKeyRef", docs[0]) == { "name": "existing-stats-secret", "key": "connection", - } == jmespath.search("spec.template.spec.containers[1].env[0].valueFrom.secretKeyRef", docs[0]) + } def test_existing_secret_existing_key(self): docs = render_chart( @@ -670,10 +686,10 @@ def test_existing_secret_existing_key(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[1].env[0].valueFrom.secretKeyRef", docs[0]) == { "name": "existing-stats-secret", "key": "exisiting-stats-secret-key", - } == jmespath.search("spec.template.spec.containers[1].env[0].valueFrom.secretKeyRef", docs[0]) + } def test_unused_secret_key(self): docs = render_chart( @@ -689,10 +705,10 @@ 
def test_unused_secret_key(self): show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[1].env[0].valueFrom.secretKeyRef", docs[0]) == { "name": "test-pgbouncer-stats-pgbouncer-stats", "key": "connection", - } == jmespath.search("spec.template.spec.containers[1].env[0].valueFrom.secretKeyRef", docs[0]) + } class TestPgBouncerServiceAccount: @@ -732,8 +748,8 @@ def test_should_create_pgbouncer_network_policy(self): show_only=["templates/pgbouncer/pgbouncer-networkpolicy.yaml"], ) - assert "NetworkPolicy" == jmespath.search("kind", docs[0]) - assert "release-name-pgbouncer-policy" == jmespath.search("metadata.name", docs[0]) + assert jmespath.search("kind", docs[0]) == "NetworkPolicy" + assert jmespath.search("metadata.name", docs[0]) == "release-name-pgbouncer-policy" @pytest.mark.parametrize( "conf, expected_selector", @@ -859,12 +875,14 @@ def test_pgbouncer_ingress(self): show_only=["templates/pgbouncer/pgbouncer-ingress.yaml"], ) - assert {"name": "release-name-pgbouncer", "port": {"name": "pgb-metrics"}} == jmespath.search( - "spec.rules[0].http.paths[0].backend.service", docs[0] - ) - assert "/metrics" == jmespath.search("spec.rules[0].http.paths[0].path", docs[0]) - assert "some-host" == jmespath.search("spec.rules[0].host", docs[0]) - assert {"hosts": ["some-host"], "secretName": "some-secret"} == jmespath.search( - "spec.tls[0]", docs[0] - ) - assert "ingress-class" == jmespath.search("spec.ingressClassName", docs[0]) + assert jmespath.search("spec.rules[0].http.paths[0].backend.service", docs[0]) == { + "name": "release-name-pgbouncer", + "port": {"name": "pgb-metrics"}, + } + assert jmespath.search("spec.rules[0].http.paths[0].path", docs[0]) == "/metrics" + assert jmespath.search("spec.rules[0].host", docs[0]) == "some-host" + assert jmespath.search("spec.tls[0]", docs[0]) == { + "hosts": ["some-host"], + "secretName": "some-secret", + } + assert jmespath.search("spec.ingressClassName", docs[0]) == "ingress-class" diff --git a/helm_tests/other/test_redis.py b/helm_tests/other/test_redis.py index 8c44567420314..046d51e5237d7 100644 --- a/helm_tests/other/test_redis.py +++ b/helm_tests/other/test_redis.py @@ -267,7 +267,7 @@ def test_default_redis_secrets_created_with_non_celery_executor(self): docs = render_chart( values={"executor": "KubernetesExecutor"}, show_only=["templates/secrets/redis-secrets.yaml"] ) - assert 2 == len(docs) + assert len(docs) == 2 def test_scheduler_name(self): docs = render_chart( @@ -275,9 +275,12 @@ def test_scheduler_name(self): show_only=["templates/redis/redis-statefulset.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): @@ -307,22 +310,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/redis/redis-statefulset.yaml"], ) - assert "StatefulSet" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "StatefulSet" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." 
+ "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_redis_resources_are_configurable(self): @@ -337,11 +349,11 @@ def test_redis_resources_are_configurable(self): }, show_only=["templates/redis/redis-statefulset.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" def test_redis_resources_are_not_added_by_default(self): docs = render_chart( @@ -364,7 +376,7 @@ def test_persistence_volume_annotations(self): values={"redis": {"persistence": {"annotations": {"foo": "bar"}}}}, show_only=["templates/redis/redis-statefulset.yaml"], ) - assert {"foo": "bar"} == jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0]) + assert jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0]) == {"foo": "bar"} @pytest.mark.parametrize( "redis_values, expected", @@ -389,9 +401,12 @@ def test_priority_class_name(self): show_only=["templates/redis/redis-statefulset.yaml"], ) - assert "airflow-priority-class-name" == jmespath.search( - "spec.template.spec.priorityClassName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.priorityClassName", + docs[0], + ) + == "airflow-priority-class-name" ) def test_redis_template_storage_class_name(self): @@ -399,8 +414,9 @@ def test_redis_template_storage_class_name(self): values={"redis": {"persistence": {"storageClassName": "{{ .Release.Name }}-storage-class"}}}, show_only=["templates/redis/redis-statefulset.yaml"], ) - assert "release-name-storage-class" == jmespath.search( - "spec.volumeClaimTemplates[0].spec.storageClassName", docs[0] + assert ( + jmespath.search("spec.volumeClaimTemplates[0].spec.storageClassName", docs[0]) + == "release-name-storage-class" ) def test_redis_template_persistence_storage_existing_claim(self): @@ -481,7 +497,7 @@ def test_redis_service_nodeport(self): }, show_only=["templates/redis/redis-service.yaml"], ) - assert 11111 == jmespath.search("spec.ports[0].nodePort", docs[0]) + assert jmespath.search("spec.ports[0].nodePort", docs[0]) == 11111 def test_redis_service_clusterIP(self): docs = render_chart( @@ -492,4 +508,4 @@ def test_redis_service_clusterIP(self): }, show_only=["templates/redis/redis-service.yaml"], ) - assert "127.0.0.1" == jmespath.search("spec.clusterIP", docs[0]) + assert jmespath.search("spec.clusterIP", docs[0]) == "127.0.0.1" diff --git a/helm_tests/other/test_resource_quota.py b/helm_tests/other/test_resource_quota.py index b63c111a4dc3d..1807ee68aaa05 100644 --- a/helm_tests/other/test_resource_quota.py +++ 
b/helm_tests/other/test_resource_quota.py @@ -38,8 +38,8 @@ def test_resource_quota_template(self): }, show_only=["templates/resourcequota.yaml"], ) - assert "ResourceQuota" == jmespath.search("kind", docs[0]) - assert "20" == jmespath.search("spec.hard.replicationcontrollers", docs[0]) + assert jmespath.search("kind", docs[0]) == "ResourceQuota" + assert jmespath.search("spec.hard.replicationcontrollers", docs[0]) == "20" def test_resource_quota_are_not_added_by_default(self): docs = render_chart( diff --git a/helm_tests/other/test_statsd.py b/helm_tests/other/test_statsd.py index 037895f430723..d54efc0f9e457 100644 --- a/helm_tests/other/test_statsd.py +++ b/helm_tests/other/test_statsd.py @@ -29,9 +29,9 @@ class TestStatsd: def test_should_create_statsd_default(self): docs = render_chart(show_only=["templates/statsd/statsd-deployment.yaml"]) - assert "release-name-statsd" == jmespath.search("metadata.name", docs[0]) + assert jmespath.search("metadata.name", docs[0]) == "release-name-statsd" - assert "statsd" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].name", docs[0]) == "statsd" assert {"name": "config", "configMap": {"name": "release-name-statsd"}} in jmespath.search( "spec.template.spec.volumes", docs[0] @@ -111,9 +111,12 @@ def test_scheduler_name(self): show_only=["templates/statsd/statsd-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): @@ -142,22 +145,31 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/statsd/statsd-deployment.yaml"], ) - assert "Deployment" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "Deployment" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." + "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." 
+ "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_stastd_resources_are_configurable(self): @@ -172,11 +184,11 @@ def test_stastd_resources_are_configurable(self): }, show_only=["templates/statsd/statsd-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" def test_statsd_security_contexts_are_configurable(self): docs = render_chart( @@ -198,16 +210,17 @@ def test_statsd_security_contexts_are_configurable(self): }, show_only=["templates/statsd/statsd-deployment.yaml"], ) - assert {"allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True} == jmespath.search( - "spec.template.spec.containers[0].securityContext", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].securityContext", docs[0]) == { + "allowPrivilegeEscalation": False, + "readOnlyRootFilesystem": True, + } - assert { + assert jmespath.search("spec.template.spec.securityContext", docs[0]) == { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, - } == jmespath.search("spec.template.spec.securityContext", docs[0]) + } def test_statsd_security_context_legacy(self): docs = render_chart( @@ -224,12 +237,12 @@ def test_statsd_security_context_legacy(self): show_only=["templates/statsd/statsd-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.securityContext", docs[0]) == { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, - } == jmespath.search("spec.template.spec.securityContext", docs[0]) + } def test_statsd_resources_are_not_added_by_default(self): docs = render_chart( @@ -261,8 +274,8 @@ def test_statsd_configmap_when_exist_extra_mappings(self): mappings_yml = jmespath.search('data."mappings.yml"', docs[0]) mappings_yml_obj = yaml.safe_load(mappings_yml) - assert "airflow_dagrun_dependency_check" == mappings_yml_obj["mappings"][0]["name"] - assert "airflow_pool_queued_slots" == mappings_yml_obj["mappings"][-1]["name"] + assert mappings_yml_obj["mappings"][0]["name"] == "airflow_dagrun_dependency_check" + assert mappings_yml_obj["mappings"][-1]["name"] == "airflow_pool_queued_slots" def test_statsd_configmap_when_exist_override_mappings(self): override_mapping = { @@ -278,8 +291,8 @@ def test_statsd_configmap_when_exist_override_mappings(self): mappings_yml = jmespath.search('data."mappings.yml"', docs[0]) mappings_yml_obj = yaml.safe_load(mappings_yml) - assert 1 == len(mappings_yml_obj["mappings"]) - assert "airflow_pool_queued_slots" == mappings_yml_obj["mappings"][0]["name"] + assert len(mappings_yml_obj["mappings"]) == 1 + assert 
mappings_yml_obj["mappings"][0]["name"] == "airflow_pool_queued_slots" def test_statsd_args_can_be_overridden(self): args = ["--some-arg=foo"] @@ -398,12 +411,14 @@ def test_statsd_ingress(self): show_only=["templates/statsd/statsd-ingress.yaml"], ) - assert {"name": "release-name-statsd", "port": {"name": "statsd-scrape"}} == jmespath.search( - "spec.rules[0].http.paths[0].backend.service", docs[0] - ) - assert "/metrics" == jmespath.search("spec.rules[0].http.paths[0].path", docs[0]) - assert "some-host" == jmespath.search("spec.rules[0].host", docs[0]) - assert {"hosts": ["some-host"], "secretName": "some-secret"} == jmespath.search( - "spec.tls[0]", docs[0] - ) - assert "ingress-class" == jmespath.search("spec.ingressClassName", docs[0]) + assert jmespath.search("spec.rules[0].http.paths[0].backend.service", docs[0]) == { + "name": "release-name-statsd", + "port": {"name": "statsd-scrape"}, + } + assert jmespath.search("spec.rules[0].http.paths[0].path", docs[0]) == "/metrics" + assert jmespath.search("spec.rules[0].host", docs[0]) == "some-host" + assert jmespath.search("spec.tls[0]", docs[0]) == { + "hosts": ["some-host"], + "secretName": "some-secret", + } + assert jmespath.search("spec.ingressClassName", docs[0]) == "ingress-class" diff --git a/helm_tests/security/test_elasticsearch_secret.py b/helm_tests/security/test_elasticsearch_secret.py index 43889cca93600..57f2f93f0d079 100644 --- a/helm_tests/security/test_elasticsearch_secret.py +++ b/helm_tests/security/test_elasticsearch_secret.py @@ -34,7 +34,7 @@ def test_should_not_generate_a_document_if_elasticsearch_disabled(self): show_only=["templates/secrets/elasticsearch-secret.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_should_raise_error_when_connection_not_provided(self): with pytest.raises(CalledProcessError) as ex_ctx: @@ -95,8 +95,9 @@ def test_should_correctly_handle_password_with_special_characters(self): ) assert ( - "http://username%21%40%23$%25%25%5E&%2A%28%29:password%21%40%23$%25%25%5E&%2A%28%29@" - "elastichostname:9200" == connection + connection + == "http://username%21%40%23$%25%25%5E&%2A%28%29:password%21%40%23$%25%25%5E&%2A%28%29@" + "elastichostname:9200" ) def test_should_generate_secret_with_specified_port(self): @@ -114,7 +115,7 @@ def test_should_generate_secret_with_specified_port(self): } ) - assert "http://username:password@elastichostname:2222" == connection + assert connection == "http://username:password@elastichostname:2222" @pytest.mark.parametrize("scheme", ["http", "https"]) def test_should_generate_secret_with_specified_schemes(self, scheme): @@ -158,6 +159,6 @@ def test_url_generated_when_user_pass_empty_combinations(self, extra_conn_kwargs ) if not expected_user_info: - assert "http://elastichostname:8080" == connection + assert connection == "http://elastichostname:8080" else: assert f"http://{expected_user_info}@elastichostname:8080" == connection diff --git a/helm_tests/security/test_kerberos.py b/helm_tests/security/test_kerberos.py index 02c7d4e534c50..332d3e3d1d1b2 100644 --- a/helm_tests/security/test_kerberos.py +++ b/helm_tests/security/test_kerberos.py @@ -152,4 +152,4 @@ def test_kerberos_keytab_secret_unavailable_when_not_specified(self): show_only=["templates/secrets/kerberos-keytab-secret.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 diff --git a/helm_tests/security/test_metadata_connection_secret.py b/helm_tests/security/test_metadata_connection_secret.py index 4a64f3ab5b2b4..7c38fbdbb752a 100644 --- 
a/helm_tests/security/test_metadata_connection_secret.py +++ b/helm_tests/security/test_metadata_connection_secret.py @@ -40,7 +40,7 @@ def test_should_not_generate_a_document_if_using_existing_secret(self): show_only=["templates/secrets/metadata-connection-secret.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def _get_connection(self, values: dict) -> str: docs = render_chart( @@ -54,8 +54,8 @@ def test_default_connection(self): connection = self._get_connection({}) assert ( - "postgresql://postgres:postgres@release-name-postgresql.default:5432/postgres?sslmode=disable" - == connection + connection + == "postgresql://postgres:postgres@release-name-postgresql.default:5432/postgres?sslmode=disable" ) def test_should_set_pgbouncer_overrides_when_enabled(self): @@ -64,8 +64,8 @@ def test_should_set_pgbouncer_overrides_when_enabled(self): # host, port, dbname get overridden assert ( - "postgresql://postgres:postgres@release-name-pgbouncer.default:6543" - "/release-name-metadata?sslmode=disable" == connection + connection == "postgresql://postgres:postgres@release-name-pgbouncer.default:6543" + "/release-name-metadata?sslmode=disable" ) def test_should_set_pgbouncer_overrides_with_non_chart_database_when_enabled(self): @@ -77,8 +77,8 @@ def test_should_set_pgbouncer_overrides_with_non_chart_database_when_enabled(sel # host, port, dbname still get overridden even with an non-chart db assert ( - "postgresql://someuser:somepass@release-name-pgbouncer.default:6543" - "/release-name-metadata?sslmode=disable" == connection + connection == "postgresql://someuser:somepass@release-name-pgbouncer.default:6543" + "/release-name-metadata?sslmode=disable" ) def test_should_correctly_use_non_chart_database(self): @@ -92,7 +92,7 @@ def test_should_correctly_use_non_chart_database(self): } connection = self._get_connection(values) - assert "postgresql://someuser:somepass@somehost:7777/somedb?sslmode=require" == connection + assert connection == "postgresql://someuser:somepass@somehost:7777/somedb?sslmode=require" def test_should_support_non_postgres_db(self): values = { @@ -106,7 +106,7 @@ def test_should_support_non_postgres_db(self): connection = self._get_connection(values) # sslmode is only added for postgresql - assert "mysql://someuser:somepass@somehost:7777/somedb" == connection + assert connection == "mysql://someuser:somepass@somehost:7777/somedb" def test_should_correctly_handle_password_with_special_characters(self): values = { @@ -122,6 +122,6 @@ def test_should_correctly_handle_password_with_special_characters(self): # sslmode is only added for postgresql assert ( - "postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%29@somehost:7777/" - "somedb?sslmode=disable" == connection + connection == "postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%29@somehost:7777/" + "somedb?sslmode=disable" ) diff --git a/helm_tests/security/test_result_backend_connection_secret.py b/helm_tests/security/test_result_backend_connection_secret.py index f23dfe83bd05e..f39e6dfce9e33 100644 --- a/helm_tests/security/test_result_backend_connection_secret.py +++ b/helm_tests/security/test_result_backend_connection_secret.py @@ -54,7 +54,7 @@ def test_should_not_generate_a_document_if_using_existing_secret(self): show_only=["templates/secrets/result-backend-connection-secret.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 @pytest.mark.parametrize( "executor, expected_doc_count", @@ -140,8 +140,8 @@ def test_should_set_pgbouncer_overrides_with_non_chart_database_when_enabled(sel # 
host, port, dbname still get overridden even with an non-chart db assert ( - "db+postgresql://someuser:somepass@release-name-pgbouncer:6543" - "/release-name-result-backend?sslmode=allow" == connection + connection == "db+postgresql://someuser:somepass@release-name-pgbouncer:6543" + "/release-name-result-backend?sslmode=allow" ) @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) @@ -160,7 +160,7 @@ def test_should_correctly_use_non_chart_database(self): values = {"data": {"resultBackendConnection": {**self.non_chart_database_values}}} connection = self._get_connection(values) - assert "db+postgresql://someuser:somepass@somehost:7777/somedb?sslmode=allow" == connection + assert connection == "db+postgresql://someuser:somepass@somehost:7777/somedb?sslmode=allow" def test_should_support_non_postgres_db(self): values = { @@ -174,7 +174,7 @@ def test_should_support_non_postgres_db(self): connection = self._get_connection(values) # sslmode is only added for postgresql - assert "db+mysql://someuser:somepass@somehost:7777/somedb" == connection + assert connection == "db+mysql://someuser:somepass@somehost:7777/somedb" def test_should_correctly_use_non_chart_database_when_both_db_are_external(self): values = { @@ -189,7 +189,7 @@ def test_should_correctly_use_non_chart_database_when_both_db_are_external(self) } connection = self._get_connection(values) - assert "db+postgresql://anotheruser:anotherpass@somehost:7777/somedb?sslmode=allow" == connection + assert connection == "db+postgresql://anotheruser:anotherpass@somehost:7777/somedb?sslmode=allow" def test_should_correctly_handle_password_with_special_characters(self): values = { @@ -204,6 +204,7 @@ def test_should_correctly_handle_password_with_special_characters(self): connection = self._get_connection(values) assert ( - "db+postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%29@somehost:7777/" - "somedb?sslmode=allow" == connection + connection + == "db+postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%29@somehost:7777/" + "somedb?sslmode=allow" ) diff --git a/helm_tests/security/test_scc_rolebinding.py b/helm_tests/security/test_scc_rolebinding.py index e77845811929d..78c796960032c 100644 --- a/helm_tests/security/test_scc_rolebinding.py +++ b/helm_tests/security/test_scc_rolebinding.py @@ -48,19 +48,19 @@ def test_create_scc(self, rbac_enabled, scc_enabled, created): assert bool(docs) is created if created: - assert "RoleBinding" == jmespath.search("kind", docs[0]) - assert "ClusterRole" == jmespath.search("roleRef.kind", docs[0]) - assert "release-name-scc-rolebinding" == jmespath.search("metadata.name", docs[0]) - assert "system:openshift:scc:anyuid" == jmespath.search("roleRef.name", docs[0]) - assert "release-name-airflow-webserver" == jmespath.search("subjects[0].name", docs[0]) - assert "release-name-airflow-worker" == jmespath.search("subjects[1].name", docs[0]) - assert "release-name-airflow-scheduler" == jmespath.search("subjects[2].name", docs[0]) - assert "release-name-airflow-statsd" == jmespath.search("subjects[3].name", docs[0]) - assert "release-name-airflow-flower" == jmespath.search("subjects[4].name", docs[0]) - assert "release-name-airflow-triggerer" == jmespath.search("subjects[5].name", docs[0]) - assert "release-name-airflow-migrate-database-job" == jmespath.search("subjects[6].name", docs[0]) - assert "release-name-airflow-create-user-job" == jmespath.search("subjects[7].name", docs[0]) - assert "release-name-airflow-cleanup" == jmespath.search("subjects[8].name", docs[0]) + assert 
jmespath.search("kind", docs[0]) == "RoleBinding" + assert jmespath.search("roleRef.kind", docs[0]) == "ClusterRole" + assert jmespath.search("metadata.name", docs[0]) == "release-name-scc-rolebinding" + assert jmespath.search("roleRef.name", docs[0]) == "system:openshift:scc:anyuid" + assert jmespath.search("subjects[0].name", docs[0]) == "release-name-airflow-webserver" + assert jmespath.search("subjects[1].name", docs[0]) == "release-name-airflow-worker" + assert jmespath.search("subjects[2].name", docs[0]) == "release-name-airflow-scheduler" + assert jmespath.search("subjects[3].name", docs[0]) == "release-name-airflow-statsd" + assert jmespath.search("subjects[4].name", docs[0]) == "release-name-airflow-flower" + assert jmespath.search("subjects[5].name", docs[0]) == "release-name-airflow-triggerer" + assert jmespath.search("subjects[6].name", docs[0]) == "release-name-airflow-migrate-database-job" + assert jmespath.search("subjects[7].name", docs[0]) == "release-name-airflow-create-user-job" + assert jmespath.search("subjects[8].name", docs[0]) == "release-name-airflow-cleanup" @pytest.mark.parametrize( "rbac_enabled,scc_enabled,created,namespace,expected_name", @@ -84,10 +84,10 @@ def test_create_scc_multinamespace(self, rbac_enabled, scc_enabled, created, nam assert bool(docs) is created if created: - assert "ClusterRoleBinding" == jmespath.search("kind", docs[0]) - assert "ClusterRole" == jmespath.search("roleRef.kind", docs[0]) + assert jmespath.search("kind", docs[0]) == "ClusterRoleBinding" + assert jmespath.search("roleRef.kind", docs[0]) == "ClusterRole" assert expected_name == jmespath.search("metadata.name", docs[0]) - assert "system:openshift:scc:anyuid" == jmespath.search("roleRef.name", docs[0]) + assert jmespath.search("roleRef.name", docs[0]) == "system:openshift:scc:anyuid" @pytest.mark.parametrize( "rbac_enabled,scc_enabled,created", @@ -110,12 +110,12 @@ def test_create_scc_worker_only(self, rbac_enabled, scc_enabled, created): assert bool(docs) is created if created: - assert "RoleBinding" == jmespath.search("kind", docs[0]) - assert "ClusterRole" == jmespath.search("roleRef.kind", docs[0]) - assert "release-name-scc-rolebinding" == jmespath.search("metadata.name", docs[0]) - assert "system:openshift:scc:anyuid" == jmespath.search("roleRef.name", docs[0]) - assert "release-name-airflow-webserver" == jmespath.search("subjects[0].name", docs[0]) - assert "release-name-airflow-worker" == jmespath.search("subjects[1].name", docs[0]) - assert "release-name-airflow-scheduler" == jmespath.search("subjects[2].name", docs[0]) - assert "release-name-airflow-triggerer" == jmespath.search("subjects[3].name", docs[0]) - assert "release-name-airflow-migrate-database-job" == jmespath.search("subjects[4].name", docs[0]) + assert jmespath.search("kind", docs[0]) == "RoleBinding" + assert jmespath.search("roleRef.kind", docs[0]) == "ClusterRole" + assert jmespath.search("metadata.name", docs[0]) == "release-name-scc-rolebinding" + assert jmespath.search("roleRef.name", docs[0]) == "system:openshift:scc:anyuid" + assert jmespath.search("subjects[0].name", docs[0]) == "release-name-airflow-webserver" + assert jmespath.search("subjects[1].name", docs[0]) == "release-name-airflow-worker" + assert jmespath.search("subjects[2].name", docs[0]) == "release-name-airflow-scheduler" + assert jmespath.search("subjects[3].name", docs[0]) == "release-name-airflow-triggerer" + assert jmespath.search("subjects[4].name", docs[0]) == "release-name-airflow-migrate-database-job" diff --git 
index c6f8f8ce799d8..a7f164cdb2887 100644
--- a/helm_tests/security/test_security_context.py
+++ b/helm_tests/security/test_security_context.py
@@ -46,8 +46,8 @@ def test_check_deployments_and_jobs(self):
         )
 
         for doc in docs:
-            assert 3000 == jmespath.search("spec.template.spec.securityContext.runAsUser", doc)
-            assert 30 == jmespath.search("spec.template.spec.securityContext.fsGroup", doc)
+            assert jmespath.search("spec.template.spec.securityContext.runAsUser", doc) == 3000
+            assert jmespath.search("spec.template.spec.securityContext.fsGroup", doc) == 30
 
     def test_check_statsd_uid(self):
         docs = render_chart(
@@ -55,7 +55,7 @@ def test_check_statsd_uid(self):
             show_only=["templates/statsd/statsd-deployment.yaml"],
         )
 
-        assert 3000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0])
+        assert jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) == 3000
 
     def test_check_pgbouncer_uid(self):
         docs = render_chart(
@@ -63,7 +63,7 @@ def test_check_pgbouncer_uid(self):
             show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"],
         )
 
-        assert 3000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0])
+        assert jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) == 3000
 
     def test_check_cleanup_job(self):
         docs = render_chart(
@@ -71,10 +71,10 @@ def test_check_cleanup_job(self):
             show_only=["templates/cleanup/cleanup-cronjob.yaml"],
         )
 
-        assert 3000 == jmespath.search(
-            "spec.jobTemplate.spec.template.spec.securityContext.runAsUser", docs[0]
+        assert (
+            jmespath.search("spec.jobTemplate.spec.template.spec.securityContext.runAsUser", docs[0]) == 3000
         )
-        assert 30 == jmespath.search("spec.jobTemplate.spec.template.spec.securityContext.fsGroup", docs[0])
+        assert jmespath.search("spec.jobTemplate.spec.template.spec.securityContext.fsGroup", docs[0]) == 30
 
     def test_gitsync_sidecar_and_init_container(self):
         docs = render_chart(
@@ -94,13 +94,19 @@ def test_gitsync_sidecar_and_init_container(self):
             assert "git-sync-init" in [
                 c["name"] for c in jmespath.search("spec.template.spec.initContainers", doc)
             ]
-            assert 3000 == jmespath.search(
-                "spec.template.spec.initContainers[?name=='git-sync-init'].securityContext.runAsUser | [0]",
-                doc,
+            assert (
+                jmespath.search(
+                    "spec.template.spec.initContainers[?name=='git-sync-init'].securityContext.runAsUser | [0]",
+                    doc,
+                )
+                == 3000
             )
-            assert 3000 == jmespath.search(
-                "spec.template.spec.containers[?name=='git-sync'].securityContext.runAsUser | [0]",
-                doc,
+            assert (
+                jmespath.search(
+                    "spec.template.spec.containers[?name=='git-sync'].securityContext.runAsUser | [0]",
+                    doc,
+                )
+                == 3000
             )
 
 
@@ -130,8 +136,8 @@ def test_check_default_setting(self):
         )
 
         for doc in docs:
-            assert 6000 == jmespath.search("spec.template.spec.securityContext.runAsUser", doc)
-            assert 60 == jmespath.search("spec.template.spec.securityContext.fsGroup", doc)
+            assert jmespath.search("spec.template.spec.securityContext.runAsUser", doc) == 6000
+            assert jmespath.search("spec.template.spec.securityContext.fsGroup", doc) == 60
 
     # Test priority:
     # .securityContext > securityContext > uid + gid
@@ -168,8 +174,8 @@ def test_check_local_setting(self):
         )
 
         for doc in docs:
-            assert 9000 == jmespath.search("spec.template.spec.securityContext.runAsUser", doc)
-            assert 90 == jmespath.search("spec.template.spec.securityContext.fsGroup", doc)
+            assert jmespath.search("spec.template.spec.securityContext.runAsUser", doc) == 9000
+            assert jmespath.search("spec.template.spec.securityContext.fsGroup", doc) == 90
 
     # Test containerSecurity priority over uid under components using localSecurityContext
     def test_check_local_uid(self):
@@ -186,7 +192,7 @@ def test_check_local_uid(self):
         )
 
         for doc in docs:
-            assert 7000 == jmespath.search("spec.template.spec.securityContext.runAsUser", doc)
+            assert jmespath.search("spec.template.spec.securityContext.runAsUser", doc) == 7000
 
     # Test containerSecurity priority over uid under dags.gitSync
     def test_gitsync_sidecar_and_init_container(self):
@@ -207,13 +213,19 @@ def test_gitsync_sidecar_and_init_container(self):
             assert "git-sync-init" in [
                 c["name"] for c in jmespath.search("spec.template.spec.initContainers", doc)
             ]
-            assert 8000 == jmespath.search(
-                "spec.template.spec.initContainers[?name=='git-sync-init'].securityContext.runAsUser | [0]",
-                doc,
+            assert (
+                jmespath.search(
+                    "spec.template.spec.initContainers[?name=='git-sync-init'].securityContext.runAsUser | [0]",
+                    doc,
+                )
+                == 8000
            )
-            assert 8000 == jmespath.search(
-                "spec.template.spec.containers[?name=='git-sync'].securityContext.runAsUser | [0]",
-                doc,
+            assert (
+                jmespath.search(
+                    "spec.template.spec.containers[?name=='git-sync'].securityContext.runAsUser | [0]",
+                    doc,
+                )
+                == 8000
             )
 
     # Test securityContexts for main containers
diff --git a/helm_tests/webserver/test_ingress_flower.py b/helm_tests/webserver/test_ingress_flower.py
index 107bf5b270f9c..aedbf25931deb 100644
--- a/helm_tests/webserver/test_ingress_flower.py
+++ b/helm_tests/webserver/test_ingress_flower.py
@@ -58,7 +58,7 @@ def test_should_set_ingress_class_name(self):
             },
             show_only=["templates/flower/flower-ingress.yaml"],
         )
-        assert "foo" == jmespath.search("spec.ingressClassName", docs[0])
+        assert jmespath.search("spec.ingressClassName", docs[0]) == "foo"
 
     def test_should_ingress_hosts_objs_have_priority_over_host(self):
         docs = render_chart(
@@ -81,14 +81,18 @@ def test_should_ingress_hosts_objs_have_priority_over_host(self):
             },
             show_only=["templates/flower/flower-ingress.yaml"],
         )
-        assert ["*.a-host", "b-host", "c-host", "d-host", "e-host"] == jmespath.search(
-            "spec.rules[*].host", docs[0]
-        )
-        assert [
+        assert jmespath.search("spec.rules[*].host", docs[0]) == [
+            "*.a-host",
+            "b-host",
+            "c-host",
+            "d-host",
+            "e-host",
+        ]
+        assert jmespath.search("spec.tls[*]", docs[0]) == [
             {"hosts": ["*.a-host"], "secretName": "newsecret1"},
             {"hosts": ["b-host"], "secretName": "newsecret2"},
             {"hosts": ["c-host"], "secretName": "newsecret1"},
-        ] == jmespath.search("spec.tls[*]", docs[0])
+        ]
 
     def test_should_ingress_hosts_strs_have_priority_over_host(self):
         docs = render_chart(
@@ -106,10 +110,10 @@ def test_should_ingress_hosts_strs_have_priority_over_host(self):
             show_only=["templates/flower/flower-ingress.yaml"],
         )
 
-        assert ["*.a-host", "b-host", "c-host", "d-host"] == jmespath.search("spec.rules[*].host", docs[0])
-        assert [
+        assert jmespath.search("spec.rules[*].host", docs[0]) == ["*.a-host", "b-host", "c-host", "d-host"]
+        assert jmespath.search("spec.tls[*]", docs[0]) == [
             {"hosts": ["*.a-host", "b-host", "c-host", "d-host"], "secretName": "secret"}
-        ] == jmespath.search("spec.tls[*]", docs[0])
+        ]
 
     def test_should_ingress_deprecated_host_and_top_level_tls_still_work(self):
         docs = render_chart(
@@ -159,7 +163,7 @@ def test_ingress_created(self, global_value, flower_value, expected):
         if values["ingress"] == {}:
             del values["ingress"]
         docs = render_chart(values=values, show_only=["templates/flower/flower-ingress.yaml"])
-        assert expected == (1 == len(docs))
+        assert expected == (len(docs) == 1)
 
     def test_ingress_not_created_flower_disabled(self):
         docs = render_chart(
@@ -170,7 +174,7 @@ def test_ingress_not_created_flower_disabled(self):
             },
             show_only=["templates/flower/flower-ingress.yaml"],
         )
-        assert 0 == len(docs)
+        assert len(docs) == 0
 
     def test_should_add_component_specific_labels(self):
         docs = render_chart(
@@ -214,12 +218,12 @@ def test_can_ingress_hosts_be_templated(self):
             namespace="airflow",
         )
 
-        assert [
+        assert jmespath.search("spec.rules[*].host", docs[0]) == [
             "*.airflow.example.com",
             "aa.example.com",
             "cc.example.com",
             "dd.example.com",
-        ] == jmespath.search("spec.rules[*].host", docs[0])
+        ]
 
     def test_backend_service_name(self):
         docs = render_chart(
@@ -227,8 +231,9 @@ def test_backend_service_name(self):
             show_only=["templates/flower/flower-ingress.yaml"],
         )
 
-        assert "release-name-flower" == jmespath.search(
-            "spec.rules[0].http.paths[0].backend.service.name", docs[0]
+        assert (
+            jmespath.search("spec.rules[0].http.paths[0].backend.service.name", docs[0])
+            == "release-name-flower"
         )
 
     def test_backend_service_name_with_fullname_override(self):
@@ -242,6 +247,7 @@ def test_backend_service_name_with_fullname_override(self):
             show_only=["templates/flower/flower-ingress.yaml"],
         )
 
-        assert "test-basic-flower" == jmespath.search(
-            "spec.rules[0].http.paths[0].backend.service.name", docs[0]
+        assert (
+            jmespath.search("spec.rules[0].http.paths[0].backend.service.name", docs[0])
+            == "test-basic-flower"
         )
diff --git a/helm_tests/webserver/test_ingress_web.py b/helm_tests/webserver/test_ingress_web.py
index 38c258c93b9c4..ee638d9c92e8c 100644
--- a/helm_tests/webserver/test_ingress_web.py
+++ b/helm_tests/webserver/test_ingress_web.py
@@ -43,14 +43,14 @@ def test_should_allow_more_than_one_annotation(self):
             values={"ingress": {"web": {"enabled": True, "annotations": {"aa": "bb", "cc": "dd"}}}},
             show_only=["templates/webserver/webserver-ingress.yaml"],
         )
-        assert {"aa": "bb", "cc": "dd"} == jmespath.search("metadata.annotations", docs[0])
+        assert jmespath.search("metadata.annotations", docs[0]) == {"aa": "bb", "cc": "dd"}
 
     def test_should_set_ingress_class_name(self):
         docs = render_chart(
             values={"ingress": {"web": {"enabled": True, "ingressClassName": "foo"}}},
             show_only=["templates/webserver/webserver-ingress.yaml"],
         )
-        assert "foo" == jmespath.search("spec.ingressClassName", docs[0])
+        assert jmespath.search("spec.ingressClassName", docs[0]) == "foo"
 
     def test_should_ingress_hosts_objs_have_priority_over_host(self):
         docs = render_chart(
@@ -72,14 +72,18 @@ def test_should_ingress_hosts_objs_have_priority_over_host(self):
             },
             show_only=["templates/webserver/webserver-ingress.yaml"],
         )
-        assert ["*.a-host", "b-host", "c-host", "d-host", "e-host"] == jmespath.search(
-            "spec.rules[*].host", docs[0]
-        )
-        assert [
+        assert jmespath.search("spec.rules[*].host", docs[0]) == [
+            "*.a-host",
+            "b-host",
+            "c-host",
+            "d-host",
+            "e-host",
+        ]
+        assert jmespath.search("spec.tls[*]", docs[0]) == [
             {"hosts": ["*.a-host"], "secretName": "newsecret1"},
             {"hosts": ["b-host"], "secretName": "newsecret2"},
             {"hosts": ["c-host"], "secretName": "newsecret1"},
-        ] == jmespath.search("spec.tls[*]", docs[0])
+        ]
 
     def test_should_ingress_hosts_strs_have_priority_over_host(self):
         docs = render_chart(
@@ -95,10 +99,10 @@ def test_should_ingress_hosts_strs_have_priority_over_host(self):
             },
             show_only=["templates/webserver/webserver-ingress.yaml"],
         )
 
-        assert ["*.a-host", "b-host", "c-host", "d-host"] == jmespath.search("spec.rules[*].host", docs[0])
-        assert [
+        assert jmespath.search("spec.rules[*].host", docs[0]) == ["*.a-host", "b-host", "c-host", "d-host"]
+        assert jmespath.search("spec.tls[*]", docs[0]) == [
             {"hosts": ["*.a-host", "b-host", "c-host", "d-host"], "secretName": "secret"}
-        ] == jmespath.search("spec.tls[*]", docs[0])
+        ]
 
     def test_should_ingress_deprecated_host_and_top_level_tls_still_work(self):
         docs = render_chart(
@@ -153,7 +157,7 @@ def test_ingress_created(self, global_value, web_value, expected):
         if values["ingress"] == {}:
             del values["ingress"]
         docs = render_chart(values=values, show_only=["templates/webserver/webserver-ingress.yaml"])
-        assert expected == (1 == len(docs))
+        assert expected == (len(docs) == 1)
 
     def test_should_add_component_specific_labels(self):
         docs = render_chart(
@@ -194,12 +198,12 @@ def test_can_ingress_hosts_be_templated(self):
             namespace="airflow",
         )
 
-        assert [
+        assert jmespath.search("spec.rules[*].host", docs[0]) == [
             "*.airflow.example.com",
             "aa.example.com",
             "cc.example.com",
             "dd.example.com",
-        ] == jmespath.search("spec.rules[*].host", docs[0])
+        ]
 
     def test_backend_service_name(self):
         docs = render_chart(
@@ -207,8 +211,9 @@ def test_backend_service_name(self):
             show_only=["templates/webserver/webserver-ingress.yaml"],
         )
 
-        assert "release-name-webserver" == jmespath.search(
-            "spec.rules[0].http.paths[0].backend.service.name", docs[0]
+        assert (
+            jmespath.search("spec.rules[0].http.paths[0].backend.service.name", docs[0])
+            == "release-name-webserver"
         )
 
     def test_backend_service_name_with_fullname_override(self):
@@ -221,6 +226,7 @@ def test_backend_service_name_with_fullname_override(self):
             show_only=["templates/webserver/webserver-ingress.yaml"],
         )
 
-        assert "test-basic-webserver" == jmespath.search(
-            "spec.rules[0].http.paths[0].backend.service.name", docs[0]
+        assert (
+            jmespath.search("spec.rules[0].http.paths[0].backend.service.name", docs[0])
+            == "test-basic-webserver"
         )
diff --git a/helm_tests/webserver/test_webserver.py b/helm_tests/webserver/test_webserver.py
index 84e12ef5bc697..c0014b6f62a56 100644
--- a/helm_tests/webserver/test_webserver.py
+++ b/helm_tests/webserver/test_webserver.py
@@ -36,7 +36,7 @@ def test_can_be_disabled(self):
             show_only=["templates/webserver/webserver-deployment.yaml"],
         )
 
-        assert 0 == len(docs)
+        assert len(docs) == 0
 
     def test_should_remove_replicas_field(self):
         docs = render_chart(
@@ -160,9 +160,9 @@ def test_should_use_templated_base_url_for_probes(self):
         assert {"name": "Host", "value": "release-name.com"} in jmespath.search(
             "startupProbe.httpGet.httpHeaders", container
         )
-        assert "/mypath/release-name/path/health" == jmespath.search("livenessProbe.httpGet.path", container)
-        assert "/mypath/release-name/path/health" == jmespath.search("readinessProbe.httpGet.path", container)
-        assert "/mypath/release-name/path/health" == jmespath.search("startupProbe.httpGet.path", container)
+        assert jmespath.search("livenessProbe.httpGet.path", container) == "/mypath/release-name/path/health"
+        assert jmespath.search("readinessProbe.httpGet.path", container) == "/mypath/release-name/path/health"
+        assert jmespath.search("startupProbe.httpGet.path", container) == "/mypath/release-name/path/health"
 
     def test_should_add_scheme_to_liveness_and_readiness_and_startup_probes(self):
         docs = render_chart(
@@ -217,10 +217,10 @@ def test_should_add_extra_containers(self):
             show_only=["templates/webserver/webserver-deployment.yaml"],
         )
 
-        assert {
+        assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == {
             "name": "airflow",
             "image":
"test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_should_template_extra_containers(self): docs = render_chart( @@ -233,9 +233,9 @@ def test_should_template_extra_containers(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == { "name": "release-name-test-container", - } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) + } def test_should_add_extraEnvs(self): docs = render_chart( @@ -282,12 +282,14 @@ def test_should_add_extra_volume_and_extra_volume_mount(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[-1].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) == "test-volume-airflow" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[-1].name", docs[0]) + == "test-volume-airflow" ) - assert "test-volume-airflow" == jmespath.search( - "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0]) + == "test-volume-airflow" ) def test_should_add_global_volume_and_global_volume_mount(self): @@ -299,9 +301,10 @@ def test_should_add_global_volume_and_global_volume_mount(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "test-volume" == jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) - assert "test-volume" == jmespath.search( - "spec.template.spec.containers[0].volumeMounts[-1].name", docs[0] + assert jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) == "test-volume" + assert ( + jmespath.search("spec.template.spec.containers[0].volumeMounts[-1].name", docs[0]) + == "test-volume" ) def test_should_add_extraEnvs_to_wait_for_migration_container(self): @@ -365,10 +368,10 @@ def test_should_add_extra_init_containers(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", - } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) + } def test_should_template_extra_init_containers(self): docs = render_chart( @@ -380,9 +383,9 @@ def test_should_template_extra_init_containers(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) == { "name": "release-name-init-container", - } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) + } def test_should_add_component_specific_labels(self): docs = render_chart( @@ -423,33 +426,42 @@ def test_should_create_valid_affinity_tolerations_and_node_selector(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "Deployment" == jmespath.search("kind", docs[0]) - assert "foo" == jmespath.search( - "spec.template.spec.affinity.nodeAffinity." - "requiredDuringSchedulingIgnoredDuringExecution." - "nodeSelectorTerms[0]." - "matchExpressions[0]." - "key", - docs[0], + assert jmespath.search("kind", docs[0]) == "Deployment" + assert ( + jmespath.search( + "spec.template.spec.affinity.nodeAffinity." 
+ "requiredDuringSchedulingIgnoredDuringExecution." + "nodeSelectorTerms[0]." + "matchExpressions[0]." + "key", + docs[0], + ) + == "foo" ) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.diskType", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.diskType", + docs[0], + ) + == "ssd" ) - assert "dynamic-pods" == jmespath.search( - "spec.template.spec.tolerations[0].key", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.tolerations[0].key", + docs[0], + ) + == "dynamic-pods" ) def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/webserver/webserver-deployment.yaml"]) - assert {"component": "webserver"} == jmespath.search( + assert jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." "podAffinityTerm.labelSelector.matchLabels", docs[0], - ) + ) == {"component": "webserver"} def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): """When given both global and webserver affinity etc, webserver affinity etc is used.""" @@ -513,13 +525,16 @@ def test_affinity_tolerations_topology_spread_constraints_and_node_selector_prec ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) - assert "ssd" == jmespath.search( - "spec.template.spec.nodeSelector.type", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.nodeSelector.type", + docs[0], + ) + == "ssd" ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) - assert 1 == len(tolerations) - assert "dynamic-pods" == tolerations[0]["key"] + assert len(tolerations) == 1 + assert tolerations[0]["key"] == "dynamic-pods" assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) @@ -530,9 +545,12 @@ def test_scheduler_name(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "airflow-scheduler" == jmespath.search( - "spec.template.spec.schedulerName", - docs[0], + assert ( + jmespath.search( + "spec.template.spec.schedulerName", + docs[0], + ) + == "airflow-scheduler" ) @pytest.mark.parametrize( @@ -610,25 +628,27 @@ def test_webserver_resources_are_configurable(self): }, show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) - assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) == "128Mi" + assert jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) == "200m" - assert "169Mi" == jmespath.search( - "spec.template.spec.containers[0].resources.requests.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[0].resources.requests.memory", docs[0]) == "169Mi" ) - assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) == "300m" # initContainer wait-for-airflow-migrations - assert "128Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.limits.memory", docs[0]) + == "128Mi" ) - assert "200m" == 
jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) + assert jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) == "200m" - assert "169Mi" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.memory", docs[0]) + == "169Mi" ) - assert "300m" == jmespath.search( - "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] + assert ( + jmespath.search("spec.template.spec.initContainers[0].resources.requests.cpu", docs[0]) == "300m" ) def test_webserver_security_contexts_are_configurable(self): @@ -651,16 +671,17 @@ def test_webserver_security_contexts_are_configurable(self): }, show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert {"allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True} == jmespath.search( - "spec.template.spec.containers[0].securityContext", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].securityContext", docs[0]) == { + "allowPrivilegeEscalation": False, + "readOnlyRootFilesystem": True, + } - assert { + assert jmespath.search("spec.template.spec.securityContext", docs[0]) == { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, - } == jmespath.search("spec.template.spec.securityContext", docs[0]) + } def test_webserver_security_context_legacy(self): docs = render_chart( @@ -677,12 +698,12 @@ def test_webserver_security_context_legacy(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.securityContext", docs[0]) == { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, - } == jmespath.search("spec.template.spec.securityContext", docs[0]) + } def test_webserver_resources_are_not_added_by_default(self): docs = render_chart( @@ -744,9 +765,11 @@ def test_default_command_and_args(self): docs = render_chart(show_only=["templates/webserver/webserver-deployment.yaml"]) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None - assert ["bash", "-c", "exec airflow webserver"] == jmespath.search( - "spec.template.spec.containers[0].args", docs[0] - ) + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == [ + "bash", + "-c", + "exec airflow webserver", + ] @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) @@ -765,8 +788,8 @@ def test_command_and_args_overrides_are_templated(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"] @pytest.mark.parametrize( "airflow_version, dag_values", @@ -791,7 +814,7 @@ def test_no_dags_mount_or_volume_or_gitsync_sidecar_expected(self, airflow_versi vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) ] assert "dags" not in [vm["name"] for vm in jmespath.search("spec.template.spec.volumes", docs[0])] - assert 1 == len(jmespath.search("spec.template.spec.containers", docs[0])) + assert 
len(jmespath.search("spec.template.spec.containers", docs[0])) == 1 @pytest.mark.parametrize( "airflow_version, dag_values, expected_read_only", @@ -844,8 +867,8 @@ def test_dags_persistence_volume_no_sidecar(self, dags_values, expected_claim_na "persistentVolumeClaim": {"claimName": expected_claim_name}, } in jmespath.search("spec.template.spec.volumes", docs[0]) # No gitsync sidecar or init container - assert 1 == len(jmespath.search("spec.template.spec.containers", docs[0])) - assert 1 == len(jmespath.search("spec.template.spec.initContainers", docs[0])) + assert len(jmespath.search("spec.template.spec.containers", docs[0])) == 1 + assert len(jmespath.search("spec.template.spec.initContainers", docs[0])) == 1 def test_should_add_component_specific_annotations(self): docs = render_chart( @@ -869,8 +892,8 @@ def test_webserver_pod_hostaliases(self): show_only=["templates/webserver/webserver-deployment.yaml"], ) - assert "127.0.0.1" == jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) - assert "foo.local" == jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) + assert jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) == "127.0.0.1" + assert jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) == "foo.local" def test_should_add_annotations_to_webserver_configmap(self): docs = render_chart( @@ -909,12 +932,14 @@ def test_default_service(self): show_only=["templates/webserver/webserver-service.yaml"], ) - assert "release-name-webserver" == jmespath.search("metadata.name", docs[0]) + assert jmespath.search("metadata.name", docs[0]) == "release-name-webserver" assert jmespath.search("metadata.annotations", docs[0]) is None - assert {"tier": "airflow", "component": "webserver", "release": "release-name"} == jmespath.search( - "spec.selector", docs[0] - ) - assert "ClusterIP" == jmespath.search("spec.type", docs[0]) + assert jmespath.search("spec.selector", docs[0]) == { + "tier": "airflow", + "component": "webserver", + "release": "release-name", + } + assert jmespath.search("spec.type", docs[0]) == "ClusterIP" assert {"name": "airflow-ui", "port": 8080} in jmespath.search("spec.ports", docs[0]) def test_overrides(self): @@ -933,11 +958,11 @@ def test_overrides(self): show_only=["templates/webserver/webserver-service.yaml"], ) - assert {"foo": "bar"} == jmespath.search("metadata.annotations", docs[0]) - assert "LoadBalancer" == jmespath.search("spec.type", docs[0]) + assert jmespath.search("metadata.annotations", docs[0]) == {"foo": "bar"} + assert jmespath.search("spec.type", docs[0]) == "LoadBalancer" assert {"name": "airflow-ui", "port": 9000} in jmespath.search("spec.ports", docs[0]) - assert "127.0.0.1" == jmespath.search("spec.loadBalancerIP", docs[0]) - assert ["10.123.0.0/16"] == jmespath.search("spec.loadBalancerSourceRanges", docs[0]) + assert jmespath.search("spec.loadBalancerIP", docs[0]) == "127.0.0.1" + assert jmespath.search("spec.loadBalancerSourceRanges", docs[0]) == ["10.123.0.0/16"] @pytest.mark.parametrize( "ports, expected_ports", @@ -1008,7 +1033,7 @@ def test_nodeport_service(self, ports, expected_ports): show_only=["templates/webserver/webserver-service.yaml"], ) - assert "NodePort" == jmespath.search("spec.type", docs[0]) + assert jmespath.search("spec.type", docs[0]) == "NodePort" assert expected_ports == jmespath.search("spec.ports", docs[0]) @@ -1017,7 +1042,7 @@ class TestWebserverConfigmap: def test_no_webserver_config_configmap_by_default(self): docs = 
render_chart(show_only=["templates/configmaps/webserver-configmap.yaml"]) - assert 0 == len(docs) + assert len(docs) == 0 def test_no_webserver_config_configmap_with_configmap_name(self): docs = render_chart( @@ -1029,7 +1054,7 @@ def test_no_webserver_config_configmap_with_configmap_name(self): }, show_only=["templates/configmaps/webserver-configmap.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_webserver_config_configmap(self): docs = render_chart( @@ -1037,11 +1062,11 @@ def test_webserver_config_configmap(self): show_only=["templates/configmaps/webserver-configmap.yaml"], ) - assert "ConfigMap" == docs[0]["kind"] - assert "release-name-webserver-config" == jmespath.search("metadata.name", docs[0]) + assert docs[0]["kind"] == "ConfigMap" + assert jmespath.search("metadata.name", docs[0]) == "release-name-webserver-config" assert ( - "CSRF_ENABLED = True # release-name" - == jmespath.search('data."webserver_config.py"', docs[0]).strip() + jmespath.search('data."webserver_config.py"', docs[0]).strip() + == "CSRF_ENABLED = True # release-name" ) @@ -1052,7 +1077,7 @@ def test_off_by_default(self): docs = render_chart( show_only=["templates/webserver/webserver-networkpolicy.yaml"], ) - assert 0 == len(docs) + assert len(docs) == 0 def test_defaults(self): docs = render_chart( @@ -1069,12 +1094,12 @@ def test_defaults(self): show_only=["templates/webserver/webserver-networkpolicy.yaml"], ) - assert 1 == len(docs) - assert "NetworkPolicy" == docs[0]["kind"] - assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( - "spec.ingress[0].from", docs[0] - ) - assert [{"port": 8080}] == jmespath.search("spec.ingress[0].ports", docs[0]) + assert len(docs) == 1 + assert docs[0]["kind"] == "NetworkPolicy" + assert jmespath.search("spec.ingress[0].from", docs[0]) == [ + {"namespaceSelector": {"matchLabels": {"release": "myrelease"}}} + ] + assert jmespath.search("spec.ingress[0].ports", docs[0]) == [{"port": 8080}] @pytest.mark.parametrize( "ports, expected_ports", @@ -1121,9 +1146,9 @@ def test_deprecated_from_param(self): show_only=["templates/webserver/webserver-networkpolicy.yaml"], ) - assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( - "spec.ingress[0].from", docs[0] - ) + assert jmespath.search("spec.ingress[0].from", docs[0]) == [ + {"namespaceSelector": {"matchLabels": {"release": "myrelease"}}} + ] def test_should_add_component_specific_labels(self): docs = render_chart( diff --git a/providers/src/airflow/providers/arangodb/sensors/arangodb.py b/providers/src/airflow/providers/arangodb/sensors/arangodb.py index 042a2415fbe5c..aaa02ddc6bd43 100644 --- a/providers/src/airflow/providers/arangodb/sensors/arangodb.py +++ b/providers/src/airflow/providers/arangodb/sensors/arangodb.py @@ -52,4 +52,4 @@ def poke(self, context: Context) -> bool: hook = ArangoDBHook(self.arangodb_conn_id) records = hook.query(self.query, count=True).count() self.log.info("Total records found: %d", records) - return 0 != records + return records != 0 diff --git a/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py index 32f840f69b810..56c3a786795a7 100644 --- a/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py +++ b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py @@ -253,7 +253,7 @@ def clear_not_launched_queued_tasks(self, session: Session = 
NEW_SESSION) -> Non ) if self.kubernetes_queue: query = query.where(TaskInstance.queue == self.kubernetes_queue) - elif hybrid_executor_enabled and KUBERNETES_EXECUTOR == default_executor: + elif hybrid_executor_enabled and default_executor == KUBERNETES_EXECUTOR: query = query.where( or_( TaskInstance.executor == KUBERNETES_EXECUTOR, diff --git a/providers/src/airflow/providers/dbt/cloud/utils/openlineage.py b/providers/src/airflow/providers/dbt/cloud/utils/openlineage.py index 470c153cf0fb6..cb1c8e3132f77 100644 --- a/providers/src/airflow/providers/dbt/cloud/utils/openlineage.py +++ b/providers/src/airflow/providers/dbt/cloud/utils/openlineage.py @@ -39,14 +39,14 @@ def _get_logical_date(task_instance): # todo: remove when min airflow version >= 3.0 - if _AIRFLOW_VERSION < parse("3"): + if parse("3") > _AIRFLOW_VERSION: return task_instance.execution_date return task_instance.logical_date def _get_try_number(val): # todo: remove when min airflow version >= 2.10.0 - if _AIRFLOW_VERSION < parse("2.10.0"): + if parse("2.10.0") > _AIRFLOW_VERSION: return val.try_number - 1 else: return val.try_number diff --git a/providers/src/airflow/providers/google/cloud/sensors/dataproc.py b/providers/src/airflow/providers/google/cloud/sensors/dataproc.py index 4b1a5b98d2045..b8385c5b58b51 100644 --- a/providers/src/airflow/providers/google/cloud/sensors/dataproc.py +++ b/providers/src/airflow/providers/google/cloud/sensors/dataproc.py @@ -107,10 +107,10 @@ def poke(self, context: Context) -> bool: }: message = f"Job was cancelled:\n{job}" raise AirflowException(message) - elif JobStatus.State.DONE == state: + elif state == JobStatus.State.DONE: self.log.debug("Job %s completed successfully.", self.dataproc_job_id) return True - elif JobStatus.State.ATTEMPT_FAILURE == state: + elif state == JobStatus.State.ATTEMPT_FAILURE: self.log.debug("Job %s attempt has failed.", self.dataproc_job_id) self.log.info("Waiting for job %s to complete.", self.dataproc_job_id) diff --git a/providers/tests/amazon/aws/executors/batch/test_batch_executor.py b/providers/tests/amazon/aws/executors/batch/test_batch_executor.py index fa3e90ce52ca1..3b02d11250125 100644 --- a/providers/tests/amazon/aws/executors/batch/test_batch_executor.py +++ b/providers/tests/amazon/aws/executors/batch/test_batch_executor.py @@ -633,7 +633,7 @@ def test_try_adopt_task_instances(self, mock_executor): # Two of the three tasks should be adopted. assert len(orphaned_tasks) - 1 == len(mock_executor.active_workers) # The remaining one task is unable to be adopted. 
- assert 1 == len(not_adopted_tasks) + assert len(not_adopted_tasks) == 1 class TestBatchExecutorConfig: diff --git a/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py b/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py index 9061ceabb7a8b..0158df0ca6eb2 100644 --- a/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py +++ b/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py @@ -304,21 +304,21 @@ def test_queued_tasks(self): ), ] for task in queued_tasks: - assert State.QUEUED == task.get_task_state() + assert task.get_task_state() == State.QUEUED def test_running_tasks(self): """Tasks that have been launched are identified as 'running'.""" running_task = EcsExecutorTask( task_arn=ARN1, last_status="RUNNING", desired_status="RUNNING", containers=[{}] ) - assert State.RUNNING == running_task.get_task_state() + assert running_task.get_task_state() == State.RUNNING def test_running_tasks_edge_cases(self): """Tasks that are not finished have been launched are identified as 'running'.""" running_task = EcsExecutorTask( task_arn=ARN1, last_status="QUEUED", desired_status="SUCCESS", containers=[{}] ) - assert State.RUNNING == running_task.get_task_state() + assert running_task.get_task_state() == State.RUNNING def test_removed_tasks(self): """Tasks that failed to launch are identified as 'removed'.""" @@ -332,7 +332,7 @@ def test_removed_tasks(self): ), ] for task in deprovisioning_tasks: - assert State.REMOVED == task.get_task_state() + assert task.get_task_state() == State.REMOVED removed_task = EcsExecutorTask( task_arn="DEAD", @@ -341,7 +341,7 @@ def test_removed_tasks(self): containers=[{}], stopped_reason="Timeout waiting for network interface provisioning to complete.", ) - assert State.REMOVED == removed_task.get_task_state() + assert removed_task.get_task_state() == State.REMOVED def test_stopped_tasks(self): """Tasks that have terminated are identified as either 'success' or 'failure'.""" @@ -357,7 +357,7 @@ def test_stopped_tasks(self): started_at=dt.datetime.now(), containers=[successful_container], ) - assert State.SUCCESS == success_task.get_task_state() + assert success_task.get_task_state() == State.SUCCESS for status in ("DEACTIVATING", "STOPPING", "DEPROVISIONING", "STOPPED"): failed_task = EcsExecutorTask( @@ -368,7 +368,7 @@ def test_stopped_tasks(self): started_at=dt.datetime.now(), containers=[successful_container, successful_container, error_container], ) - assert State.FAILED == failed_task.get_task_state() + assert failed_task.get_task_state() == State.FAILED class TestAwsEcsExecutor: @@ -392,15 +392,15 @@ def test_execute(self, change_state_mock, mock_airflow_key, mock_executor): "failures": [], } - assert 0 == len(mock_executor.pending_tasks) + assert len(mock_executor.pending_tasks) == 0 mock_executor.execute_async(airflow_key, mock_cmd) - assert 1 == len(mock_executor.pending_tasks) + assert len(mock_executor.pending_tasks) == 1 mock_executor.attempt_task_runs() mock_executor.ecs.run_task.assert_called_once() # Task is stored in active worker. 
- assert 1 == len(mock_executor.active_workers) + assert len(mock_executor.active_workers) == 1 assert ARN1 in mock_executor.active_workers.task_by_key(airflow_key).task_arn change_state_mock.assert_called_once_with( airflow_key, TaskInstanceState.RUNNING, ARN1, remove_running=False @@ -1004,7 +1004,7 @@ def test_executor_config_exceptions(self, bad_config, mock_executor): mock_executor.execute_async(mock_airflow_key, mock_cmd, executor_config=bad_config) assert raised.match('Executor Config should never override "name" or "command"') - assert 0 == len(mock_executor.pending_tasks) + assert len(mock_executor.pending_tasks) == 0 @mock.patch.object(ecs_executor_config, "build_task_kwargs") def test_container_not_found(self, mock_build_task_kwargs, mock_executor): @@ -1018,7 +1018,7 @@ def test_container_not_found(self, mock_build_task_kwargs, mock_executor): '"overrides[containerOverrides][containers][x][command]"' ) ) - assert 0 == len(mock_executor.pending_tasks) + assert len(mock_executor.pending_tasks) == 0 def _mock_sync( self, @@ -1190,7 +1190,7 @@ def test_try_adopt_task_instances(self, mock_executor): # Two of the three tasks should be adopted. assert len(orphaned_tasks) - 1 == len(mock_executor.active_workers) # The remaining one task is unable to be adopted. - assert 1 == len(not_adopted_tasks) + assert len(not_adopted_tasks) == 1 class TestEcsExecutorConfig: diff --git a/providers/tests/amazon/aws/hooks/test_s3.py b/providers/tests/amazon/aws/hooks/test_s3.py index 920948791ef10..b4f0af43f0588 100644 --- a/providers/tests/amazon/aws/hooks/test_s3.py +++ b/providers/tests/amazon/aws/hooks/test_s3.py @@ -273,12 +273,12 @@ def test_list_prefixes(self, s3_bucket): bucket.put_object(Key="dir/b", Body=b"b") bucket.put_object(Key="dir/sub_dir/c", Body=b"c") - assert [] == hook.list_prefixes(s3_bucket, prefix="non-existent/") - assert [] == hook.list_prefixes(s3_bucket) - assert ["dir/"] == hook.list_prefixes(s3_bucket, delimiter="/") - assert [] == hook.list_prefixes(s3_bucket, prefix="dir/") - assert ["dir/sub_dir/"] == hook.list_prefixes(s3_bucket, delimiter="/", prefix="dir/") - assert [] == hook.list_prefixes(s3_bucket, prefix="dir/sub_dir/") + assert hook.list_prefixes(s3_bucket, prefix="non-existent/") == [] + assert hook.list_prefixes(s3_bucket) == [] + assert hook.list_prefixes(s3_bucket, delimiter="/") == ["dir/"] + assert hook.list_prefixes(s3_bucket, prefix="dir/") == [] + assert hook.list_prefixes(s3_bucket, delimiter="/", prefix="dir/") == ["dir/sub_dir/"] + assert hook.list_prefixes(s3_bucket, prefix="dir/sub_dir/") == [] def test_list_prefixes_paged(self, s3_bucket): hook = S3Hook() @@ -307,21 +307,27 @@ def test_list_keys(self, s3_bucket): def dummy_object_filter(keys, from_datetime=None, to_datetime=None): return [] - assert [] == hook.list_keys(s3_bucket, prefix="non-existent/") - assert ["a", "ba", "bxa", "bxb", "dir/b"] == hook.list_keys(s3_bucket) - assert ["a", "ba", "bxa", "bxb"] == hook.list_keys(s3_bucket, delimiter="/") - assert ["dir/b"] == hook.list_keys(s3_bucket, prefix="dir/") - assert ["ba", "bxa", "bxb", "dir/b"] == hook.list_keys(s3_bucket, start_after_key="a") - assert [] == hook.list_keys(s3_bucket, from_datetime=from_datetime, to_datetime=to_datetime) - assert [] == hook.list_keys( - s3_bucket, from_datetime=from_datetime, to_datetime=to_datetime, object_filter=dummy_object_filter + assert hook.list_keys(s3_bucket, prefix="non-existent/") == [] + assert hook.list_keys(s3_bucket) == ["a", "ba", "bxa", "bxb", "dir/b"] + assert hook.list_keys(s3_bucket, 
delimiter="/") == ["a", "ba", "bxa", "bxb"] + assert hook.list_keys(s3_bucket, prefix="dir/") == ["dir/b"] + assert hook.list_keys(s3_bucket, start_after_key="a") == ["ba", "bxa", "bxb", "dir/b"] + assert hook.list_keys(s3_bucket, from_datetime=from_datetime, to_datetime=to_datetime) == [] + assert ( + hook.list_keys( + s3_bucket, + from_datetime=from_datetime, + to_datetime=to_datetime, + object_filter=dummy_object_filter, + ) + == [] ) - assert [] == hook.list_keys(s3_bucket, prefix="*a") - assert ["a", "ba", "bxa"] == hook.list_keys(s3_bucket, prefix="*a", apply_wildcard=True) - assert [] == hook.list_keys(s3_bucket, prefix="b*a") - assert ["ba", "bxa"] == hook.list_keys(s3_bucket, prefix="b*a", apply_wildcard=True) - assert [] == hook.list_keys(s3_bucket, prefix="b*") - assert ["ba", "bxa", "bxb"] == hook.list_keys(s3_bucket, prefix="b*", apply_wildcard=True) + assert hook.list_keys(s3_bucket, prefix="*a") == [] + assert hook.list_keys(s3_bucket, prefix="*a", apply_wildcard=True) == ["a", "ba", "bxa"] + assert hook.list_keys(s3_bucket, prefix="b*a") == [] + assert hook.list_keys(s3_bucket, prefix="b*a", apply_wildcard=True) == ["ba", "bxa"] + assert hook.list_keys(s3_bucket, prefix="b*") == [] + assert hook.list_keys(s3_bucket, prefix="b*", apply_wildcard=True) == ["ba", "bxa", "bxb"] def test_list_keys_paged(self, s3_bucket): hook = S3Hook() @@ -1209,10 +1215,10 @@ def test_function_with_test_key(self, test_key, bucket_name=None): fake_s3_hook = FakeS3Hook() test_bucket_name_with_wildcard_key = fake_s3_hook.test_function_with_wildcard_key("s3://foo/bar*.csv") - assert ("foo", "bar*.csv") == test_bucket_name_with_wildcard_key + assert test_bucket_name_with_wildcard_key == ("foo", "bar*.csv") test_bucket_name_with_key = fake_s3_hook.test_function_with_key("s3://foo/bar.csv") - assert ("foo", "bar.csv") == test_bucket_name_with_key + assert test_bucket_name_with_key == ("foo", "bar.csv") with pytest.raises(ValueError) as ctx: fake_s3_hook.test_function_with_test_key("s3://foo/bar.csv") diff --git a/providers/tests/amazon/aws/hooks/test_step_function.py b/providers/tests/amazon/aws/hooks/test_step_function.py index ce66447da68ec..f163a1f4bdda2 100644 --- a/providers/tests/amazon/aws/hooks/test_step_function.py +++ b/providers/tests/amazon/aws/hooks/test_step_function.py @@ -31,7 +31,7 @@ class TestStepFunctionHook: def test_get_conn_returns_a_boto3_connection(self): hook = StepFunctionHook(aws_conn_id="aws_default") - assert "stepfunctions" == hook.get_conn().meta.service_model.service_name + assert hook.get_conn().meta.service_model.service_name == "stepfunctions" def test_start_execution(self): hook = StepFunctionHook(aws_conn_id="aws_default", region_name="us-east-1") diff --git a/providers/tests/amazon/aws/log/test_s3_task_handler.py b/providers/tests/amazon/aws/log/test_s3_task_handler.py index 68e3ab8a3408f..70e37ef03ba4f 100644 --- a/providers/tests/amazon/aws/log/test_s3_task_handler.py +++ b/providers/tests/amazon/aws/log/test_s3_task_handler.py @@ -141,12 +141,12 @@ def test_read_when_s3_log_missing(self): ti.state = TaskInstanceState.SUCCESS self.s3_task_handler._read_from_logs_server = mock.Mock(return_value=([], [])) log, metadata = self.s3_task_handler.read(ti) - assert 1 == len(log) + assert len(log) == 1 assert len(log) == len(metadata) actual = log[0][0][-1] expected = "*** No logs found on s3 for ti=\n" assert expected in actual - assert {"end_of_log": True, "log_pos": 0} == metadata[0] + assert metadata[0] == {"end_of_log": True, "log_pos": 0} def 
test_s3_read_when_log_missing(self): handler = self.s3_task_handler diff --git a/providers/tests/amazon/aws/operators/test_glue.py b/providers/tests/amazon/aws/operators/test_glue.py index 6c1a2d2536228..755c6a18ec1e6 100644 --- a/providers/tests/amazon/aws/operators/test_glue.py +++ b/providers/tests/amazon/aws/operators/test_glue.py @@ -67,13 +67,13 @@ def test_render_template(self, create_task_instance_of_operator, session): session.commit() rendered_template: GlueJobOperator = ti.render_templates() - assert DAG_ID == rendered_template.script_location - assert DAG_ID == rendered_template.script_args - assert DAG_ID == rendered_template.create_job_kwargs - assert DAG_ID == rendered_template.iam_role_name - assert DAG_ID == rendered_template.iam_role_arn - assert DAG_ID == rendered_template.s3_bucket - assert DAG_ID == rendered_template.job_name + assert rendered_template.script_location == DAG_ID + assert rendered_template.script_args == DAG_ID + assert rendered_template.create_job_kwargs == DAG_ID + assert rendered_template.iam_role_name == DAG_ID + assert rendered_template.iam_role_arn == DAG_ID + assert rendered_template.s3_bucket == DAG_ID + assert rendered_template.job_name == DAG_ID @pytest.mark.parametrize( "script_location", diff --git a/providers/tests/amazon/aws/operators/test_kinesis_analytics.py b/providers/tests/amazon/aws/operators/test_kinesis_analytics.py index 35b15e85b519f..2cbdbe4c786d7 100644 --- a/providers/tests/amazon/aws/operators/test_kinesis_analytics.py +++ b/providers/tests/amazon/aws/operators/test_kinesis_analytics.py @@ -323,7 +323,7 @@ def test_execute_complete(self, kinesis_analytics_mock_conn): response = self.operator.execute_complete(context=None, event=event) - assert {"ApplicationARN": self.APPLICATION_ARN} == response + assert response == {"ApplicationARN": self.APPLICATION_ARN} @mock.patch.object(KinesisAnalyticsV2Hook, "conn") def test_execute_complete_failure(self, kinesis_analytics_mock_conn): @@ -485,7 +485,7 @@ def test_execute_complete(self, kinesis_analytics_mock_conn): response = self.operator.execute_complete(context=None, event=event) - assert {"ApplicationARN": self.APPLICATION_ARN} == response + assert response == {"ApplicationARN": self.APPLICATION_ARN} @mock.patch.object(KinesisAnalyticsV2Hook, "conn") def test_execute_complete_failure(self, kinesis_analytics_mock_conn): diff --git a/providers/tests/amazon/aws/operators/test_s3.py b/providers/tests/amazon/aws/operators/test_s3.py index 407084083da15..760a6ab53fb23 100644 --- a/providers/tests/amazon/aws/operators/test_s3.py +++ b/providers/tests/amazon/aws/operators/test_s3.py @@ -280,7 +280,7 @@ def test_execute_with_failing_transform_script(self, mock_popen): with pytest.raises(AirflowException) as ctx: op.execute(None) - assert "Transform script failed: 42" == str(ctx.value) + assert str(ctx.value) == "Transform script failed: 42" @mock.patch("subprocess.Popen") @mock_aws diff --git a/providers/tests/amazon/aws/secrets/test_secrets_manager.py b/providers/tests/amazon/aws/secrets/test_secrets_manager.py index fa824e3d7d474..574bfa06cab40 100644 --- a/providers/tests/amazon/aws/secrets/test_secrets_manager.py +++ b/providers/tests/amazon/aws/secrets/test_secrets_manager.py @@ -43,7 +43,7 @@ def test_get_conn_value_full_url_mode(self): secrets_manager_backend.client.create_secret(**create_param) returned_uri = secrets_manager_backend.get_conn_value(conn_id="test_postgres") - assert "postgresql://airflow:airflow@host:5432/airflow" == returned_uri + assert returned_uri == 
"postgresql://airflow:airflow@host:5432/airflow" @mock_aws def test_get_conn_value_non_existent_key(self): @@ -74,7 +74,7 @@ def test_get_variable(self): secrets_manager_backend.client.create_secret(**create_param) returned_uri = secrets_manager_backend.get_variable("hello") - assert "world" == returned_uri + assert returned_uri == "world" @mock_aws def test_get_variable_non_existent_key(self): diff --git a/providers/tests/amazon/aws/secrets/test_systems_manager.py b/providers/tests/amazon/aws/secrets/test_systems_manager.py index ecb3996263a2b..1a2a93518ddb0 100644 --- a/providers/tests/amazon/aws/secrets/test_systems_manager.py +++ b/providers/tests/amazon/aws/secrets/test_systems_manager.py @@ -108,7 +108,7 @@ def test_get_variable(self): ssm_backend.client.put_parameter(**param) returned_uri = ssm_backend.get_variable("hello") - assert "world" == returned_uri + assert returned_uri == "world" @mock_aws def test_get_config(self): @@ -122,7 +122,7 @@ def test_get_config(self): ssm_backend.client.put_parameter(**param) returned_uri = ssm_backend.get_config("sql_alchemy_conn") - assert "sqlite:///Users/test_user/airflow.db" == returned_uri + assert returned_uri == "sqlite:///Users/test_user/airflow.db" @mock_aws def test_get_variable_secret_string(self): @@ -130,7 +130,7 @@ def test_get_variable_secret_string(self): ssm_backend = SystemsManagerParameterStoreBackend() ssm_backend.client.put_parameter(**param) returned_uri = ssm_backend.get_variable("hello") - assert "world" == returned_uri + assert returned_uri == "world" @mock_aws def test_get_variable_non_existent_key(self): diff --git a/providers/tests/amazon/aws/transfers/test_base.py b/providers/tests/amazon/aws/transfers/test_base.py index b357b9ea8561d..7c526c66e84cb 100644 --- a/providers/tests/amazon/aws/transfers/test_base.py +++ b/providers/tests/amazon/aws/transfers/test_base.py @@ -61,5 +61,5 @@ def test_render_template(self, session, clean_dags_and_dagruns): session.add(ti) session.commit() ti.render_templates() - assert "2020-01-01" == getattr(operator, "source_aws_conn_id") - assert "2020-01-01" == getattr(operator, "dest_aws_conn_id") + assert getattr(operator, "source_aws_conn_id") == "2020-01-01" + assert getattr(operator, "dest_aws_conn_id") == "2020-01-01" diff --git a/providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py b/providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py index 0c9a56cd7d35b..e3da1d3ee45f3 100644 --- a/providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py +++ b/providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py @@ -86,7 +86,7 @@ def test_dynamodb_to_s3_success(self, mock_aws_dynamodb_hook, mock_s3_hook): dynamodb_to_s3_operator.execute(context={}) - assert [{"a": 1}, {"b": 2}, {"c": 3}] == self.output_queue + assert self.output_queue == [{"a": 1}, {"b": 2}, {"c": 3}] @patch("airflow.providers.amazon.aws.transfers.dynamodb_to_s3.S3Hook") @patch("airflow.providers.amazon.aws.transfers.dynamodb_to_s3.DynamoDBHook") @@ -115,7 +115,7 @@ def test_dynamodb_to_s3_success_with_decimal(self, mock_aws_dynamodb_hook, mock_ dynamodb_to_s3_operator.execute(context={}) - assert [{"a": float(a)}, {"b": float(b)}] == self.output_queue + assert self.output_queue == [{"a": float(a)}, {"b": float(b)}] @patch("airflow.providers.amazon.aws.transfers.dynamodb_to_s3.S3Hook") @patch("airflow.providers.amazon.aws.transfers.dynamodb_to_s3.DynamoDBHook") @@ -181,7 +181,7 @@ def test_dynamodb_to_s3_with_different_aws_conn_id(self, mock_aws_dynamodb_hook, dynamodb_to_s3_operator.execute(context={}) - 
assert [{"a": 1}, {"b": 2}, {"c": 3}] == self.output_queue + assert self.output_queue == [{"a": 1}, {"b": 2}, {"c": 3}] mock_s3_hook.assert_called_with(aws_conn_id=aws_conn_id) mock_aws_dynamodb_hook.assert_called_with(aws_conn_id=aws_conn_id) @@ -219,7 +219,7 @@ def test_dynamodb_to_s3_with_two_different_connections(self, mock_aws_dynamodb_h dynamodb_to_s3_operator.execute(context={}) - assert [{"a": 1}, {"b": 2}, {"c": 3}] == self.output_queue + assert self.output_queue == [{"a": 1}, {"b": 2}, {"c": 3}] mock_s3_hook.assert_called_with(aws_conn_id=s3_aws_conn_id) mock_aws_dynamodb_hook.assert_called_with(aws_conn_id=dynamodb_conn_id) @@ -255,7 +255,7 @@ def test_dynamodb_to_s3_with_just_dest_aws_conn_id(self, mock_aws_dynamodb_hook, dynamodb_to_s3_operator.execute(context={}) - assert [{"a": 1}, {"b": 2}, {"c": 3}] == self.output_queue + assert self.output_queue == [{"a": 1}, {"b": 2}, {"c": 3}] mock_aws_dynamodb_hook.assert_called_with(aws_conn_id="aws_default") mock_s3_hook.assert_called_with(aws_conn_id=s3_aws_conn_id) @@ -291,11 +291,11 @@ def test_render_template(self, session): session.add(ti) session.commit() ti.render_templates() - assert "2020-01-01" == getattr(operator, "source_aws_conn_id") - assert "2020-01-01" == getattr(operator, "dest_aws_conn_id") - assert "2020-01-01" == getattr(operator, "s3_bucket_name") - assert "2020-01-01" == getattr(operator, "dynamodb_table_name") - assert "2020-01-01" == getattr(operator, "s3_key_prefix") + assert getattr(operator, "source_aws_conn_id") == "2020-01-01" + assert getattr(operator, "dest_aws_conn_id") == "2020-01-01" + assert getattr(operator, "s3_bucket_name") == "2020-01-01" + assert getattr(operator, "dynamodb_table_name") == "2020-01-01" + assert getattr(operator, "s3_key_prefix") == "2020-01-01" @patch("airflow.providers.amazon.aws.transfers.dynamodb_to_s3.DynamoDBToS3Operator._export_entire_data") def test_dynamodb_execute_calling_export_entire_data(self, _export_entire_data): diff --git a/providers/tests/amazon/aws/transfers/test_gcs_to_s3.py b/providers/tests/amazon/aws/transfers/test_gcs_to_s3.py index 4eb4dfa260554..50204a0251a8d 100644 --- a/providers/tests/amazon/aws/transfers/test_gcs_to_s3.py +++ b/providers/tests/amazon/aws/transfers/test_gcs_to_s3.py @@ -124,7 +124,7 @@ def test_execute_without_replace(self, mock_hook): # we expect nothing to be uploaded # and all the MOCK_FILES to be present at the S3 bucket uploaded_files = operator.execute(None) - assert [] == uploaded_files + assert uploaded_files == [] assert sorted(MOCK_FILES) == sorted(hook.list_keys("bucket", delimiter="/")) @pytest.mark.parametrize( @@ -158,7 +158,7 @@ def test_execute_without_replace_with_folder_structure(self, mock_hook, dest_s3_ # and all the MOCK_FILES to be present at the S3 bucket uploaded_files = operator.execute(None) - assert [] == uploaded_files + assert uploaded_files == [] assert sorted(mock_files_s3) == sorted(hook.list_keys("bucket", prefix="test/")) @mock.patch("airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook") diff --git a/providers/tests/apache/drill/hooks/test_drill.py b/providers/tests/apache/drill/hooks/test_drill.py index a02b74545531a..0e7f9c6994129 100644 --- a/providers/tests/apache/drill/hooks/test_drill.py +++ b/providers/tests/apache/drill/hooks/test_drill.py @@ -68,7 +68,7 @@ def get_connection(self, conn_id): def test_get_uri(self): db_hook = self.db_hook() - assert "drill://host:8047/dfs?dialect_driver=drill+sadrill" == db_hook.get_uri() + assert db_hook.get_uri() == 
"drill://host:8047/dfs?dialect_driver=drill+sadrill" def test_get_first_record(self): statement = "SQL" diff --git a/providers/tests/apache/druid/hooks/test_druid.py b/providers/tests/apache/druid/hooks/test_druid.py index 7f7c37340cef3..80ea15c2ad312 100644 --- a/providers/tests/apache/druid/hooks/test_druid.py +++ b/providers/tests/apache/druid/hooks/test_druid.py @@ -421,7 +421,7 @@ def test_get_conn_with_context( def test_get_uri(self): db_hook = self.db_hook() - assert "druid://host:1000/druid/v2/sql" == db_hook.get_uri() + assert db_hook.get_uri() == "druid://host:1000/druid/v2/sql" def test_get_first_record(self): statement = "SQL" diff --git a/providers/tests/apache/druid/operators/test_druid.py b/providers/tests/apache/druid/operators/test_druid.py index 951dc41f92bf7..c6326fb328658 100644 --- a/providers/tests/apache/druid/operators/test_druid.py +++ b/providers/tests/apache/druid/operators/test_druid.py @@ -68,7 +68,7 @@ def test_render_template(dag_maker): dag_maker.session.add(dag_run.task_instances[0]) dag_maker.session.commit() dag_run.task_instances[0].render_templates() - assert RENDERED_INDEX == json.loads(operator.json_index_file) + assert json.loads(operator.json_index_file) == RENDERED_INDEX @pytest.mark.need_serialized_dag @@ -90,7 +90,7 @@ def test_render_template_from_file(tmp_path, dag_maker): ) dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED).task_instances[0].render_templates() - assert RENDERED_INDEX == json.loads(operator.json_index_file) + assert json.loads(operator.json_index_file) == RENDERED_INDEX def test_init_with_timeout_and_max_ingestion_time(): diff --git a/providers/tests/apache/hdfs/hooks/test_webhdfs.py b/providers/tests/apache/hdfs/hooks/test_webhdfs.py index 2e19b6401b118..6eb32328cdccd 100644 --- a/providers/tests/apache/hdfs/hooks/test_webhdfs.py +++ b/providers/tests/apache/hdfs/hooks/test_webhdfs.py @@ -194,7 +194,7 @@ def test_simple_init(self): def test_init_proxy_user(self): hook = WebHDFSHook(proxy_user="someone") - assert "someone" == hook.proxy_user + assert hook.proxy_user == "someone" @patch("airflow.providers.apache.hdfs.hooks.webhdfs.KerberosClient", create=True) @patch("airflow.providers.apache.hdfs.hooks.webhdfs._kerberos_security_mode", return_value=True) @@ -211,7 +211,7 @@ def test_conn_kerberos_ssl(self, socket_mock, mock_kerberos_security_mode, mock_ connection = mock_get_connection.return_value assert f"https://{connection.host}:{connection.port}" == mock_kerberos_client.call_args.args[0] - assert "/ssl/cert/path" == mock_kerberos_client.call_args.kwargs["session"].verify + assert mock_kerberos_client.call_args.kwargs["session"].verify == "/ssl/cert/path" @patch("airflow.providers.apache.hdfs.hooks.webhdfs.InsecureClient") @patch("airflow.providers.apache.hdfs.hooks.webhdfs.socket") diff --git a/providers/tests/apache/hive/macros/test_hive.py b/providers/tests/apache/hive/macros/test_hive.py index b2bce20f8c1f0..e37683c74bcf7 100644 --- a/providers/tests/apache/hive/macros/test_hive.py +++ b/providers/tests/apache/hive/macros/test_hive.py @@ -32,10 +32,10 @@ def test_closest_ds_partition(self): target_dt = datetime.strptime("2017-04-27", "%Y-%m-%d") date_list = [date1, date2, date3, date4, date5] - assert "2017-04-26" == str(hive._closest_date(target_dt, date_list, True)) - assert "2017-04-28" == str(hive._closest_date(target_dt, date_list, False)) + assert str(hive._closest_date(target_dt, date_list, True)) == "2017-04-26" + assert str(hive._closest_date(target_dt, date_list, False)) == "2017-04-28" # when before is 
not set, the closest date should be returned - assert "2017-04-26" == str(hive._closest_date(target_dt, [date1, date2, date3, date5], None)) - assert "2017-04-28" == str(hive._closest_date(target_dt, [date1, date2, date4, date5])) - assert "2017-04-26" == str(hive._closest_date(target_dt, date_list)) + assert str(hive._closest_date(target_dt, [date1, date2, date3, date5], None)) == "2017-04-26" + assert str(hive._closest_date(target_dt, [date1, date2, date4, date5])) == "2017-04-28" + assert str(hive._closest_date(target_dt, date_list)) == "2017-04-26" diff --git a/providers/tests/apache/hive/sensors/test_named_hive_partition.py b/providers/tests/apache/hive/sensors/test_named_hive_partition.py index ff565c733d8bf..50558157b8e2a 100644 --- a/providers/tests/apache/hive/sensors/test_named_hive_partition.py +++ b/providers/tests/apache/hive/sensors/test_named_hive_partition.py @@ -81,7 +81,7 @@ def test_parse_partition_name_default(self): partition = "ds=2016-01-01/state=IT" name = f"{table}/{partition}" parsed_schema, parsed_table, parsed_partition = NamedHivePartitionSensor.parse_partition_name(name) - assert "default" == parsed_schema + assert parsed_schema == "default" assert table == parsed_table assert partition == parsed_partition diff --git a/providers/tests/apache/kafka/sensors/test_kafka.py b/providers/tests/apache/kafka/sensors/test_kafka.py index d802ac61b8fb3..04b50a970feea 100644 --- a/providers/tests/apache/kafka/sensors/test_kafka.py +++ b/providers/tests/apache/kafka/sensors/test_kafka.py @@ -67,7 +67,7 @@ def test_await_execute_complete(self): kafka_config_id="kafka_d", topics=["test"], task_id="test", apply_function=_return_true ) - assert "test" == sensor.execute_complete(context={}, event="test") + assert sensor.execute_complete(context={}, event="test") == "test" def test_await_message_trigger_event(self): sensor = AwaitMessageTriggerFunctionSensor( diff --git a/providers/tests/apache/kylin/operators/test_kylin_cube.py b/providers/tests/apache/kylin/operators/test_kylin_cube.py index ea0ea67827ab4..7d7ddcc5ce8fd 100644 --- a/providers/tests/apache/kylin/operators/test_kylin_cube.py +++ b/providers/tests/apache/kylin/operators/test_kylin_cube.py @@ -188,8 +188,8 @@ def test_render_template(self, session): session.add(ti) session.commit() ti.render_templates() - assert "learn_kylin" == getattr(operator, "project") - assert "kylin_sales_cube" == getattr(operator, "cube") - assert "build" == getattr(operator, "command") - assert "1483200000000" == getattr(operator, "start_time") - assert "1483286400000" == getattr(operator, "end_time") + assert getattr(operator, "project") == "learn_kylin" + assert getattr(operator, "cube") == "kylin_sales_cube" + assert getattr(operator, "command") == "build" + assert getattr(operator, "start_time") == "1483200000000" + assert getattr(operator, "end_time") == "1483286400000" diff --git a/providers/tests/apache/livy/hooks/test_livy.py b/providers/tests/apache/livy/hooks/test_livy.py index 3f2430283b6c9..6cb6dd911600c 100644 --- a/providers/tests/apache/livy/hooks/test_livy.py +++ b/providers/tests/apache/livy/hooks/test_livy.py @@ -341,7 +341,7 @@ def test_get_batch_state_missing(self, requests_mock): def test_parse_post_response(self): res_id = LivyHook._parse_post_response({"id": BATCH_ID, "log": []}) - assert BATCH_ID == res_id + assert res_id == BATCH_ID def test_delete_batch_success(self, requests_mock): requests_mock.register_uri( @@ -749,7 +749,7 @@ def test_parameters_validation(self): def test_parse_post_response(self): res_id = 
LivyAsyncHook._parse_post_response({"id": BATCH_ID, "log": []}) - assert BATCH_ID == res_id + assert res_id == BATCH_ID @pytest.mark.parametrize("valid_size", ["1m", "1mb", "1G", "1GB", "1Gb", None]) def test_validate_size_format_success(self, valid_size): @@ -802,8 +802,9 @@ def test_validate_extra_conf_failure(self, conf): LivyAsyncHook._validate_extra_conf(conf) def test_parse_request_response(self): - assert BATCH_ID == LivyAsyncHook._parse_request_response( - response={"id": BATCH_ID, "log": []}, parameter="id" + assert ( + LivyAsyncHook._parse_request_response(response={"id": BATCH_ID, "log": []}, parameter="id") + == BATCH_ID ) @pytest.mark.parametrize("conn_id", [100, 0]) diff --git a/providers/tests/apache/pig/operators/test_pig.py b/providers/tests/apache/pig/operators/test_pig.py index ef281fdabe406..86919b8edab6a 100644 --- a/providers/tests/apache/pig/operators/test_pig.py +++ b/providers/tests/apache/pig/operators/test_pig.py @@ -40,7 +40,7 @@ def test_prepare_template(self): # converts when pigparams_jinja_translate = true operator = PigOperator(pig=pig, task_id=task_id, pigparams_jinja_translate=True) operator.prepare_template() - assert "sh echo {{ DATE }};" == operator.pig + assert operator.pig == "sh echo {{ DATE }};" @pytest.mark.db_test @mock.patch.object(PigCliHook, "run_cli") diff --git a/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py index 4b01536fbc526..cbca9f6e30bc1 100644 --- a/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py +++ b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py @@ -120,12 +120,12 @@ def test_get_base_pod_from_template(self, mock_kubeconfig, mock_generator, data_ # so None will be passed to deserialize_model_dict(). 
pod_template_file_path = "/bar/biz" get_base_pod_from_template(pod_template_file_path, None) - assert "deserialize_model_dict" == mock_generator.mock_calls[0][0] + assert mock_generator.mock_calls[0][0] == "deserialize_model_dict" assert mock_generator.mock_calls[0][1][0] is None mock_kubeconfig.pod_template_file = "/foo/bar" get_base_pod_from_template(None, mock_kubeconfig) - assert "deserialize_model_dict" == mock_generator.mock_calls[1][0] + assert mock_generator.mock_calls[1][0] == "deserialize_model_dict" assert mock_generator.mock_calls[1][1][0] is None # Provide existent file path, @@ -136,12 +136,12 @@ def test_get_base_pod_from_template(self, mock_kubeconfig, mock_generator, data_ pod_template_file_path = pod_template_file.as_posix() get_base_pod_from_template(pod_template_file_path, None) - assert "deserialize_model_dict" == mock_generator.mock_calls[2][0] + assert mock_generator.mock_calls[2][0] == "deserialize_model_dict" assert mock_generator.mock_calls[2][1][0] == expected_pod_dict mock_kubeconfig.pod_template_file = pod_template_file.as_posix() get_base_pod_from_template(None, mock_kubeconfig) - assert "deserialize_model_dict" == mock_generator.mock_calls[3][0] + assert mock_generator.mock_calls[3][0] == "deserialize_model_dict" assert mock_generator.mock_calls[3][1][0] == expected_pod_dict def test_make_safe_label_value(self): @@ -155,7 +155,7 @@ def test_make_safe_label_value(self): dag_id = "my_dag_id" assert dag_id == pod_generator.make_safe_label_value(dag_id) dag_id = "my_dag_id_" + "a" * 64 - assert "my_dag_id_" + "a" * 43 + "-0ce114c45" == pod_generator.make_safe_label_value(dag_id) + assert pod_generator.make_safe_label_value(dag_id) == "my_dag_id_" + "a" * 43 + "-0ce114c45" def test_execution_date_serialize_deserialize(self): datetime_obj = datetime.now() diff --git a/providers/tests/cncf/kubernetes/test_pod_generator.py b/providers/tests/cncf/kubernetes/test_pod_generator.py index 635dd2843ffe9..abfbe686b64ee 100644 --- a/providers/tests/cncf/kubernetes/test_pod_generator.py +++ b/providers/tests/cncf/kubernetes/test_pod_generator.py @@ -486,8 +486,8 @@ def test_ensure_max_identifier_length(self, mock_rand_str, data_file): for v in result.metadata.labels.values(): assert len(v) <= 63 - assert "a" * 512 == result.metadata.annotations["dag_id"] - assert "a" * 512 == result.metadata.annotations["task_id"] + assert result.metadata.annotations["dag_id"] == "a" * 512 + assert result.metadata.annotations["task_id"] == "a" * 512 def test_merge_objects_empty(self): annotations = {"foo1": "bar1"} diff --git a/providers/tests/common/sql/hooks/test_dbapi.py b/providers/tests/common/sql/hooks/test_dbapi.py index 08bcf0b7fb0b9..1f3f39aa451ab 100644 --- a/providers/tests/common/sql/hooks/test_dbapi.py +++ b/providers/tests/common/sql/hooks/test_dbapi.py @@ -272,7 +272,7 @@ def test_get_uri_schema_not_none(self): port=1, ) ) - assert "conn-type://login:password@host:1/schema" == self.db_hook.get_uri() + assert self.db_hook.get_uri() == "conn-type://login:password@host:1/schema" def test_get_uri_schema_override(self): self.db_hook_schema_override.get_connection = mock.MagicMock( @@ -285,7 +285,7 @@ def test_get_uri_schema_override(self): port=1, ) ) - assert "conn-type://login:password@host:1/schema-override" == self.db_hook_schema_override.get_uri() + assert self.db_hook_schema_override.get_uri() == "conn-type://login:password@host:1/schema-override" def test_get_uri_schema_none(self): self.db_hook.get_connection = mock.MagicMock( @@ -293,7 +293,7 @@ def 
test_get_uri_schema_none(self): conn_type="conn-type", host="host", login="login", password="password", schema=None, port=1 ) ) - assert "conn-type://login:password@host:1" == self.db_hook.get_uri() + assert self.db_hook.get_uri() == "conn-type://login:password@host:1" def test_get_uri_special_characters(self): self.db_hook.get_connection = mock.MagicMock( @@ -307,7 +307,7 @@ def test_get_uri_special_characters(self): ) ) assert ( - "conn-type://lo%2Fgi%23%21%20n:pass%2A%21%20word%2F@host%2F:1/schema%2F" == self.db_hook.get_uri() + self.db_hook.get_uri() == "conn-type://lo%2Fgi%23%21%20n:pass%2A%21%20word%2F@host%2F:1/schema%2F" ) def test_get_uri_login_none(self): @@ -321,7 +321,7 @@ def test_get_uri_login_none(self): port=1, ) ) - assert "conn-type://:password@host:1/schema" == self.db_hook.get_uri() + assert self.db_hook.get_uri() == "conn-type://:password@host:1/schema" def test_get_uri_password_none(self): self.db_hook.get_connection = mock.MagicMock( @@ -334,7 +334,7 @@ def test_get_uri_password_none(self): port=1, ) ) - assert "conn-type://login@host:1/schema" == self.db_hook.get_uri() + assert self.db_hook.get_uri() == "conn-type://login@host:1/schema" def test_get_uri_authority_none(self): self.db_hook.get_connection = mock.MagicMock( @@ -347,7 +347,7 @@ def test_get_uri_authority_none(self): port=1, ) ) - assert "conn-type://host:1/schema" == self.db_hook.get_uri() + assert self.db_hook.get_uri() == "conn-type://host:1/schema" def test_get_uri_extra(self): self.db_hook.get_connection = mock.MagicMock( diff --git a/providers/tests/common/sql/sensors/test_sql.py b/providers/tests/common/sql/sensors/test_sql.py index 4940e54d3a7bd..add9c78dc7fe2 100644 --- a/providers/tests/common/sql/sensors/test_sql.py +++ b/providers/tests/common/sql/sensors/test_sql.py @@ -239,7 +239,7 @@ def test_sql_sensor_postgres_poke_invalid_failure(self, mock_hook): mock_get_records.return_value = [[1]] with pytest.raises(AirflowException) as ctx: op.poke({}) - assert "self.failure is present, but not callable -> [1]" == str(ctx.value) + assert str(ctx.value) == "self.failure is present, but not callable -> [1]" @mock.patch("airflow.providers.common.sql.sensors.sql.BaseHook") def test_sql_sensor_postgres_poke_invalid_success( @@ -259,7 +259,7 @@ def test_sql_sensor_postgres_poke_invalid_success( mock_get_records.return_value = [[1]] with pytest.raises(AirflowException) as ctx: op.poke({}) - assert "self.success is present, but not callable -> [1]" == str(ctx.value) + assert str(ctx.value) == "self.success is present, but not callable -> [1]" @pytest.mark.backend("postgres") def test_sql_sensor_postgres_with_selector(self): diff --git a/providers/tests/databricks/operators/test_databricks.py b/providers/tests/databricks/operators/test_databricks.py index 1cbe447667709..da3c697360ffe 100644 --- a/providers/tests/databricks/operators/test_databricks.py +++ b/providers/tests/databricks/operators/test_databricks.py @@ -500,7 +500,7 @@ def test_exec_create(self, db_mock_class): ) db_mock.create_job.assert_called_once_with(expected) - assert JOB_ID == return_result + assert return_result == JOB_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_exec_reset(self, db_mock_class): @@ -554,7 +554,7 @@ def test_exec_reset(self, db_mock_class): ) db_mock.reset_job.assert_called_once_with(JOB_ID, expected) - assert JOB_ID == return_result + assert return_result == JOB_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def 
test_exec_update_job_permission(self, db_mock_class): @@ -877,7 +877,7 @@ def test_exec_success(self, db_mock_class): db_mock.submit_run.assert_called_once_with(expected) db_mock.get_run_page_url.assert_called_once_with(RUN_ID) db_mock.get_run.assert_called_once_with(RUN_ID) - assert RUN_ID == op.run_id + assert op.run_id == RUN_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_exec_pipeline_name(self, db_mock_class): @@ -906,7 +906,7 @@ def test_exec_pipeline_name(self, db_mock_class): db_mock.submit_run.assert_called_once_with(expected) db_mock.get_run_page_url.assert_called_once_with(RUN_ID) db_mock.get_run.assert_called_once_with(RUN_ID) - assert RUN_ID == op.run_id + assert op.run_id == RUN_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_exec_failure(self, db_mock_class): @@ -942,7 +942,7 @@ def test_exec_failure(self, db_mock_class): db_mock.submit_run.assert_called_once_with(expected) db_mock.get_run_page_url.assert_called_once_with(RUN_ID) db_mock.get_run.assert_called_once_with(RUN_ID) - assert RUN_ID == op.run_id + assert op.run_id == RUN_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_on_kill(self, db_mock_class): @@ -1296,7 +1296,7 @@ def test_exec_success(self, db_mock_class): db_mock.run_now.assert_called_once_with(expected) db_mock.get_run_page_url.assert_called_once_with(RUN_ID) db_mock.get_run.assert_called_once_with(RUN_ID) - assert RUN_ID == op.run_id + assert op.run_id == RUN_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_exec_failure(self, db_mock_class): @@ -1330,7 +1330,7 @@ def test_exec_failure(self, db_mock_class): db_mock.run_now.assert_called_once_with(expected) db_mock.get_run_page_url.assert_called_once_with(RUN_ID) db_mock.get_run.assert_called_once_with(RUN_ID) - assert RUN_ID == op.run_id + assert op.run_id == RUN_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_exec_failure_with_message(self, db_mock_class): @@ -1386,7 +1386,7 @@ def test_exec_failure_with_message(self, db_mock_class): db_mock.run_now.assert_called_once_with(expected) db_mock.get_run_page_url.assert_called_once_with(RUN_ID) db_mock.get_run.assert_called_once_with(RUN_ID) - assert RUN_ID == op.run_id + assert op.run_id == RUN_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_exec_multiple_failures_with_message(self, db_mock_class): @@ -1456,7 +1456,7 @@ def test_exec_multiple_failures_with_message(self, db_mock_class): db_mock.run_now.assert_called_once_with(expected) db_mock.get_run_page_url.assert_called_once_with(RUN_ID) db_mock.get_run.assert_called_once_with(RUN_ID) - assert RUN_ID == op.run_id + assert op.run_id == RUN_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_on_kill(self, db_mock_class): @@ -1576,7 +1576,7 @@ def test_exec_with_job_name(self, db_mock_class): db_mock.run_now.assert_called_once_with(expected) db_mock.get_run_page_url.assert_called_once_with(RUN_ID) db_mock.get_run.assert_called_once_with(RUN_ID) - assert RUN_ID == op.run_id + assert op.run_id == RUN_ID @mock.patch("airflow.providers.databricks.operators.databricks.DatabricksHook") def test_exec_failure_if_job_id_not_found(self, db_mock_class): diff --git a/providers/tests/dingding/hooks/test_dingding.py b/providers/tests/dingding/hooks/test_dingding.py index e2da44736be84..c779437caeb3f 100644 --- 
a/providers/tests/dingding/hooks/test_dingding.py +++ b/providers/tests/dingding/hooks/test_dingding.py @@ -44,7 +44,7 @@ def setup_method(self): def test_get_endpoint_conn_id(self): hook = DingdingHook(dingding_conn_id=self.conn_id) endpoint = hook._get_endpoint() - assert "robot/send?access_token=you_token_here" == endpoint + assert endpoint == "robot/send?access_token=you_token_here" def test_build_text_message_not_remind(self): config = { diff --git a/providers/tests/edge/cli/test_edge_command.py b/providers/tests/edge/cli/test_edge_command.py index 99d9c6db92b58..f6612b1a99a51 100644 --- a/providers/tests/edge/cli/test_edge_command.py +++ b/providers/tests/edge/cli/test_edge_command.py @@ -68,7 +68,7 @@ def test_write_pid_to_pidfile_created_by_crashed_instance(tmp_path): with patch("os.getpid", return_value=0): pid_file_path = tmp_path / "file.pid" _write_pid_to_pidfile(pid_file_path) - assert "0" == pid_file_path.read_text().strip() + assert pid_file_path.read_text().strip() == "0" # write a PID file with the current process ID, call should not raise an exception _write_pid_to_pidfile(pid_file_path) assert str(os.getpid()) == pid_file_path.read_text().strip() diff --git a/providers/tests/elasticsearch/log/test_es_task_handler.py b/providers/tests/elasticsearch/log/test_es_task_handler.py index 642f3fe4cdb19..1811c6eee63b1 100644 --- a/providers/tests/elasticsearch/log/test_es_task_handler.py +++ b/providers/tests/elasticsearch/log/test_es_task_handler.py @@ -206,12 +206,12 @@ def test_read(self, ti): ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert len(logs[0]) == 1 assert self.test_message == logs[0][0][-1] assert not metadatas[0]["end_of_log"] - assert "1" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "1" assert timezone.parse(metadatas[0]["last_log_timestamp"]) > ts def test_read_with_patterns(self, ti): @@ -221,12 +221,12 @@ def test_read_with_patterns(self, ti): ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert len(logs[0]) == 1 assert self.test_message == logs[0][0][-1] assert not metadatas[0]["end_of_log"] - assert "1" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "1" assert timezone.parse(metadatas[0]["last_log_timestamp"]) > ts def test_read_with_patterns_no_match(self, ti): @@ -236,11 +236,11 @@ def test_read_with_patterns_no_match(self, ti): ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) - assert [[]] == logs + assert logs == [[]] assert not metadatas[0]["end_of_log"] - assert "0" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "0" # last_log_timestamp won't change if no log lines read. 
assert timezone.parse(metadatas[0]["last_log_timestamp"]) == ts @@ -267,7 +267,7 @@ def test_read_missing_logs(self, seconds, create_task_instance): ts = pendulum.now().add(seconds=-seconds) logs, metadatas = self.es_task_handler.read(ti, 1, {"offset": 0, "last_log_timestamp": str(ts)}) - assert 1 == len(logs) + assert len(logs) == 1 if seconds > 5: # we expect a log not found message when checking began more than 5 seconds ago assert len(logs[0]) == 1 @@ -281,7 +281,7 @@ def test_read_missing_logs(self, seconds, create_task_instance): assert logs == [[]] assert metadatas[0]["end_of_log"] is False assert len(logs) == len(metadatas) - assert "0" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "0" assert timezone.parse(metadatas[0]["last_log_timestamp"]) == ts def test_read_with_match_phrase_query(self, ti): @@ -298,22 +298,22 @@ def test_read_with_match_phrase_query(self, ti): logs, metadatas = self.es_task_handler.read( ti, 1, {"offset": "0", "last_log_timestamp": str(ts), "end_of_log": False, "max_offset": 2} ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert self.test_message == logs[0][0][-1] assert another_test_message != logs[0] assert not metadatas[0]["end_of_log"] - assert "1" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "1" assert timezone.parse(metadatas[0]["last_log_timestamp"]) > ts def test_read_with_none_metadata(self, ti): logs, metadatas = self.es_task_handler.read(ti, 1) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert self.test_message == logs[0][0][-1] assert not metadatas[0]["end_of_log"] - assert "1" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "1" assert timezone.parse(metadatas[0]["last_log_timestamp"]) < pendulum.now() def test_read_nonexistent_log(self, ti): @@ -325,23 +325,23 @@ def test_read_nonexistent_log(self, ti): logs, metadatas = self.es_task_handler.read( ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) - assert [[]] == logs + assert logs == [[]] assert not metadatas[0]["end_of_log"] - assert "0" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "0" # last_log_timestamp won't change if no log lines read. assert timezone.parse(metadatas[0]["last_log_timestamp"]) == ts def test_read_with_empty_metadata(self, ti): ts = pendulum.now() logs, metadatas = self.es_task_handler.read(ti, 1, {}) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert self.test_message == logs[0][0][-1] assert not metadatas[0]["end_of_log"] # offset should be initialized to 0 if not provided. - assert "1" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "1" # last_log_timestamp will be initialized using log reading time # if not last_log_timestamp is provided. assert timezone.parse(metadatas[0]["last_log_timestamp"]) > ts @@ -349,12 +349,12 @@ def test_read_with_empty_metadata(self, ti): # case where offset is missing but metadata not empty. self.es.delete(index=self.index_name, doc_type=self.doc_type, id=1) logs, metadatas = self.es_task_handler.read(ti, 1, {"end_of_log": False}) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) - assert [[]] == logs + assert logs == [[]] assert not metadatas[0]["end_of_log"] # offset should be initialized to 0 if not provided. 
- assert "0" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "0" # last_log_timestamp will be initialized using log reading time # if not last_log_timestamp is provided. assert timezone.parse(metadatas[0]["last_log_timestamp"]) > ts @@ -376,9 +376,9 @@ def test_read_timeout(self, ti): "end_of_log": False, }, ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) - assert [[]] == logs + assert logs == [[]] assert metadatas[0]["end_of_log"] assert str(offset) == metadatas[0]["offset"] assert timezone.parse(metadatas[0]["last_log_timestamp"]) == ts @@ -390,13 +390,13 @@ def test_read_as_download_logs(self, ti): 1, {"offset": 0, "last_log_timestamp": str(ts), "download_logs": True, "end_of_log": False}, ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert len(logs[0]) == 1 assert self.test_message == logs[0][0][-1] assert not metadatas[0]["end_of_log"] assert metadatas[0]["download_logs"] - assert "1" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "1" assert timezone.parse(metadatas[0]["last_log_timestamp"]) > ts def test_read_raises(self, ti): @@ -407,11 +407,11 @@ def test_read_raises(self, ti): assert mock_exception.call_count == 1 args, kwargs = mock_exception.call_args assert "Could not read log with log_id:" in args[0] - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) - assert [[]] == logs + assert logs == [[]] assert not metadatas[0]["end_of_log"] - assert "0" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "0" def test_set_context(self, ti): self.es_task_handler.set_context(ti) @@ -447,7 +447,7 @@ def test_read_with_json_format(self, ti): logs, _ = self.es_task_handler.read( ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert "[2020-12-24 19:25:00,962] {taskinstance.py:851} INFO - some random stuff - " == logs[0][0][1] + assert logs[0][0][1] == "[2020-12-24 19:25:00,962] {taskinstance.py:851} INFO - some random stuff - " def test_read_with_json_format_with_custom_offset_and_host_fields(self, ti): ts = pendulum.now() @@ -475,7 +475,7 @@ def test_read_with_json_format_with_custom_offset_and_host_fields(self, ti): logs, _ = self.es_task_handler.read( ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert "[2020-12-24 19:25:00,962] {taskinstance.py:851} INFO - some random stuff - " == logs[0][0][1] + assert logs[0][0][1] == "[2020-12-24 19:25:00,962] {taskinstance.py:851} INFO - some random stuff - " def test_read_with_custom_offset_and_host_fields(self, ti): ts = pendulum.now() @@ -531,7 +531,7 @@ def test_close_closed(self, ti): with open( os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: - assert 0 == len(log_file.read()) + assert len(log_file.read()) == 0 def test_close_with_no_handler(self, ti): self.es_task_handler.set_context(ti) @@ -540,7 +540,7 @@ def test_close_with_no_handler(self, ti): with open( os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: - assert 0 == len(log_file.read()) + assert len(log_file.read()) == 0 assert self.es_task_handler.closed def test_close_with_no_stream(self, ti): @@ -563,14 +563,14 @@ def test_close_with_no_stream(self, ti): assert self.es_task_handler.closed def test_render_log_id(self, ti): - assert self.LOG_ID == self.es_task_handler._render_log_id(ti, 1) + assert self.es_task_handler._render_log_id(ti, 1) == self.LOG_ID self.es_task_handler.json_format = 
True - assert self.JSON_LOG_ID == self.es_task_handler._render_log_id(ti, 1) + assert self.es_task_handler._render_log_id(ti, 1) == self.JSON_LOG_ID def test_clean_date(self): clean_logical_date = self.es_task_handler._clean_date(datetime(2016, 7, 8, 9, 10, 11, 12)) - assert "2016_07_08T09_10_11_000012" == clean_logical_date + assert clean_logical_date == "2016_07_08T09_10_11_000012" @pytest.mark.parametrize( "json_format, es_frontend, expected_url", diff --git a/providers/tests/exasol/hooks/test_exasol.py b/providers/tests/exasol/hooks/test_exasol.py index f208450826487..1c58431ea693f 100644 --- a/providers/tests/exasol/hooks/test_exasol.py +++ b/providers/tests/exasol/hooks/test_exasol.py @@ -150,7 +150,7 @@ def test_bulk_dump(self): self.db_hook.bulk_dump("table", "/tmp/file") def test_serialize_cell(self): - assert "foo" == self.db_hook._serialize_cell("foo", None) + assert self.db_hook._serialize_cell("foo", None) == "foo" def test_export_to_file(self): file_name = "file_name" diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py index e009faefa42c4..b670063c7b3b2 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py @@ -138,12 +138,12 @@ def test_should_respond_404(self): ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Queue event with dag_id: `not_exists` and asset uri: `not_exists` was not found", "status": 404, "title": "Queue event not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } class TestDeleteDagAssetQueuedEvent(TestAssetEndpoint): @@ -179,12 +179,12 @@ def test_should_respond_404(self): ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Queue event with dag_id: `not_exists` and asset uri: `not_exists` was not found", "status": 404, "title": "Queue event not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } class TestGetDagAssetQueuedEvents(TestQueuedEventEndpoint): @@ -221,12 +221,12 @@ def test_should_respond_404(self): ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Queue event with dag_id: `not_exists` was not found", "status": 404, "title": "Queue event not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } class TestDeleteDagDatasetQueuedEvents(TestAssetEndpoint): @@ -239,12 +239,12 @@ def test_should_respond_404(self): ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Queue event with dag_id: `not_exists` was not found", "status": 404, "title": "Queue event not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } class TestGetDatasetQueuedEvents(TestQueuedEventEndpoint): @@ -282,12 +282,12 @@ def test_should_respond_404(self): ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Queue event with asset uri: `not_exists` was not found", "status": 404, "title": "Queue event not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } class TestDeleteDatasetQueuedEvents(TestQueuedEventEndpoint): @@ -317,9 +317,9 @@ def test_should_respond_404(self): ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Queue event with asset uri: `not_exists` was not found", "status": 404, "title": "Queue event not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } diff --git 
a/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py index aa00b49ad6d33..66da26f76bf9f 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py @@ -120,12 +120,12 @@ def test_should_return_200_with_single_dag_read(self, session): assert response.status_code == 200 response_data = response.json response_data["import_error_id"] = 1 - assert { + assert response_data == { "filename": "Lorem_ipsum.py", "import_error_id": 1, "stack_trace": "Lorem ipsum", "timestamp": "2020-06-10T12:00:00+00:00", - } == response_data + } def test_should_return_200_redacted_with_single_dag_read_in_dagfile(self, session): for dag_id in TEST_DAG_IDS: @@ -146,12 +146,12 @@ def test_should_return_200_redacted_with_single_dag_read_in_dagfile(self, sessio assert response.status_code == 200 response_data = response.json response_data["import_error_id"] = 1 - assert { + assert response_data == { "filename": "Lorem_ipsum.py", "import_error_id": 1, "stack_trace": "REDACTED - you do not have read permission on all DAGs in the file", "timestamp": "2020-06-10T12:00:00+00:00", - } == response_data + } class TestGetImportErrorsEndpoint(TestBaseImportError): @@ -175,7 +175,7 @@ def test_get_import_errors_single_dag(self, session): assert response.status_code == 200 response_data = response.json self._normalize_import_errors(response_data["import_errors"]) - assert { + assert response_data == { "import_errors": [ { "filename": "/tmp/test_dag.py", @@ -185,7 +185,7 @@ def test_get_import_errors_single_dag(self, session): }, ], "total_entries": 1, - } == response_data + } def test_get_import_errors_single_dag_in_dagfile(self, session): for dag_id in TEST_DAG_IDS: @@ -208,7 +208,7 @@ def test_get_import_errors_single_dag_in_dagfile(self, session): assert response.status_code == 200 response_data = response.json self._normalize_import_errors(response_data["import_errors"]) - assert { + assert response_data == { "import_errors": [ { "filename": "/tmp/all_in_one.py", @@ -218,4 +218,4 @@ def test_get_import_errors_single_dag_in_dagfile(self, session): }, ], "total_entries": 1, - } == response_data + } diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py index 98451a894ca76..b2e259fee28d1 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py @@ -89,12 +89,12 @@ def test_should_respond_404(self): "/auth/fab/v1/roles/invalid-role", environ_overrides={"REMOTE_USER": "test"} ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Role with name 'invalid-role' was not found", "status": 404, "title": "Role not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self): response = self.client.get("/auth/fab/v1/roles/Admin") diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py index d1830359556e2..7801fdf08111c 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py @@ -211,12 +211,12 @@ 
def test_should_respond_404(self): "/auth/fab/v1/users/invalid-user", environ_overrides={"REMOTE_USER": "test"} ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "The User with username `invalid-user` was not found", "status": 404, "title": "User not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self): response = self.client.get("/auth/fab/v1/users/TEST_USER1") diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py index 4b909e380062c..545f40c91dddc 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py @@ -141,7 +141,7 @@ def test_should_respond_200_with_tilde_and_granular_dag_access(self): environ_overrides={"REMOTE_USER": "test_granular_permissions"}, ) - assert 200 == response.status_code + assert response.status_code == 200 response_data = response.json for xcom_entry in response_data["xcom_entries"]: xcom_entry["timestamp"] = "TIMESTAMP" diff --git a/providers/tests/fab/auth_manager/security_manager/test_constants.py b/providers/tests/fab/auth_manager/security_manager/test_constants.py index dbe592c59d747..401b9b108262c 100644 --- a/providers/tests/fab/auth_manager/security_manager/test_constants.py +++ b/providers/tests/fab/auth_manager/security_manager/test_constants.py @@ -24,10 +24,10 @@ class TestFbSecurityManagerConstants: def test_existing_roles(self): - assert { + assert EXISTING_ROLES == { "Admin", "Viewer", "User", "Op", "Public", - } == EXISTING_ROLES + } diff --git a/providers/tests/fab/auth_manager/test_security.py b/providers/tests/fab/auth_manager/test_security.py index a4e2714ab96d9..827ed4e7f3bf6 100644 --- a/providers/tests/fab/auth_manager/test_security.py +++ b/providers/tests/fab/auth_manager/test_security.py @@ -900,7 +900,7 @@ def test_no_additional_dag_permission_views_created(db, security_manager): def test_override_role_vm(app_builder): test_security_manager = MockSecurityManager(appbuilder=app_builder) assert len(test_security_manager.VIEWER_VMS) == 1 - assert {"Airflow"} == test_security_manager.VIEWER_VMS + assert test_security_manager.VIEWER_VMS == {"Airflow"} def test_correct_roles_have_perms_to_read_config(security_manager): diff --git a/providers/tests/google/cloud/_internal_client/test_secret_manager_client.py b/providers/tests/google/cloud/_internal_client/test_secret_manager_client.py index 86748dc9a0fcb..228d9d6d52bcd 100644 --- a/providers/tests/google/cloud/_internal_client/test_secret_manager_client.py +++ b/providers/tests/google/cloud/_internal_client/test_secret_manager_client.py @@ -86,7 +86,7 @@ def test_get_existing_key(self, mock_secrets_client): secrets_client = _SecretManagerClient(credentials="credentials") secret = secrets_client.get_secret(secret_id="existing", project_id="project_id") mock_client.secret_version_path.assert_called_once_with("project_id", "existing", "latest") - assert "result" == secret + assert secret == "result" mock_client.access_secret_version.assert_called_once_with(request={"name": "full-path"}) @mock.patch(INTERNAL_CLIENT_MODULE + ".SecretManagerServiceClient") @@ -102,5 +102,5 @@ def test_get_existing_key_with_version(self, mock_secrets_client): secret_id="existing", project_id="project_id", secret_version="test-version" ) mock_client.secret_version_path.assert_called_once_with("project_id", "existing", 
"test-version") - assert "result" == secret + assert secret == "result" mock_client.access_secret_version.assert_called_once_with(request={"name": "full-path"}) diff --git a/providers/tests/google/cloud/hooks/test_bigquery.py b/providers/tests/google/cloud/hooks/test_bigquery.py index ee0f904bb94b6..3c650d432e532 100644 --- a/providers/tests/google/cloud/hooks/test_bigquery.py +++ b/providers/tests/google/cloud/hooks/test_bigquery.py @@ -1044,7 +1044,7 @@ def test_close(self, mock_get_client): def test_rowcount(self, mock_get_client): bq_cursor = self.hook.get_cursor() result = bq_cursor.rowcount - assert -1 == result + assert result == -1 @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_client") @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryCursor.next") @@ -1061,7 +1061,7 @@ def test_fetchone(self, mock_next, mock_get_client): def test_fetchall(self, mock_fetchone, mock_get_client): bq_cursor = self.hook.get_cursor() result = bq_cursor.fetchall() - assert [1, 2, 3] == result + assert result == [1, 2, 3] @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_client") @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryCursor.fetchone") @@ -1070,15 +1070,15 @@ def test_fetchmany(self, mock_fetchone, mock_get_client): bq_cursor = self.hook.get_cursor() mock_fetchone.side_effect = side_effect_values result = bq_cursor.fetchmany() - assert [1] == result + assert result == [1] mock_fetchone.side_effect = side_effect_values result = bq_cursor.fetchmany(2) - assert [1, 2] == result + assert result == [1, 2] mock_fetchone.side_effect = side_effect_values result = bq_cursor.fetchmany(5) - assert [1, 2, 3] == result + assert result == [1, 2, 3] @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_client") def test_next_no_jobid(self, mock_get_client): @@ -1093,9 +1093,9 @@ def test_next_buffer(self, mock_get_client): bq_cursor.job_id = JOB_ID bq_cursor.buffer = [1, 2] result = bq_cursor.next() - assert 1 == result + assert result == 1 result = bq_cursor.next() - assert 2 == result + assert result == 2 bq_cursor.all_pages_loaded = True result = bq_cursor.next() assert result is None @@ -1123,10 +1123,10 @@ def test_next(self, mock_build): bq_cursor.location = LOCATION result = bq_cursor.next() - assert ["one", 1] == result + assert result == ["one", 1] result = bq_cursor.next() - assert ["two", 2] == result + assert result == ["two", 2] mock_get_query_results.assert_called_once_with( jobId=JOB_ID, location=LOCATION, pageToken=None, projectId="bq-project" diff --git a/providers/tests/google/cloud/hooks/test_cloud_sql.py b/providers/tests/google/cloud/hooks/test_cloud_sql.py index 13e9d3f574696..4eb4c380cfef3 100644 --- a/providers/tests/google/cloud/hooks/test_cloud_sql.py +++ b/providers/tests/google/cloud/hooks/test_cloud_sql.py @@ -93,7 +93,7 @@ def test_instance_import_exception(self, mock_get_credentials): self.cloudsql_hook.import_instance(instance="instance", body={}) err = ctx.value assert "Importing instance " in str(err) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -106,8 +106,8 @@ def test_instance_export_exception(self, mock_get_credentials): with pytest.raises(HttpError) as ctx: self.cloudsql_hook.export_instance(instance="instance", body={}) err = ctx.value - assert 400 == err.resp.status - assert 1 == 
mock_get_credentials.call_count + assert err.resp.status == 400 + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -127,7 +127,7 @@ def test_instance_import(self, wait_for_operation_to_complete, get_conn, mock_ge wait_for_operation_to_complete.assert_called_once_with( project_id="example-project", operation_name="operation_id" ) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -144,7 +144,7 @@ def test_instance_export(self, wait_for_operation_to_complete, get_conn, mock_ge export_method.assert_called_once_with(body={}, instance="instance", project="example-project") execute_method.assert_called_once_with(num_retries=5) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn") @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete") @@ -175,11 +175,11 @@ def test_get_instance(self, wait_for_operation_to_complete, get_conn, mock_get_c wait_for_operation_to_complete.return_value = None res = self.cloudsql_hook.get_instance(instance="instance") assert res is not None - assert "instance" == res["name"] + assert res["name"] == "instance" get_method.assert_called_once_with(instance="instance", project="example-project") execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_not_called() - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -199,7 +199,7 @@ def test_create_instance(self, wait_for_operation_to_complete, get_conn, mock_ge wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -222,9 +222,9 @@ def test_create_instance_with_in_progress_retry( wait_for_operation_to_complete.return_value = None self.cloudsql_hook.create_instance(body={}) - assert 1 == mock_get_credentials.call_count - assert 2 == insert_method.call_count - assert 2 == execute_method.call_count + assert mock_get_credentials.call_count == 1 + assert insert_method.call_count == 2 + assert execute_method.call_count == 2 wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) @@ -250,9 +250,9 @@ def test_patch_instance_with_in_progress_retry( wait_for_operation_to_complete.return_value = None self.cloudsql_hook.patch_instance(instance="instance", body={}) - assert 1 == mock_get_credentials.call_count - assert 2 == patch_method.call_count - assert 2 == execute_method.call_count + assert mock_get_credentials.call_count == 1 + assert patch_method.call_count == 2 + assert execute_method.call_count == 2 wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) @@ -275,7 +275,7 @@ def test_patch_instance(self, wait_for_operation_to_complete, get_conn, mock_get wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", 
project_id="example-project" ) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -295,7 +295,7 @@ def test_delete_instance(self, wait_for_operation_to_complete, get_conn, mock_ge wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project", time_to_sleep=5 ) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -318,9 +318,9 @@ def test_delete_instance_with_in_progress_retry( wait_for_operation_to_complete.return_value = None self.cloudsql_hook.delete_instance(instance="instance") - assert 1 == mock_get_credentials.call_count - assert 2 == delete_method.call_count - assert 2 == execute_method.call_count + assert mock_get_credentials.call_count == 1 + assert delete_method.call_count == 2 + assert execute_method.call_count == 2 wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project", time_to_sleep=5 ) @@ -349,7 +349,7 @@ def test_instance_clone(self, wait_for_operation_to_complete, get_conn, mock_get wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -364,13 +364,13 @@ def test_get_database(self, wait_for_operation_to_complete, get_conn, mock_get_c wait_for_operation_to_complete.return_value = None res = self.cloudsql_hook.get_database(database="database", instance="instance") assert res is not None - assert "database" == res["name"] + assert res["name"] == "database" get_method.assert_called_once_with( instance="instance", database="database", project="example-project" ) execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_not_called() - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -390,7 +390,7 @@ def test_create_database(self, wait_for_operation_to_complete, get_conn, mock_ge wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -413,9 +413,9 @@ def test_create_database_with_in_progress_retry( wait_for_operation_to_complete.return_value = None self.cloudsql_hook.create_database(instance="instance", body={}) - assert 1 == mock_get_credentials.call_count - assert 2 == insert_method.call_count - assert 2 == execute_method.call_count + assert mock_get_credentials.call_count == 1 + assert insert_method.call_count == 2 + assert execute_method.call_count == 2 wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) @@ -440,7 +440,7 @@ def test_patch_database(self, wait_for_operation_to_complete, get_conn, mock_get wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", 
project_id="example-project" ) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -463,9 +463,9 @@ def test_patch_database_with_in_progress_retry( wait_for_operation_to_complete.return_value = None self.cloudsql_hook.patch_database(instance="instance", database="database", body={}) - assert 1 == mock_get_credentials.call_count - assert 2 == patch_method.call_count - assert 2 == execute_method.call_count + assert mock_get_credentials.call_count == 1 + assert patch_method.call_count == 2 + assert execute_method.call_count == 2 wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) @@ -490,7 +490,7 @@ def test_delete_database(self, wait_for_operation_to_complete, get_conn, mock_ge wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) - assert 1 == mock_get_credentials.call_count + assert mock_get_credentials.call_count == 1 @mock.patch( "airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id", @@ -513,9 +513,9 @@ def test_delete_database_with_in_progress_retry( wait_for_operation_to_complete.return_value = None self.cloudsql_hook.delete_database(instance="instance", database="database") - assert 1 == mock_get_credentials.call_count - assert 2 == delete_method.call_count - assert 2 == execute_method.call_count + assert mock_get_credentials.call_count == 1 + assert delete_method.call_count == 2 + assert execute_method.call_count == 2 wait_for_operation_to_complete.assert_called_once_with( operation_name="operation_id", project_id="example-project" ) @@ -590,7 +590,7 @@ def test_get_instance_overridden_project_id( project_id="example-project", instance="instance" ) assert res is not None - assert "instance" == res["name"] + assert res["name"] == "instance" get_method.assert_called_once_with(instance="instance", project="example-project") execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_not_called() @@ -680,7 +680,7 @@ def test_get_database_overridden_project_id( project_id="example-project", database="database", instance="instance" ) assert res is not None - assert "database" == res["name"] + assert res["name"] == "database" get_method.assert_called_once_with( instance="instance", database="database", project="example-project" ) @@ -1443,17 +1443,17 @@ def test_hook_with_not_too_long_unix_socket_path(self, get_connection): get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() - assert "postgres" == connection.conn_type - assert "testdb" == connection.schema + assert connection.conn_type == "postgres" + assert connection.schema == "testdb" def _verify_postgres_connection(self, get_connection, uri): get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() - assert "postgres" == connection.conn_type - assert "127.0.0.1" == connection.host - assert 3200 == connection.port - assert "testdb" == connection.schema + assert connection.conn_type == "postgres" + assert connection.host == "127.0.0.1" + assert connection.port == 3200 + assert connection.schema == "testdb" return connection @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") @@ -1479,9 +1479,9 @@ def 
side_effect_func(cert_name, cert_path, cert_value): "sslkey=/bin/bash&sslrootcert=/bin/bash" ) connection = self._verify_postgres_connection(get_connection, uri) - assert "/tmp/sslkey" == connection.extra_dejson["sslkey"] - assert "/tmp/sslcert" == connection.extra_dejson["sslcert"] - assert "/tmp/sslrootcert" == connection.extra_dejson["sslrootcert"] + assert connection.extra_dejson["sslkey"] == "/tmp/sslkey" + assert connection.extra_dejson["sslcert"] == "/tmp/sslcert" + assert connection.extra_dejson["sslrootcert"] == "/tmp/sslrootcert" @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_postgres_proxy_socket(self, get_connection): @@ -1493,11 +1493,11 @@ def test_hook_with_correct_parameters_postgres_proxy_socket(self, get_connection get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() - assert "postgres" == connection.conn_type + assert connection.conn_type == "postgres" assert tempfile.gettempdir() in connection.host assert "example-project:europe-west1:testdb" in connection.host assert connection.port is None - assert "testdb" == connection.schema + assert connection.schema == "testdb" @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_project_id_missing(self, get_connection): @@ -1512,10 +1512,10 @@ def verify_mysql_connection(self, get_connection, uri): get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() - assert "mysql" == connection.conn_type - assert "127.0.0.1" == connection.host - assert 3200 == connection.port - assert "testdb" == connection.schema + assert connection.conn_type == "mysql" + assert connection.host == "127.0.0.1" + assert connection.port == 3200 + assert connection.schema == "testdb" return connection @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") @@ -1528,10 +1528,10 @@ def test_hook_with_correct_parameters_postgres_proxy_tcp(self, get_connection): get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() - assert "postgres" == connection.conn_type - assert "127.0.0.1" == connection.host - assert 3200 != connection.port - assert "testdb" == connection.schema + assert connection.conn_type == "postgres" + assert connection.host == "127.0.0.1" + assert connection.port != 3200 + assert connection.schema == "testdb" @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_mysql(self, get_connection): @@ -1556,9 +1556,9 @@ def side_effect_func(cert_name, cert_path, cert_value): "sslkey=/bin/bash&sslrootcert=/bin/bash" ) connection = self.verify_mysql_connection(get_connection, uri) - assert "/tmp/sslcert" == json.loads(connection.extra_dejson["ssl"])["cert"] - assert "/tmp/sslkey" == json.loads(connection.extra_dejson["ssl"])["key"] - assert "/tmp/sslrootcert" == json.loads(connection.extra_dejson["ssl"])["ca"] + assert json.loads(connection.extra_dejson["ssl"])["cert"] == "/tmp/sslcert" + assert json.loads(connection.extra_dejson["ssl"])["key"] == "/tmp/sslkey" + assert json.loads(connection.extra_dejson["ssl"])["ca"] == "/tmp/sslrootcert" @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def 
test_hook_with_correct_parameters_mysql_proxy_socket(self, get_connection): @@ -1570,12 +1570,12 @@ def test_hook_with_correct_parameters_mysql_proxy_socket(self, get_connection): get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() - assert "mysql" == connection.conn_type - assert "localhost" == connection.host + assert connection.conn_type == "mysql" + assert connection.host == "localhost" assert tempfile.gettempdir() in connection.extra_dejson["unix_socket"] assert "example-project:europe-west1:testdb" in connection.extra_dejson["unix_socket"] assert connection.port is None - assert "testdb" == connection.schema + assert connection.schema == "testdb" @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_mysql_tcp(self, get_connection): @@ -1587,10 +1587,10 @@ def test_hook_with_correct_parameters_mysql_tcp(self, get_connection): get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() - assert "mysql" == connection.conn_type - assert "127.0.0.1" == connection.host - assert 3200 != connection.port - assert "testdb" == connection.schema + assert connection.conn_type == "mysql" + assert connection.host == "127.0.0.1" + assert connection.port != 3200 + assert connection.schema == "testdb" def get_processor(): diff --git a/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py index 1a490dc9ca993..c3d9dc414b58f 100644 --- a/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py +++ b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py @@ -210,7 +210,7 @@ def test_get_transfer_job(self, get_conn): execute_method.return_value = TEST_TRANSFER_JOB res = self.gct_hook.get_transfer_job(job_name=TEST_TRANSFER_JOB_NAME, project_id=TEST_PROJECT_ID) assert res is not None - assert TEST_TRANSFER_JOB_NAME == res[NAME] + assert res[NAME] == TEST_TRANSFER_JOB_NAME get_method.assert_called_once_with(jobName=TEST_TRANSFER_JOB_NAME, projectId=TEST_PROJECT_ID) execute_method.assert_called_once_with(num_retries=5) @@ -603,7 +603,7 @@ def test_get_transfer_job(self, get_conn, mock_project_id): execute_method.return_value = TEST_TRANSFER_JOB res = self.gct_hook.get_transfer_job(job_name=TEST_TRANSFER_JOB_NAME) assert res is not None - assert TEST_TRANSFER_JOB_NAME == res[NAME] + assert res[NAME] == TEST_TRANSFER_JOB_NAME get_method.assert_called_once_with(jobName=TEST_TRANSFER_JOB_NAME, projectId="example-project") execute_method.assert_called_once_with(num_retries=5) @@ -788,9 +788,9 @@ def test_create_transfer_job(self, get_conn, mock_project_id): self.gct_hook.create_transfer_job(body=_without_key(TEST_BODY, PROJECT_ID)) assert ( - "The project id must be passed either as `projectId` key in `body` " + str(ctx.value) == "The project id must be passed either as `projectId` key in `body` " "parameter or as project_id " - "extra in Google Cloud connection definition. Both are not set!" == str(ctx.value) + "extra in Google Cloud connection definition. Both are not set!" 
) @mock.patch( @@ -809,9 +809,9 @@ def test_get_transfer_job(self, get_conn, mock_project_id): with pytest.raises(AirflowException) as ctx: self.gct_hook.get_transfer_job(job_name=TEST_TRANSFER_JOB_NAME) assert ( - "The project id must be passed either as keyword project_id " + str(ctx.value) == "The project id must be passed either as keyword project_id " "parameter or as project_id extra in Google Cloud connection definition. " - "Both are not set!" == str(ctx.value) + "Both are not set!" ) @mock.patch( @@ -837,8 +837,9 @@ def test_list_transfer_job(self, get_conn, mock_project_id): ) assert ( - "The project id must be passed either as `project_id` key in `filter` parameter or as " - "project_id extra in Google Cloud connection definition. Both are not set!" == str(ctx.value) + str(ctx.value) + == "The project id must be passed either as `project_id` key in `filter` parameter or as " + "project_id extra in Google Cloud connection definition. Both are not set!" ) @mock.patch( @@ -882,8 +883,9 @@ def test_update_transfer_job(self, get_conn, mock_project_id): ) assert ( - "The project id must be passed either as `projectId` key in `body` parameter or as project_id " - "extra in Google Cloud connection definition. Both are not set!" == str(ctx.value) + str(ctx.value) + == "The project id must be passed either as `projectId` key in `body` parameter or as project_id " + "extra in Google Cloud connection definition. Both are not set!" ) @mock.patch( @@ -900,8 +902,9 @@ def test_delete_transfer_job(self, get_conn, mock_project_id): self.gct_hook.delete_transfer_job(job_name=TEST_TRANSFER_JOB_NAME) assert ( - "The project id must be passed either as keyword project_id parameter or as project_id extra in " - "Google Cloud connection definition. Both are not set!" == str(ctx.value) + str(ctx.value) + == "The project id must be passed either as keyword project_id parameter or as project_id extra in " + "Google Cloud connection definition. Both are not set!" ) @mock.patch( @@ -927,6 +930,7 @@ def test_list_transfer_operation(self, get_conn, mock_project_id): ) assert ( - "The project id must be passed either as `project_id` key in `filter` parameter or as project_id " - "extra in Google Cloud connection definition. Both are not set!" == str(ctx.value) + str(ctx.value) + == "The project id must be passed either as `project_id` key in `filter` parameter or as project_id " + "extra in Google Cloud connection definition. Both are not set!" 
) diff --git a/providers/tests/google/cloud/hooks/test_compute_ssh.py b/providers/tests/google/cloud/hooks/test_compute_ssh.py index 6065ba0eacce7..ea1c43681e0f8 100644 --- a/providers/tests/google/cloud/hooks/test_compute_ssh.py +++ b/providers/tests/google/cloud/hooks/test_compute_ssh.py @@ -461,7 +461,7 @@ class CustomException(Exception): hook = ComputeEngineSSHHook(instance_name=TEST_INSTANCE_NAME, zone=TEST_ZONE) hook.get_conn() - assert 3 == mock_ssh_client.return_value.connect.call_count + assert mock_ssh_client.return_value.connect.call_count == 3 def test_read_configuration_from_connection(self): conn = Connection( @@ -483,16 +483,16 @@ def test_read_configuration_from_connection(self): with mock.patch.dict("os.environ", AIRFLOW_CONN_GCPSSH=conn_uri): hook = ComputeEngineSSHHook(gcp_conn_id="gcpssh") hook._load_connection_config() - assert "conn-instance-name" == hook.instance_name - assert "conn-host" == hook.hostname - assert "conn-user" == hook.user + assert hook.instance_name == "conn-instance-name" + assert hook.hostname == "conn-host" + assert hook.user == "conn-user" assert hook.use_internal_ip is True assert isinstance(hook.use_internal_ip, bool) assert hook.use_iap_tunnel is True assert isinstance(hook.use_iap_tunnel, bool) assert hook.use_oslogin is False assert isinstance(hook.use_oslogin, bool) - assert 4242 == hook.expire_time + assert hook.expire_time == 4242 assert isinstance(hook.expire_time, int) def test_read_configuration_from_connection_empty_config(self): @@ -506,14 +506,14 @@ def test_read_configuration_from_connection_empty_config(self): hook._load_connection_config() assert None is hook.instance_name assert None is hook.hostname - assert "root" == hook.user + assert hook.user == "root" assert False is hook.use_internal_ip assert isinstance(hook.use_internal_ip, bool) assert False is hook.use_iap_tunnel assert isinstance(hook.use_iap_tunnel, bool) assert False is hook.use_oslogin assert isinstance(hook.use_oslogin, bool) - assert 300 == hook.expire_time + assert hook.expire_time == 300 assert isinstance(hook.expire_time, int) @pytest.mark.parametrize( diff --git a/providers/tests/google/cloud/hooks/test_dataflow.py b/providers/tests/google/cloud/hooks/test_dataflow.py index eb69952e47cb5..24408fdc56656 100644 --- a/providers/tests/google/cloud/hooks/test_dataflow.py +++ b/providers/tests/google/cloud/hooks/test_dataflow.py @@ -1394,7 +1394,7 @@ def test_dataflow_job_wait_for_multiple_jobs_and_streaming_jobs(self): ) dataflow_job.wait_for_done() - assert 1 == mock_jobs_list.call_count + assert mock_jobs_list.call_count == 1 def test_dataflow_job_wait_for_single_jobs(self): job = { diff --git a/providers/tests/google/cloud/hooks/test_dataproc.py b/providers/tests/google/cloud/hooks/test_dataproc.py index b8236f72d11a7..b15d7855454e9 100644 --- a/providers/tests/google/cloud/hooks/test_dataproc.py +++ b/providers/tests/google/cloud/hooks/test_dataproc.py @@ -1084,7 +1084,7 @@ def test_add_labels(self): labels = {"key": "value"} self.builder.add_labels(labels) assert "key" in self.builder.job["job"]["labels"] - assert "value" == self.builder.job["job"]["labels"]["key"] + assert self.builder.job["job"]["labels"]["key"] == "value" def test_add_variables(self): variables = ["variable"] @@ -1099,7 +1099,7 @@ def test_add_args(self): def test_add_query(self): query = ["query"] self.builder.add_query(query) - assert {"queries": [query]} == self.builder.job["job"][self.job_type]["query_list"] + assert self.builder.job["job"][self.job_type]["query_list"] == {"queries": 
[query]} def test_add_query_uri(self): query_uri = "query_uri" diff --git a/providers/tests/google/cloud/hooks/test_functions.py b/providers/tests/google/cloud/hooks/test_functions.py index e52855caf84d2..03403f51ee916 100644 --- a/providers/tests/google/cloud/hooks/test_functions.py +++ b/providers/tests/google/cloud/hooks/test_functions.py @@ -86,7 +86,7 @@ def test_upload_function_zip_overridden_project_id(self, get_conn, requests_put) res = self.gcf_function_hook_no_project_id.upload_function_zip( project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST, location=GCF_LOCATION, zip_path="/tmp/path.zip" ) - assert "http://uploadHere" == res + assert res == "http://uploadHere" generate_upload_url_method.assert_called_once_with( parent="projects/example-project/locations/location" ) @@ -170,7 +170,7 @@ def test_get_function(self, get_conn): execute_method.return_value = {"name": "function"} res = self.gcf_function_hook.get_function(name=GCF_FUNCTION) assert res is not None - assert "function" == res["name"] + assert res["name"] == "function" get_method.assert_called_once_with(name="function") execute_method.assert_called_once_with(num_retries=5) @@ -227,7 +227,7 @@ def test_upload_function_zip(self, get_conn, requests_put, mock_project_id): zip_path="/tmp/path.zip", project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST, ) - assert "http://uploadHere" == res + assert res == "http://uploadHere" generate_upload_url_method.assert_called_once_with( parent="projects/example-project/locations/location" ) @@ -251,7 +251,7 @@ def test_upload_function_zip_overridden_project_id(self, get_conn, requests_put) res = self.gcf_function_hook.upload_function_zip( project_id="new-project", location=GCF_LOCATION, zip_path="/tmp/path.zip" ) - assert "http://uploadHere" == res + assert res == "http://uploadHere" generate_upload_url_method.assert_called_once_with( parent="projects/new-project/locations/location" ) diff --git a/providers/tests/google/cloud/hooks/test_kms.py b/providers/tests/google/cloud/hooks/test_kms.py index 0d3c33a880a30..0f30ab61c314d 100644 --- a/providers/tests/google/cloud/hooks/test_kms.py +++ b/providers/tests/google/cloud/hooks/test_kms.py @@ -120,7 +120,7 @@ def test_decrypt(self, mock_get_conn): timeout=None, metadata=(), ) - assert PLAINTEXT == result + assert result == PLAINTEXT @mock.patch("airflow.providers.google.cloud.hooks.kms.CloudKMSHook.get_conn") def test_decrypt_with_auth_data(self, mock_get_conn): @@ -137,4 +137,4 @@ def test_decrypt_with_auth_data(self, mock_get_conn): timeout=None, metadata=(), ) - assert PLAINTEXT == result + assert result == PLAINTEXT diff --git a/providers/tests/google/cloud/hooks/test_life_sciences.py b/providers/tests/google/cloud/hooks/test_life_sciences.py index 990b9331daa26..6a68378b716b7 100644 --- a/providers/tests/google/cloud/hooks/test_life_sciences.py +++ b/providers/tests/google/cloud/hooks/test_life_sciences.py @@ -295,6 +295,7 @@ def test_run_pipeline(self, get_conn_mock, mock_project_id): self.hook.run_pipeline(body={}, location=TEST_LOCATION) assert ( - "The project id must be passed either as keyword project_id parameter or as project_id extra in " - "Google Cloud connection definition. Both are not set!" == str(ctx.value) + str(ctx.value) + == "The project id must be passed either as keyword project_id parameter or as project_id extra in " + "Google Cloud connection definition. Both are not set!" 
) diff --git a/providers/tests/google/cloud/hooks/test_pubsub.py b/providers/tests/google/cloud/hooks/test_pubsub.py index 7e5f99bfeafc2..6e8240046b194 100644 --- a/providers/tests/google/cloud/hooks/test_pubsub.py +++ b/providers/tests/google/cloud/hooks/test_pubsub.py @@ -193,7 +193,7 @@ def test_create_nonexistent_subscription(self, mock_service): timeout=None, metadata=(), ) - assert TEST_SUBSCRIPTION == response + assert response == TEST_SUBSCRIPTION @mock.patch(PUBSUB_STRING.format("PubSubHook.subscriber_client")) def test_create_subscription_different_project_topic(self, mock_service): @@ -225,7 +225,7 @@ def test_create_subscription_different_project_topic(self, mock_service): metadata=(), ) - assert TEST_SUBSCRIPTION == response + assert response == TEST_SUBSCRIPTION @mock.patch(PUBSUB_STRING.format("PubSubHook.subscriber_client")) def test_delete_subscription(self, mock_service): @@ -310,7 +310,7 @@ def test_create_subscription_with_ack_deadline(self, mock_service): timeout=None, metadata=(), ) - assert TEST_SUBSCRIPTION == response + assert response == TEST_SUBSCRIPTION @mock.patch(PUBSUB_STRING.format("PubSubHook.subscriber_client")) def test_create_subscription_with_filter(self, mock_service): @@ -341,7 +341,7 @@ def test_create_subscription_with_filter(self, mock_service): timeout=None, metadata=(), ) - assert TEST_SUBSCRIPTION == response + assert response == TEST_SUBSCRIPTION @mock.patch(PUBSUB_STRING.format("PubSubHook.subscriber_client")) def test_create_subscription_failifexists(self, mock_service): @@ -372,7 +372,7 @@ def test_create_subscription_nofailifexists(self, mock_service): response = self.pubsub_hook.create_subscription( project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION ) - assert TEST_SUBSCRIPTION == response + assert response == TEST_SUBSCRIPTION @mock.patch(PUBSUB_STRING.format("PubSubHook.get_conn")) def test_publish(self, mock_service): @@ -436,7 +436,7 @@ def test_pull_no_messages(self, mock_service): timeout=None, metadata=(), ) - assert [] == response + assert response == [] @pytest.mark.parametrize( "exception", diff --git a/providers/tests/google/cloud/log/test_gcs_task_handler.py b/providers/tests/google/cloud/log/test_gcs_task_handler.py index eae63caff3946..3d179e7de1b3f 100644 --- a/providers/tests/google/cloud/log/test_gcs_task_handler.py +++ b/providers/tests/google/cloud/log/test_gcs_task_handler.py @@ -109,7 +109,7 @@ def test_should_read_logs_from_remote(self, mock_blob, mock_client, mock_creds, ) assert "*** Found remote logs:\n*** * gs://bucket/remote/log/location/1.log\n" in logs assert logs.endswith("CONTENT") - assert {"end_of_log": True, "log_pos": 7} == metadata + assert metadata == {"end_of_log": True, "log_pos": 7} @mock.patch( "airflow.providers.google.cloud.log.gcs_task_handler.get_credentials_and_project_id", diff --git a/providers/tests/google/cloud/log/test_gcs_task_handler_system.py b/providers/tests/google/cloud/log/test_gcs_task_handler_system.py index cdb306a0d18ba..c87d3318098dd 100644 --- a/providers/tests/google/cloud/log/test_gcs_task_handler_system.py +++ b/providers/tests/google/cloud/log/test_gcs_task_handler_system.py @@ -75,8 +75,8 @@ def test_should_read_logs(self, session): AIRFLOW__CORE__DAGS_FOLDER=example_complex.__file__, GOOGLE_APPLICATION_CREDENTIALS=resolve_full_gcp_key_path(GCP_GCS_KEY), ): - assert 0 == subprocess.Popen(["airflow", "dags", "trigger", "example_complex"]).wait() - assert 0 == subprocess.Popen(["airflow", "scheduler", "--num-runs", "1"]).wait() + assert 
subprocess.Popen(["airflow", "dags", "trigger", "example_complex"]).wait() == 0 + assert subprocess.Popen(["airflow", "scheduler", "--num-runs", "1"]).wait() == 0 ti = session.query(TaskInstance).filter(TaskInstance.task_id == "create_entry_group").first() dag = DagBag(dag_folder=example_complex.__file__).dags["example_complex"] diff --git a/providers/tests/google/cloud/log/test_stackdriver_task_handler.py b/providers/tests/google/cloud/log/test_stackdriver_task_handler.py index 4f309c9f34348..8dd92a13b2a8f 100644 --- a/providers/tests/google/cloud/log/test_stackdriver_task_handler.py +++ b/providers/tests/google/cloud/log/test_stackdriver_task_handler.py @@ -215,8 +215,8 @@ def test_should_read_logs_for_all_try(self, mock_client, mock_get_creds_and_proj page_token=None, ) ) - assert [(("default-hostname", "MSG1\nMSG2"),)] == logs - assert [{"end_of_log": True}] == metadata + assert logs == [(("default-hostname", "MSG1\nMSG2"),)] + assert metadata == [{"end_of_log": True}] @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id") @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client") @@ -247,8 +247,8 @@ def test_should_read_logs_for_task_with_quote(self, mock_client, mock_get_creds_ page_token=None, ) ) - assert [(("default-hostname", "MSG1\nMSG2"),)] == logs - assert [{"end_of_log": True}] == metadata + assert logs == [(("default-hostname", "MSG1\nMSG2"),)] + assert metadata == [{"end_of_log": True}] @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id") @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client") @@ -278,8 +278,8 @@ def test_should_read_logs_for_single_try(self, mock_client, mock_get_creds_and_p page_token=None, ) ) - assert [(("default-hostname", "MSG1\nMSG2"),)] == logs - assert [{"end_of_log": True}] == metadata + assert logs == [(("default-hostname", "MSG1\nMSG2"),)] + assert metadata == [{"end_of_log": True}] @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id") @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client") @@ -310,8 +310,8 @@ def test_should_read_logs_with_pagination(self, mock_client, mock_get_creds_and_ page_token=None, ) ) - assert [(("default-hostname", "MSG1\nMSG2"),)] == logs - assert [{"end_of_log": False, "next_page_token": "TOKEN1"}] == metadata1 + assert logs == [(("default-hostname", "MSG1\nMSG2"),)] + assert metadata1 == [{"end_of_log": False, "next_page_token": "TOKEN1"}] mock_client.return_value.list_log_entries.return_value.next_page_token = None logs, metadata2 = stackdriver_task_handler.read(self.ti, 3, metadata1[0]) @@ -332,8 +332,8 @@ def test_should_read_logs_with_pagination(self, mock_client, mock_get_creds_and_ page_token="TOKEN1", ) ) - assert [(("default-hostname", "MSG3\nMSG4"),)] == logs - assert [{"end_of_log": True}] == metadata2 + assert logs == [(("default-hostname", "MSG3\nMSG4"),)] + assert metadata2 == [{"end_of_log": True}] @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id") @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client") @@ -347,8 +347,8 @@ def test_should_read_logs_with_download(self, mock_client, mock_get_creds_and_pr stackdriver_task_handler = self._setup_handler() logs, metadata1 = stackdriver_task_handler.read(self.ti, 3, {"download_logs": True}) 
- assert [(("default-hostname", "MSG1\nMSG2\nMSG3\nMSG4"),)] == logs - assert [{"end_of_log": True}] == metadata1 + assert logs == [(("default-hostname", "MSG1\nMSG2\nMSG3\nMSG4"),)] + assert metadata1 == [{"end_of_log": True}] @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id") @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client") @@ -389,8 +389,8 @@ def test_should_read_logs_with_custom_resources(self, mock_client, mock_get_cred page_token=None, ) ) - assert [(("default-hostname", "TEXT\nTEXT"),)] == logs - assert [{"end_of_log": True}] == metadata + assert logs == [(("default-hostname", "TEXT\nTEXT"),)] + assert metadata == [{"end_of_log": True}] @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id") @mock.patch("airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client") @@ -423,9 +423,9 @@ def test_should_return_valid_external_url(self, mock_client, mock_get_creds_and_ parsed_url = urlsplit(url) parsed_qs = parse_qs(parsed_url.query) - assert "https" == parsed_url.scheme - assert "console.cloud.google.com" == parsed_url.netloc - assert "/logs/viewer" == parsed_url.path + assert parsed_url.scheme == "https" + assert parsed_url.netloc == "console.cloud.google.com" + assert parsed_url.path == "/logs/viewer" assert {"project", "interval", "resource", "advancedFilter"} == set(parsed_qs.keys()) assert "global" in parsed_qs["resource"] diff --git a/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py b/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py index 69da68ceea2f1..ad95a487a0892 100644 --- a/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py +++ b/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py @@ -64,8 +64,8 @@ def test_should_support_key_auth(self, session): AIRFLOW__CORE__LOAD_EXAMPLES="false", AIRFLOW__CORE__DAGS_FOLDER=example_complex.__file__, ): - assert 0 == subprocess.Popen(["airflow", "dags", "trigger", "example_complex"]).wait() - assert 0 == subprocess.Popen(["airflow", "scheduler", "--num-runs", "1"]).wait() + assert subprocess.Popen(["airflow", "dags", "trigger", "example_complex"]).wait() == 0 + assert subprocess.Popen(["airflow", "scheduler", "--num-runs", "1"]).wait() == 0 ti = session.query(TaskInstance).filter(TaskInstance.task_id == "create_entry_group").first() self.assert_remote_logs("terminated with exit code 0", ti) @@ -80,8 +80,8 @@ def test_should_support_adc(self, session): AIRFLOW__CORE__DAGS_FOLDER=example_complex.__file__, GOOGLE_APPLICATION_CREDENTIALS=resolve_full_gcp_key_path(GCP_STACKDRIVER), ): - assert 0 == subprocess.Popen(["airflow", "dags", "trigger", "example_complex"]).wait() - assert 0 == subprocess.Popen(["airflow", "scheduler", "--num-runs", "1"]).wait() + assert subprocess.Popen(["airflow", "dags", "trigger", "example_complex"]).wait() == 0 + assert subprocess.Popen(["airflow", "scheduler", "--num-runs", "1"]).wait() == 0 ti = session.query(TaskInstance).filter(TaskInstance.task_id == "create_entry_group").first() self.assert_remote_logs("terminated with exit code 0", ti) diff --git a/providers/tests/google/cloud/operators/test_datacatalog.py b/providers/tests/google/cloud/operators/test_datacatalog.py index 5f83b81fd7f13..c1f4b12656f08 100644 --- a/providers/tests/google/cloud/operators/test_datacatalog.py +++ b/providers/tests/google/cloud/operators/test_datacatalog.py @@ -185,7 
+185,7 @@ def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: }, ) - assert TEST_ENTRY_DICT == result + assert result == TEST_ENTRY_DICT @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook") @mock.patch(BASE_PATH.format("CloudDataCatalogCreateEntryOperator.xcom_push")) @@ -240,7 +240,7 @@ def test_assert_valid_hook_call_when_exists(self, mock_xcom, mock_hook) -> None: "project_id": TEST_PROJECT_ID, }, ) - assert TEST_ENTRY_DICT == result + assert result == TEST_ENTRY_DICT class TestCloudDataCatalogCreateEntryGroupOperator: @@ -337,7 +337,7 @@ def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: "project_id": TEST_PROJECT_ID, }, ) - assert TEST_TAG_DICT == result + assert result == TEST_TAG_DICT class TestCloudDataCatalogCreateTagTemplateOperator: @@ -383,7 +383,7 @@ def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: "project_id": TEST_PROJECT_ID, }, ) - assert {**result, **TEST_TAG_TEMPLATE_DICT} == result + assert result == {**result, **TEST_TAG_TEMPLATE_DICT} class TestCloudDataCatalogCreateTagTemplateFieldOperator: @@ -431,7 +431,7 @@ def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: "project_id": TEST_PROJECT_ID, }, ) - assert {**result, **TEST_TAG_TEMPLATE_FIELD_DICT} == result + assert result == {**result, **TEST_TAG_TEMPLATE_FIELD_DICT} class TestCloudDataCatalogDeleteEntryOperator: diff --git a/providers/tests/google/cloud/operators/test_dataproc.py b/providers/tests/google/cloud/operators/test_dataproc.py index bb4a9d8287273..d05634f80ff76 100644 --- a/providers/tests/google/cloud/operators/test_dataproc.py +++ b/providers/tests/google/cloud/operators/test_dataproc.py @@ -553,7 +553,7 @@ def test_build(self): driver_pool_size=2, ) cluster = generator.make() - assert CONFIG == cluster + assert cluster == CONFIG def test_build_with_custom_image_family(self): generator = ClusterGenerator( @@ -592,7 +592,7 @@ def test_build_with_custom_image_family(self): enable_component_gateway=True, ) cluster = generator.make() - assert CONFIG_WITH_CUSTOM_IMAGE_FAMILY == cluster + assert cluster == CONFIG_WITH_CUSTOM_IMAGE_FAMILY def test_build_with_flex_migs(self): generator = ClusterGenerator( @@ -642,7 +642,7 @@ def test_build_with_flex_migs(self): ), ) cluster = generator.make() - assert CONFIG_WITH_FLEX_MIG == cluster + assert cluster == CONFIG_WITH_FLEX_MIG def test_build_with_gpu_accelerator(self): generator = ClusterGenerator( @@ -687,7 +687,7 @@ def test_build_with_gpu_accelerator(self): customer_managed_key="customer_managed_key", ) cluster = generator.make() - assert CONFIG_WITH_GPU_ACCELERATOR == cluster + assert cluster == CONFIG_WITH_GPU_ACCELERATOR def test_build_with_default_value_for_internal_ip_only(self): generator = ClusterGenerator(project_id="project_id") diff --git a/providers/tests/google/cloud/operators/test_mlengine.py b/providers/tests/google/cloud/operators/test_mlengine.py index c10a59f94b182..456961242d7be 100644 --- a/providers/tests/google/cloud/operators/test_mlengine.py +++ b/providers/tests/google/cloud/operators/test_mlengine.py @@ -232,7 +232,7 @@ def test_invalid_model_origin(self): task_args["model_name"] = "fake_model" with pytest.raises(AirflowException) as ctx, pytest.warns(AirflowProviderDeprecationWarning): MLEngineStartBatchPredictionJobOperator(**task_args).execute(None) - assert "Ambiguous model origin: Both uri and model/version name are provided." 
== str(ctx.value) + assert str(ctx.value) == "Ambiguous model origin: Both uri and model/version name are provided." # Test that both uri and model/version is given task_args = self.BATCH_PREDICTION_DEFAULT_ARGS.copy() @@ -241,7 +241,7 @@ def test_invalid_model_origin(self): task_args["version_name"] = "fake_version" with pytest.raises(AirflowException) as ctx, pytest.warns(AirflowProviderDeprecationWarning): MLEngineStartBatchPredictionJobOperator(**task_args).execute(None) - assert "Ambiguous model origin: Both uri and model/version name are provided." == str(ctx.value) + assert str(ctx.value) == "Ambiguous model origin: Both uri and model/version name are provided." # Test that a version is given without a model task_args = self.BATCH_PREDICTION_DEFAULT_ARGS.copy() @@ -249,8 +249,8 @@ def test_invalid_model_origin(self): with pytest.raises(AirflowException) as ctx, pytest.warns(AirflowProviderDeprecationWarning): MLEngineStartBatchPredictionJobOperator(**task_args).execute(None) assert ( - "Missing model: Batch prediction expects a model " - "name when a version name is provided." == str(ctx.value) + str(ctx.value) == "Missing model: Batch prediction expects a model " + "name when a version name is provided." ) # Test that none of uri, model, model/version is given @@ -258,8 +258,8 @@ def test_invalid_model_origin(self): with pytest.raises(AirflowException) as ctx, pytest.warns(AirflowProviderDeprecationWarning): MLEngineStartBatchPredictionJobOperator(**task_args).execute(None) assert ( - "Missing model origin: Batch prediction expects a " - "model, a model & version combination, or a URI to a savedModel." == str(ctx.value) + str(ctx.value) == "Missing model origin: Batch prediction expects a " + "model, a model & version combination, or a URI to a savedModel." 
) @patch(MLENGINE_AI_PATH.format("MLEngineHook")) @@ -301,7 +301,7 @@ def test_failed_job_error(self, mock_hook): with pytest.raises(RuntimeError) as ctx, pytest.warns(AirflowProviderDeprecationWarning): MLEngineStartBatchPredictionJobOperator(**task_args).execute(None) - assert "A failure message" == str(ctx.value) + assert str(ctx.value) == "A failure message" @pytest.mark.db_test def test_templating(self, create_task_instance_of_operator, session): @@ -1209,7 +1209,7 @@ def test_create_training_job_should_throw_exception_when_job_failed(self, mock_h mock_hook.return_value.create_job_without_waiting_result.assert_called_once_with( project_id="test-project", body=self.TRAINING_INPUT ) - assert "A failure message" == str(ctx.value) + assert str(ctx.value) == "A failure message" @pytest.mark.db_test def test_templating(self, create_task_instance_of_operator, session): diff --git a/providers/tests/google/cloud/operators/test_pubsub.py b/providers/tests/google/cloud/operators/test_pubsub.py index ed65f165776bb..9b6f0dce393c6 100644 --- a/providers/tests/google/cloud/operators/test_pubsub.py +++ b/providers/tests/google/cloud/operators/test_pubsub.py @@ -279,7 +279,7 @@ def test_execute_no_messages(self, mock_hook): ) mock_hook.return_value.pull.return_value = [] - assert [] == operator.execute({}) + assert operator.execute({}) == [] @mock.patch("airflow.providers.google.cloud.operators.pubsub.PubSubHook") def test_execute_with_ack_messages(self, mock_hook): diff --git a/providers/tests/google/cloud/operators/test_stackdriver.py b/providers/tests/google/cloud/operators/test_stackdriver.py index f4038bdf1e405..2b93654ffb343 100644 --- a/providers/tests/google/cloud/operators/test_stackdriver.py +++ b/providers/tests/google/cloud/operators/test_stackdriver.py @@ -105,7 +105,7 @@ def test_execute(self, mock_hook): timeout=None, metadata=(), ) - assert [ + assert result == [ { "combiner": 0, "conditions": [], @@ -115,7 +115,7 @@ def test_execute(self, mock_hook): "severity": 0, "user_labels": {}, } - ] == result + ] class TestStackdriverEnableAlertPoliciesOperator: diff --git a/providers/tests/google/cloud/operators/test_tasks.py b/providers/tests/google/cloud/operators/test_tasks.py index 6d41433187cd0..ec6976b7ae23a 100644 --- a/providers/tests/google/cloud/operators/test_tasks.py +++ b/providers/tests/google/cloud/operators/test_tasks.py @@ -58,7 +58,7 @@ def test_create_queue(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert {"name": FULL_QUEUE_PATH, "state": 0} == result + assert result == {"name": FULL_QUEUE_PATH, "state": 0} mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -82,7 +82,7 @@ def test_update_queue(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert {"name": FULL_QUEUE_PATH, "state": 0} == result + assert result == {"name": FULL_QUEUE_PATH, "state": 0} mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -107,7 +107,7 @@ def test_get_queue(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert {"name": FULL_QUEUE_PATH, "state": 0} == result + assert result == {"name": FULL_QUEUE_PATH, "state": 0} mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -130,7 +130,7 @@ def test_list_queues(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert [{"name": FULL_QUEUE_PATH, "state": 0}] == result + assert result == [{"name": FULL_QUEUE_PATH, "state": 0}] 
mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -176,7 +176,7 @@ def test_delete_queue(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert {"name": FULL_QUEUE_PATH, "state": 0} == result + assert result == {"name": FULL_QUEUE_PATH, "state": 0} mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -199,7 +199,7 @@ def test_pause_queue(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert {"name": FULL_QUEUE_PATH, "state": 0} == result + assert result == {"name": FULL_QUEUE_PATH, "state": 0} mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -222,7 +222,7 @@ def test_resume_queue(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert {"name": FULL_QUEUE_PATH, "state": 0} == result + assert result == {"name": FULL_QUEUE_PATH, "state": 0} mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -247,13 +247,13 @@ def test_create_task(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert { + assert result == { "app_engine_http_request": {"body": "", "headers": {}, "http_method": 0, "relative_uri": ""}, "dispatch_count": 0, "name": "", "response_count": 0, "view": 0, - } == result + } mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -281,13 +281,13 @@ def test_get_task(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert { + assert result == { "app_engine_http_request": {"body": "", "headers": {}, "http_method": 0, "relative_uri": ""}, "dispatch_count": 0, "name": "", "response_count": 0, "view": 0, - } == result + } mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -312,7 +312,7 @@ def test_list_tasks(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert [ + assert result == [ { "app_engine_http_request": { "body": "", @@ -325,7 +325,7 @@ def test_list_tasks(self, mock_hook): "response_count": 0, "view": 0, } - ] == result + ] mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -377,13 +377,13 @@ def test_run_task(self, mock_hook): result = operator.execute(context=mock.MagicMock()) - assert { + assert result == { "app_engine_http_request": {"body": "", "headers": {}, "http_method": 0, "relative_uri": ""}, "dispatch_count": 0, "name": "", "response_count": 0, "view": 0, - } == result + } mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, diff --git a/providers/tests/google/cloud/operators/test_translate.py b/providers/tests/google/cloud/operators/test_translate.py index 45af2dae92890..7af9803ea226f 100644 --- a/providers/tests/google/cloud/operators/test_translate.py +++ b/providers/tests/google/cloud/operators/test_translate.py @@ -76,14 +76,14 @@ def test_minimal_green_path(self, mock_hook): source_language=None, model="base", ) - assert [ + assert return_value == [ { "translatedText": "Yellowing self Gęśle", "detectedSourceLanguage": "pl", "model": "base", "input": "zażółć gęślą jaźń", } - ] == return_value + ] class TestTranslateText: diff --git a/providers/tests/google/cloud/operators/test_translate_speech.py b/providers/tests/google/cloud/operators/test_translate_speech.py index 8e6beb79b9702..0175232df2fc2 100644 --- a/providers/tests/google/cloud/operators/test_translate_speech.py +++ 
b/providers/tests/google/cloud/operators/test_translate_speech.py @@ -90,14 +90,14 @@ def test_minimal_green_path(self, mock_translate_hook, mock_speech_hook): source_language=None, model="base", ) - assert [ + assert return_value == [ { "translatedText": "sprawdzić wynik rozpoznawania mowy", "detectedSourceLanguage": "en", "model": "base", "input": "test speech recognition result", } - ] == return_value + ] @mock.patch("airflow.providers.google.cloud.operators.translate_speech.CloudSpeechToTextHook") @mock.patch("airflow.providers.google.cloud.operators.translate_speech.CloudTranslateHook") diff --git a/providers/tests/google/cloud/operators/test_vision.py b/providers/tests/google/cloud/operators/test_vision.py index 38eb74facad0e..03c961bb69951 100644 --- a/providers/tests/google/cloud/operators/test_vision.py +++ b/providers/tests/google/cloud/operators/test_vision.py @@ -93,7 +93,7 @@ def test_already_exists(self, mock_hook): task_id="id", ) result = op.execute(None) - assert PRODUCTSET_ID_TEST == result + assert result == PRODUCTSET_ID_TEST class TestCloudVisionProductSetUpdate: @@ -196,7 +196,7 @@ def test_already_exists(self, mock_hook): task_id="id", ) result = op.execute(None) - assert PRODUCT_ID_TEST == result + assert result == PRODUCT_ID_TEST class TestCloudVisionProductGet: diff --git a/providers/tests/google/cloud/secrets/test_secret_manager.py b/providers/tests/google/cloud/secrets/test_secret_manager.py index c56664f23e3f3..4740e917b4bf2 100644 --- a/providers/tests/google/cloud/secrets/test_secret_manager.py +++ b/providers/tests/google/cloud/secrets/test_secret_manager.py @@ -108,7 +108,7 @@ def test_get_conn_uri(self, mock_client_callable, mock_get_creds, connections_pr secrets_manager_backend = CloudSecretManagerBackend(connections_prefix=connections_prefix) secret_id = secrets_manager_backend.build_path(connections_prefix, CONN_ID, SEP) returned_uri = secrets_manager_backend.get_conn_value(conn_id=CONN_ID) - assert CONN_URI == returned_uri + assert returned_uri == CONN_URI mock_client.secret_version_path.assert_called_once_with(PROJECT_ID, secret_id, "latest") @mock.patch(MODULE_NAME + ".get_credentials_and_project_id") @@ -153,7 +153,7 @@ def test_get_variable(self, mock_client_callable, mock_get_creds, variables_pref secrets_manager_backend = CloudSecretManagerBackend(variables_prefix=variables_prefix) secret_id = secrets_manager_backend.build_path(variables_prefix, VAR_KEY, SEP) returned_uri = secrets_manager_backend.get_variable(VAR_KEY) - assert VAR_VALUE == returned_uri + assert returned_uri == VAR_VALUE mock_client.secret_version_path.assert_called_once_with(PROJECT_ID, secret_id, "latest") @pytest.mark.parametrize("config_prefix", ["airflow-config", "config", "airflow"]) @@ -171,7 +171,7 @@ def test_get_config(self, mock_client_callable, mock_get_creds, config_prefix): secrets_manager_backend = CloudSecretManagerBackend(config_prefix=config_prefix) secret_id = secrets_manager_backend.build_path(config_prefix, CONFIG_KEY, SEP) returned_val = secrets_manager_backend.get_config(CONFIG_KEY) - assert CONFIG_VALUE == returned_val + assert returned_val == CONFIG_VALUE mock_client.secret_version_path.assert_called_once_with(PROJECT_ID, secret_id, "latest") @pytest.mark.parametrize("variables_prefix", ["airflow-variables", "variables", "airflow"]) @@ -191,7 +191,7 @@ def test_get_variable_override_project_id(self, mock_client_callable, mock_get_c ) secret_id = secrets_manager_backend.build_path(variables_prefix, VAR_KEY, SEP) returned_uri = 
secrets_manager_backend.get_variable(VAR_KEY) - assert VAR_VALUE == returned_uri + assert returned_uri == VAR_VALUE mock_client.secret_version_path.assert_called_once_with(OVERRIDDEN_PROJECT_ID, secret_id, "latest") @mock.patch(MODULE_NAME + ".get_credentials_and_project_id") diff --git a/providers/tests/google/cloud/transfers/test_mssql_to_gcs.py b/providers/tests/google/cloud/transfers/test_mssql_to_gcs.py index 5ee8efe5a45ae..18ccb502127a4 100644 --- a/providers/tests/google/cloud/transfers/test_mssql_to_gcs.py +++ b/providers/tests/google/cloud/transfers/test_mssql_to_gcs.py @@ -110,10 +110,10 @@ def test_exec_success_json(self, gcs_hook_mock_class, mssql_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert JSON_FILENAME.format(0) == obj - assert "application/json" == mime_type - assert GZIP == gzip + assert mime_type == "application/json" + assert gzip == GZIP with open(tmp_filename, "rb") as file: assert b"".join(NDJSON_LINES) == file.read() @@ -139,9 +139,9 @@ def test_file_splitting(self, gcs_hook_mock_class, mssql_hook_mock_class): } def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket - assert "application/json" == mime_type - assert GZIP == gzip + assert bucket == BUCKET + assert mime_type == "application/json" + assert gzip == GZIP with open(tmp_filename, "rb") as file: assert expected_upload[obj] == file.read() @@ -187,4 +187,4 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == gcs_hook_mock.upload.call_count + assert gcs_hook_mock.upload.call_count == 2 diff --git a/providers/tests/google/cloud/transfers/test_mysql_to_gcs.py b/providers/tests/google/cloud/transfers/test_mysql_to_gcs.py index 515014dbda878..01c3498c954ba 100644 --- a/providers/tests/google/cloud/transfers/test_mysql_to_gcs.py +++ b/providers/tests/google/cloud/transfers/test_mysql_to_gcs.py @@ -130,9 +130,9 @@ def test_exec_success_json(self, gcs_hook_mock_class, mysql_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert JSON_FILENAME.format(0) == obj - assert "application/json" == mime_type + assert mime_type == "application/json" assert not gzip with open(tmp_filename, "rb") as file: assert b"".join(NDJSON_LINES) == file.read() @@ -164,9 +164,9 @@ def test_exec_success_csv(self, gcs_hook_mock_class, mysql_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert CSV_FILENAME.format(0) == obj - assert "text/csv" == mime_type + assert mime_type == "text/csv" assert not gzip with open(tmp_filename, "rb") as file: assert b"".join(CSV_LINES) == file.read() @@ -199,9 +199,9 @@ def test_exec_success_csv_ensure_utc(self, gcs_hook_mock_class, mysql_hook_mock_ gcs_hook_mock = gcs_hook_mock_class.return_value def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert CSV_FILENAME.format(0) == obj - assert "text/csv" == mime_type + assert mime_type == "text/csv" assert not gzip with 
open(tmp_filename, "rb") as file: assert b"".join(CSV_LINES) == file.read() @@ -234,9 +234,9 @@ def test_exec_success_csv_with_delimiter(self, gcs_hook_mock_class, mysql_hook_m gcs_hook_mock = gcs_hook_mock_class.return_value def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert CSV_FILENAME.format(0) == obj - assert "text/csv" == mime_type + assert mime_type == "text/csv" assert not gzip with open(tmp_filename, "rb") as file: assert b"".join(CSV_LINES_PIPE_DELIMITED) == file.read() @@ -263,8 +263,8 @@ def test_file_splitting(self, gcs_hook_mock_class, mysql_hook_mock_class): } def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket - assert "application/json" == mime_type + assert bucket == BUCKET + assert mime_type == "application/json" assert not gzip with open(tmp_filename, "rb") as file: assert expected_upload[obj] == file.read() @@ -304,7 +304,7 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == gcs_hook_mock.upload.call_count + assert gcs_hook_mock.upload.call_count == 2 @mock.patch("airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook") @mock.patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") @@ -335,7 +335,7 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == gcs_hook_mock.upload.call_count + assert gcs_hook_mock.upload.call_count == 2 @mock.patch("airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook") @mock.patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") diff --git a/providers/tests/google/cloud/transfers/test_oracle_to_gcs.py b/providers/tests/google/cloud/transfers/test_oracle_to_gcs.py index ce160efdd4ed8..b2560c1f25561 100644 --- a/providers/tests/google/cloud/transfers/test_oracle_to_gcs.py +++ b/providers/tests/google/cloud/transfers/test_oracle_to_gcs.py @@ -71,10 +71,10 @@ def test_exec_success_json(self, gcs_hook_mock_class, oracle_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert JSON_FILENAME.format(0) == obj - assert "application/json" == mime_type - assert GZIP == gzip + assert mime_type == "application/json" + assert gzip == GZIP with open(tmp_filename, "rb") as file: assert b"".join(NDJSON_LINES) == file.read() @@ -100,9 +100,9 @@ def test_file_splitting(self, gcs_hook_mock_class, oracle_hook_mock_class): } def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket - assert "application/json" == mime_type - assert GZIP == gzip + assert bucket == BUCKET + assert mime_type == "application/json" + assert gzip == GZIP with open(tmp_filename, "rb") as file: assert expected_upload[obj] == file.read() @@ -140,4 +140,4 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == gcs_hook_mock.upload.call_count + assert gcs_hook_mock.upload.call_count == 2 diff --git a/providers/tests/google/cloud/transfers/test_postgres_to_gcs.py b/providers/tests/google/cloud/transfers/test_postgres_to_gcs.py index d5d5d0cedb98e..dbc68935b687a 100644 --- 
a/providers/tests/google/cloud/transfers/test_postgres_to_gcs.py +++ b/providers/tests/google/cloud/transfers/test_postgres_to_gcs.py @@ -93,9 +93,9 @@ def test_init(self): assert op.filename == FILENAME def _assert_uploaded_file_content(self, bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert FILENAME.format(0) == obj - assert "application/json" == mime_type + assert mime_type == "application/json" assert not gzip with open(tmp_filename, "rb") as file: assert b"".join(NDJSON_LINES) == file.read() @@ -180,8 +180,8 @@ def test_file_splitting(self, gcs_hook_mock_class): } def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket - assert "application/json" == mime_type + assert bucket == BUCKET + assert mime_type == "application/json" assert not gzip with open(tmp_filename, "rb") as file: assert expected_upload[obj] == file.read() @@ -206,7 +206,7 @@ def test_schema_file(self, gcs_hook_mock_class): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): if obj == SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: - assert SCHEMA_JSON == file.read() + assert file.read() == SCHEMA_JSON gcs_hook_mock.upload.side_effect = _assert_upload @@ -216,4 +216,4 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == gcs_hook_mock.upload.call_count + assert gcs_hook_mock.upload.call_count == 2 diff --git a/providers/tests/google/cloud/transfers/test_salesforce_to_gcs.py b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs.py index 7d4a6b2d6efb6..85d056b30c9d5 100644 --- a/providers/tests/google/cloud/transfers/test_salesforce_to_gcs.py +++ b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs.py @@ -85,4 +85,4 @@ def test_execute(self, mock_make_query, mock_write_object_to_file, mock_upload): bucket_name=GCS_BUCKET, object_name=GCS_OBJECT_PATH, filename=mock.ANY, gzip=False ) - assert EXPECTED_GCS_URI == result + assert result == EXPECTED_GCS_URI diff --git a/providers/tests/google/cloud/transfers/test_trino_to_gcs.py b/providers/tests/google/cloud/transfers/test_trino_to_gcs.py index 09da568f6a81b..14aaf2cda9716 100644 --- a/providers/tests/google/cloud/transfers/test_trino_to_gcs.py +++ b/providers/tests/google/cloud/transfers/test_trino_to_gcs.py @@ -64,9 +64,9 @@ def test_init(self): @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") def test_save_as_json(self, mock_gcs_hook, mock_trino_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert FILENAME.format(0) == obj - assert "application/json" == mime_type + assert mime_type == "application/json" assert not gzip with open(tmp_filename, "rb") as file: assert b"".join(NDJSON_LINES) == file.read() @@ -118,8 +118,8 @@ def test_save_as_json_with_file_splitting(self, mock_gcs_hook, mock_trino_hook): } def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket - assert "application/json" == mime_type + assert bucket == BUCKET + assert mime_type == "application/json" assert not gzip with open(tmp_filename, "rb") as file: assert expected_upload[obj] == file.read() @@ -160,7 +160,7 @@ def test_save_as_json_with_schema_file(self, mock_gcs_hook, mock_trino_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): if obj == 
SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: - assert SCHEMA_JSON == file.read() + assert file.read() == SCHEMA_JSON mock_gcs_hook.return_value.upload.side_effect = _assert_upload @@ -191,15 +191,15 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == mock_gcs_hook.return_value.upload.call_count + assert mock_gcs_hook.return_value.upload.call_count == 2 @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") @patch("airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoHook") def test_save_as_csv(self, mock_trino_hook, mock_gcs_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert FILENAME.format(0) == obj - assert "text/csv" == mime_type + assert mime_type == "text/csv" assert not gzip with open(tmp_filename, "rb") as file: assert b"".join(CSV_LINES) == file.read() @@ -252,8 +252,8 @@ def test_save_as_csv_with_file_splitting(self, mock_gcs_hook, mock_trino_hook): } def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket - assert "text/csv" == mime_type + assert bucket == BUCKET + assert mime_type == "text/csv" assert not gzip with open(tmp_filename, "rb") as file: assert expected_upload[obj] == file.read() @@ -295,7 +295,7 @@ def test_save_as_csv_with_schema_file(self, mock_gcs_hook, mock_trino_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): if obj == SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: - assert SCHEMA_JSON == file.read() + assert file.read() == SCHEMA_JSON mock_gcs_hook.return_value.upload.side_effect = _assert_upload @@ -324,4 +324,4 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == mock_gcs_hook.return_value.upload.call_count + assert mock_gcs_hook.return_value.upload.call_count == 2 diff --git a/providers/tests/google/cloud/utils/test_credentials_provider.py b/providers/tests/google/cloud/utils/test_credentials_provider.py index 2e6873775745a..91a5cac5f0609 100644 --- a/providers/tests/google/cloud/utils/test_credentials_provider.py +++ b/providers/tests/google/cloud/utils/test_credentials_provider.py @@ -105,17 +105,17 @@ class TestHelper: def test_build_gcp_conn_path(self): value = "test" conn = build_gcp_conn(key_file_path=value) - assert "google-cloud-platform://?key_path=test" == conn + assert conn == "google-cloud-platform://?key_path=test" def test_build_gcp_conn_scopes(self): value = ["test", "test2"] conn = build_gcp_conn(scopes=value) - assert "google-cloud-platform://?scope=test%2Ctest2" == conn + assert conn == "google-cloud-platform://?scope=test%2Ctest2" def test_build_gcp_conn_project(self): value = "test" conn = build_gcp_conn(project_id=value) - assert "google-cloud-platform://?projects=test" == conn + assert conn == "google-cloud-platform://?projects=test" class TestProvideGcpCredentials: @@ -188,7 +188,7 @@ def test_get_credentials_and_project_id_with_default_auth(self, mock_auth_defaul caplog.clear() result = get_credentials_and_project_id() mock_auth_default.assert_called_once_with(scopes=None) - assert ("CREDENTIALS", "PROJECT_ID") == result + assert result == ("CREDENTIALS", "PROJECT_ID") assert ( "Getting connection using `google.auth.default()` since no explicit credentials are provided." 
) in caplog.messages diff --git a/providers/tests/google/cloud/utils/test_field_sanitizer.py b/providers/tests/google/cloud/utils/test_field_sanitizer.py index 98220b1f1bdc6..2f89f35841ea9 100644 --- a/providers/tests/google/cloud/utils/test_field_sanitizer.py +++ b/providers/tests/google/cloud/utils/test_field_sanitizer.py @@ -31,7 +31,7 @@ def test_sanitize_should_sanitize_empty_body_and_fields(self): sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert {} == body + assert body == {} def test_sanitize_should_not_fail_with_none_body(self): body = None @@ -58,7 +58,7 @@ def test_sanitize_should_not_fail_if_field_is_absent_in_body(self): sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert {} == body + assert body == {} def test_sanitize_should_not_remove_fields_for_incorrect_specification(self): actual_body = [ @@ -81,7 +81,7 @@ def test_sanitize_should_remove_all_fields_from_root_level(self): sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert {"name": "instance"} == body + assert body == {"name": "instance"} def test_sanitize_should_remove_for_multiple_fields_from_root_level(self): body = {"kind": "compute#instanceTemplate", "name": "instance"} @@ -90,7 +90,7 @@ def test_sanitize_should_remove_for_multiple_fields_from_root_level(self): sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert {} == body + assert body == {} def test_sanitize_should_remove_all_fields_in_a_list_value(self): body = { @@ -105,13 +105,13 @@ def test_sanitize_should_remove_all_fields_in_a_list_value(self): sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert { + assert body == { "fields": [ {"name": "instance"}, {"name": "instance1"}, {"name": "instance2"}, ] - } == body + } def test_sanitize_should_remove_all_fields_in_any_nested_body(self): fields_to_sanitize = [ @@ -144,7 +144,7 @@ def test_sanitize_should_remove_all_fields_in_any_nested_body(self): sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert { + assert body == { "name": "instance", "properties": { "disks": [ @@ -153,7 +153,7 @@ def test_sanitize_should_remove_all_fields_in_any_nested_body(self): ], "metadata": {"fingerprint": "GDPUYxlwHe4="}, }, - } == body + } def test_sanitize_should_not_fail_if_specification_has_none_value(self): fields_to_sanitize = [ @@ -167,7 +167,7 @@ def test_sanitize_should_not_fail_if_specification_has_none_value(self): sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert {"name": "instance", "properties": {"disks": None}} == body + assert body == {"name": "instance", "properties": {"disks": None}} def test_sanitize_should_not_fail_if_no_specification_matches(self): fields_to_sanitize = [ @@ -180,7 +180,7 @@ def test_sanitize_should_not_fail_if_no_specification_matches(self): sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert {"name": "instance", "properties": {"disks": None}} == body + assert body == {"name": "instance", "properties": {"disks": None}} def test_sanitize_should_not_fail_if_type_in_body_do_not_match_with_specification(self): fields_to_sanitize = [ @@ -193,4 +193,4 @@ def test_sanitize_should_not_fail_if_type_in_body_do_not_match_with_specificatio sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) sanitizer.sanitize(body) - assert {"name": "instance", "properties": {"disks": 1}} == body + assert body == {"name": "instance", 
"properties": {"disks": 1}} diff --git a/providers/tests/google/cloud/utils/test_mlengine_operator_utils.py b/providers/tests/google/cloud/utils/test_mlengine_operator_utils.py index aae62afb002ee..4384eb7fea874 100644 --- a/providers/tests/google/cloud/utils/test_mlengine_operator_utils.py +++ b/providers/tests/google/cloud/utils/test_mlengine_operator_utils.py @@ -120,23 +120,23 @@ def test_create_evaluate_ops(self, mock_beam_pipeline, mock_python): # at the top of the file. METRIC_FN_ENCODED = base64.b64encode(dill.dumps(METRIC_FN, recurse=True)).decode() - assert TASK_PREFIX_PREDICTION == evaluate_prediction.task_id - assert PROJECT_ID == evaluate_prediction.project_id - assert BATCH_PREDICTION_JOB_ID == evaluate_prediction.job_id - assert REGION == evaluate_prediction.region - assert DATA_FORMAT == evaluate_prediction._data_format - assert INPUT_PATHS == evaluate_prediction.input_paths - assert PREDICTION_PATH == evaluate_prediction.output_path - assert MODEL_URI == evaluate_prediction.uri - - assert TASK_PREFIX_SUMMARY == evaluate_summary.task_id - assert DATAFLOW_OPTIONS == evaluate_summary.default_pipeline_options - assert PREDICTION_PATH == evaluate_summary.pipeline_options["prediction_path"] - assert METRIC_FN_ENCODED == evaluate_summary.pipeline_options["metric_fn_encoded"] - assert METRIC_KEYS_EXPECTED == evaluate_summary.pipeline_options["metric_keys"] - - assert TASK_PREFIX_VALIDATION == evaluate_validation.task_id - assert PREDICTION_PATH == evaluate_validation.templates_dict["prediction_path"] + assert evaluate_prediction.task_id == TASK_PREFIX_PREDICTION + assert evaluate_prediction.project_id == PROJECT_ID + assert evaluate_prediction.job_id == BATCH_PREDICTION_JOB_ID + assert evaluate_prediction.region == REGION + assert evaluate_prediction._data_format == DATA_FORMAT + assert evaluate_prediction.input_paths == INPUT_PATHS + assert evaluate_prediction.output_path == PREDICTION_PATH + assert evaluate_prediction.uri == MODEL_URI + + assert evaluate_summary.task_id == TASK_PREFIX_SUMMARY + assert evaluate_summary.default_pipeline_options == DATAFLOW_OPTIONS + assert evaluate_summary.pipeline_options["prediction_path"] == PREDICTION_PATH + assert evaluate_summary.pipeline_options["metric_fn_encoded"] == METRIC_FN_ENCODED + assert evaluate_summary.pipeline_options["metric_keys"] == METRIC_KEYS_EXPECTED + + assert evaluate_validation.task_id == TASK_PREFIX_VALIDATION + assert evaluate_validation.templates_dict["prediction_path"] == PREDICTION_PATH @mock.patch.object(PythonOperator, "set_upstream") @mock.patch.object(BeamRunPythonPipelineOperator, "set_upstream") @@ -167,24 +167,24 @@ def test_create_evaluate_ops_model_and_version_name(self, mock_beam_pipeline, mo # at the top of the file. 
METRIC_FN_ENCODED = base64.b64encode(dill.dumps(METRIC_FN, recurse=True)).decode() - assert TASK_PREFIX_PREDICTION == evaluate_prediction.task_id - assert PROJECT_ID == evaluate_prediction.project_id - assert BATCH_PREDICTION_JOB_ID == evaluate_prediction.job_id - assert REGION == evaluate_prediction.region - assert DATA_FORMAT == evaluate_prediction._data_format - assert INPUT_PATHS == evaluate_prediction.input_paths - assert PREDICTION_PATH == evaluate_prediction.output_path - assert MODEL_NAME == evaluate_prediction.model_name - assert VERSION_NAME == evaluate_prediction.version_name - - assert TASK_PREFIX_SUMMARY == evaluate_summary.task_id - assert DATAFLOW_OPTIONS == evaluate_summary.default_pipeline_options - assert PREDICTION_PATH == evaluate_summary.pipeline_options["prediction_path"] - assert METRIC_FN_ENCODED == evaluate_summary.pipeline_options["metric_fn_encoded"] - assert METRIC_KEYS_EXPECTED == evaluate_summary.pipeline_options["metric_keys"] - - assert TASK_PREFIX_VALIDATION == evaluate_validation.task_id - assert PREDICTION_PATH == evaluate_validation.templates_dict["prediction_path"] + assert evaluate_prediction.task_id == TASK_PREFIX_PREDICTION + assert evaluate_prediction.project_id == PROJECT_ID + assert evaluate_prediction.job_id == BATCH_PREDICTION_JOB_ID + assert evaluate_prediction.region == REGION + assert evaluate_prediction._data_format == DATA_FORMAT + assert evaluate_prediction.input_paths == INPUT_PATHS + assert evaluate_prediction.output_path == PREDICTION_PATH + assert evaluate_prediction.model_name == MODEL_NAME + assert evaluate_prediction.version_name == VERSION_NAME + + assert evaluate_summary.task_id == TASK_PREFIX_SUMMARY + assert evaluate_summary.default_pipeline_options == DATAFLOW_OPTIONS + assert evaluate_summary.pipeline_options["prediction_path"] == PREDICTION_PATH + assert evaluate_summary.pipeline_options["metric_fn_encoded"] == METRIC_FN_ENCODED + assert evaluate_summary.pipeline_options["metric_keys"] == METRIC_KEYS_EXPECTED + + assert evaluate_validation.task_id == TASK_PREFIX_VALIDATION + assert evaluate_validation.templates_dict["prediction_path"] == PREDICTION_PATH @mock.patch.object(PythonOperator, "set_upstream") @mock.patch.object(BeamRunPythonPipelineOperator, "set_upstream") @@ -211,24 +211,24 @@ def test_create_evaluate_ops_dag(self, mock_dataflow, mock_python): # at the top of the file. 
METRIC_FN_ENCODED = base64.b64encode(dill.dumps(METRIC_FN, recurse=True)).decode() - assert TASK_PREFIX_PREDICTION == evaluate_prediction.task_id - assert PROJECT_ID == evaluate_prediction.project_id - assert BATCH_PREDICTION_JOB_ID == evaluate_prediction.job_id - assert REGION == evaluate_prediction.region - assert DATA_FORMAT == evaluate_prediction._data_format - assert INPUT_PATHS == evaluate_prediction.input_paths - assert PREDICTION_PATH == evaluate_prediction.output_path - assert MODEL_NAME == evaluate_prediction.model_name - assert VERSION_NAME == evaluate_prediction.version_name - - assert TASK_PREFIX_SUMMARY == evaluate_summary.task_id - assert DATAFLOW_OPTIONS == evaluate_summary.default_pipeline_options - assert PREDICTION_PATH == evaluate_summary.pipeline_options["prediction_path"] - assert METRIC_FN_ENCODED == evaluate_summary.pipeline_options["metric_fn_encoded"] - assert METRIC_KEYS_EXPECTED == evaluate_summary.pipeline_options["metric_keys"] - - assert TASK_PREFIX_VALIDATION == evaluate_validation.task_id - assert PREDICTION_PATH == evaluate_validation.templates_dict["prediction_path"] + assert evaluate_prediction.task_id == TASK_PREFIX_PREDICTION + assert evaluate_prediction.project_id == PROJECT_ID + assert evaluate_prediction.job_id == BATCH_PREDICTION_JOB_ID + assert evaluate_prediction.region == REGION + assert evaluate_prediction._data_format == DATA_FORMAT + assert evaluate_prediction.input_paths == INPUT_PATHS + assert evaluate_prediction.output_path == PREDICTION_PATH + assert evaluate_prediction.model_name == MODEL_NAME + assert evaluate_prediction.version_name == VERSION_NAME + + assert evaluate_summary.task_id == TASK_PREFIX_SUMMARY + assert evaluate_summary.default_pipeline_options == DATAFLOW_OPTIONS + assert evaluate_summary.pipeline_options["prediction_path"] == PREDICTION_PATH + assert evaluate_summary.pipeline_options["metric_fn_encoded"] == METRIC_FN_ENCODED + assert evaluate_summary.pipeline_options["metric_keys"] == METRIC_KEYS_EXPECTED + + assert evaluate_validation.task_id == TASK_PREFIX_VALIDATION + assert evaluate_validation.templates_dict["prediction_path"] == PREDICTION_PATH @pytest.mark.db_test @mock.patch.object(GCSHook, "download") @@ -257,7 +257,7 @@ def test_apply_validate_fn(self, mock_beam_pipeline, mock_python, mock_download) with pytest.raises(ValueError) as ctx: evaluate_validation.python_callable(templates_dict=templates_dict) - assert "Too high err>0.2; summary={'err': 0.3, 'mse': 0.04, 'count': 1100}" == str(ctx.value) + assert str(ctx.value) == "Too high err>0.2; summary={'err': 0.3, 'mse': 0.04, 'count': 1100}" mock_download.assert_called_once_with("path", "to/output/predictions.json/prediction.summary.json") invalid_prediction_paths = ["://path/to/output/predictions.json", "gs://", ""] @@ -266,7 +266,7 @@ def test_apply_validate_fn(self, mock_beam_pipeline, mock_python, mock_download) templates_dict = {"prediction_path": path} with pytest.raises(ValueError) as ctx: evaluate_validation.python_callable(templates_dict=templates_dict) - assert "Wrong format prediction_path:" == str(ctx.value)[:29] + assert str(ctx.value)[:29] == "Wrong format prediction_path:" def test_invalid_task_prefix(self): invalid_task_prefix_values = ["test-task-prefix&", "~test-task-prefix", "test-task(-prefix"] diff --git a/providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py b/providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py index 984c4847d9424..8697f32c7b09c 100644 --- 
a/providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py +++ b/providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py @@ -34,10 +34,10 @@ class TestJsonCode: def test_encode(self): - assert b'{"a": 1}' == mlengine_prediction_summary.JsonCoder.encode({"a": 1}) + assert mlengine_prediction_summary.JsonCoder.encode({"a": 1}) == b'{"a": 1}' def test_decode(self): - assert {"a": 1} == mlengine_prediction_summary.JsonCoder.decode('{"a": 1}') + assert mlengine_prediction_summary.JsonCoder.decode('{"a": 1}') == {"a": 1} class TestMakeSummary: diff --git a/providers/tests/google/common/auth_backend/test_google_openid.py b/providers/tests/google/common/auth_backend/test_google_openid.py index 6d537c497e1b3..5ddc978069913 100644 --- a/providers/tests/google/common/auth_backend/test_google_openid.py +++ b/providers/tests/google/common/auth_backend/test_google_openid.py @@ -95,7 +95,7 @@ def test_success(self, mock_verify_token): with self.app.test_client() as test_client: response = test_client.get("/api/v1/pools", headers={"Authorization": "bearer JWT_TOKEN"}) - assert 200 == response.status_code + assert response.status_code == 200 assert "Default pool" in str(response.json) @pytest.mark.parametrize("auth_header", ["bearer", "JWT_TOKEN", "bearer "]) @@ -110,7 +110,7 @@ def test_malformed_headers(self, mock_verify_token, auth_header): with self.app.test_client() as test_client: response = test_client.get("/api/v1/pools", headers={"Authorization": auth_header}) - assert 401 == response.status_code + assert response.status_code == 401 @mock.patch("google.oauth2.id_token.verify_token") def test_invalid_iss_in_jwt_token(self, mock_verify_token): @@ -123,7 +123,7 @@ def test_invalid_iss_in_jwt_token(self, mock_verify_token): with self.app.test_client() as test_client: response = test_client.get("/api/v1/pools", headers={"Authorization": "bearer JWT_TOKEN"}) - assert 401 == response.status_code + assert response.status_code == 401 @mock.patch("google.oauth2.id_token.verify_token") def test_user_not_exists(self, mock_verify_token): @@ -136,14 +136,14 @@ def test_user_not_exists(self, mock_verify_token): with self.app.test_client() as test_client: response = test_client.get("/api/v1/pools", headers={"Authorization": "bearer JWT_TOKEN"}) - assert 401 == response.status_code + assert response.status_code == 401 @conf_vars({("api", "auth_backends"): "airflow.providers.google.common.auth_backend.google_openid"}) def test_missing_id_token(self): with self.app.test_client() as test_client: response = test_client.get("/api/v1/pools") - assert 401 == response.status_code + assert response.status_code == 401 @conf_vars({("api", "auth_backends"): "airflow.providers.google.common.auth_backend.google_openid"}) @mock.patch("google.oauth2.id_token.verify_token") @@ -153,4 +153,4 @@ def test_invalid_id_token(self, mock_verify_token): with self.app.test_client() as test_client: response = test_client.get("/api/v1/pools", headers={"Authorization": "bearer JWT_TOKEN"}) - assert 401 == response.status_code + assert response.status_code == 401 diff --git a/providers/tests/google/common/hooks/test_base_google.py b/providers/tests/google/common/hooks/test_base_google.py index d3ce25fd0ed77..cbf95b8b78cd3 100644 --- a/providers/tests/google/common/hooks/test_base_google.py +++ b/providers/tests/google/common/hooks/test_base_google.py @@ -88,7 +88,7 @@ def test_retry_on_exception(self): errors = [mock.MagicMock(details=mock.PropertyMock(return_value="userRateLimitExceeded"))] custom_fn = 
NoForbiddenAfterCount(count=5, message=message, errors=errors) _retryable_test_with_temporary_quota_retry(custom_fn) - assert 5 == custom_fn.counter + assert custom_fn.counter == 5 def test_raise_exception_on_non_quota_exception(self): message = "POST https://translation.googleapis.com/language/translate/v2: Daily Limit Exceeded" @@ -418,7 +418,7 @@ def test_get_credentials_and_project_id_with_default_auth(self, mock_get_creds_a client_secret=None, idp_extra_params_dict=None, ) - assert ("CREDENTIALS", "PROJECT_ID") == result + assert result == ("CREDENTIALS", "PROJECT_ID") @mock.patch("requests.post") @mock.patch(MODULE_NAME + ".get_credentials_and_project_id") @@ -539,7 +539,7 @@ def test_get_credentials_and_project_id_with_default_auth_and_overridden_project client_secret=None, idp_extra_params_dict=None, ) - assert ("CREDENTIALS", "SECOND_PROJECT_ID") == result + assert result == ("CREDENTIALS", "SECOND_PROJECT_ID") def test_get_credentials_and_project_id_with_mutually_exclusive_configuration(self): self.instance.extras = { @@ -912,7 +912,7 @@ def test_should_return_int_when_set_int_via_connection(self): } assert isinstance(instance.num_retries, int) - assert 10 == instance.num_retries + assert instance.num_retries == 10 @mock.patch.dict( "os.environ", @@ -938,7 +938,7 @@ def test_should_raise_when_invalid_value_via_env_var(self): def test_should_fallback_when_empty_string_in_env_var(self): instance = hook.GoogleBaseHook(gcp_conn_id="google_cloud_default") assert isinstance(instance.num_retries, int) - assert 5 == instance.num_retries + assert instance.num_retries == 5 class TestCredentialsToken: diff --git a/providers/tests/google/firebase/hooks/test_firestore.py b/providers/tests/google/firebase/hooks/test_firestore.py index b7308c5f9b9ce..7509f37611a0d 100644 --- a/providers/tests/google/firebase/hooks/test_firestore.py +++ b/providers/tests/google/firebase/hooks/test_firestore.py @@ -242,6 +242,7 @@ def test_create_build(self, mock_get_conn, mock_project_id): self.hook.export_documents(body={}) assert ( - "The project id must be passed either as keyword project_id parameter or as project_id extra in " - "Google Cloud connection definition. Both are not set!" == str(ctx.value) + str(ctx.value) + == "The project id must be passed either as keyword project_id parameter or as project_id extra in " + "Google Cloud connection definition. Both are not set!" 
) diff --git a/providers/tests/google/suite/hooks/test_drive.py b/providers/tests/google/suite/hooks/test_drive.py index a6fb0391d2c8c..b52a64e1e89ce 100644 --- a/providers/tests/google/suite/hooks/test_drive.py +++ b/providers/tests/google/suite/hooks/test_drive.py @@ -121,7 +121,7 @@ def test_ensure_folders_exists_when_no_folder_exists(self, mock_get_conn): any_order=True, ) - assert "ID_4" == result_value + assert result_value == "ID_4" @mock.patch("airflow.providers.google.suite.hooks.drive.GoogleDriveHook.get_conn") def test_ensure_folders_exists_when_some_folders_exists(self, mock_get_conn): @@ -180,7 +180,7 @@ def test_ensure_folders_exists_when_some_folders_exists(self, mock_get_conn): any_order=True, ) - assert "ID_4" == result_value + assert result_value == "ID_4" @mock.patch("airflow.providers.google.suite.hooks.drive.GoogleDriveHook.get_conn") def test_ensure_folders_exists_when_all_folders_exists(self, mock_get_conn): @@ -215,7 +215,7 @@ def test_ensure_folders_exists_when_all_folders_exists(self, mock_get_conn): ) mock_get_conn.return_value.files.return_value.create.assert_not_called() - assert "ID_4" == result_value + assert result_value == "ID_4" @mock.patch("airflow.providers.google.suite.hooks.drive.GoogleDriveHook.get_file_id") @mock.patch("airflow.providers.google.suite.hooks.drive.GoogleDriveHook.get_conn") diff --git a/providers/tests/hashicorp/_internal_client/test_vault_client.py b/providers/tests/hashicorp/_internal_client/test_vault_client.py index f491f12129007..434b441e8964e 100644 --- a/providers/tests/hashicorp/_internal_client/test_vault_client.py +++ b/providers/tests/hashicorp/_internal_client/test_vault_client.py @@ -41,7 +41,7 @@ def test_custom_mount_point(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client vault_client = _VaultClient(auth_type="userpass", mount_point="custom") - assert "custom" == vault_client.mount_point + assert vault_client.mount_point == "custom" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_version_one_init(self, mock_hvac): @@ -49,7 +49,7 @@ def test_version_one_init(self, mock_hvac): mock_hvac.Client.return_value = mock_client vault_client = _VaultClient(auth_type="userpass", kv_engine_version=1) - assert 1 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 1 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_default_session_retry(self, mock_hvac): @@ -83,7 +83,7 @@ def test_approle(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.approle.login.assert_called_with(role_id="role", secret_id="pass") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_approle_different_auth_mount_point(self, mock_hvac): @@ -101,7 +101,7 @@ def test_approle_different_auth_mount_point(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.approle.login.assert_called_with(role_id="role", secret_id="pass", mount_point="other") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_approle_missing_role(self, mock_hvac): @@ -130,7 +130,7 @@ def test_aws_iam(self, 
mock_hvac): role="role", ) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_aws_iam_different_auth_mount_point(self, mock_hvac): @@ -151,7 +151,7 @@ def test_aws_iam_different_auth_mount_point(self, mock_hvac): access_key="user", secret_key="pass", role="role", mount_point="other" ) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_azure(self, mock_hvac): @@ -175,7 +175,7 @@ def test_azure(self, mock_hvac): client_secret="pass", ) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_azure_different_auth_mount_point(self, mock_hvac): @@ -201,7 +201,7 @@ def test_azure_different_auth_mount_point(self, mock_hvac): mount_point="other", ) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_azure_missing_resource(self, mock_hvac): @@ -255,7 +255,7 @@ def test_gcp(self, mock_hvac, mock_get_credentials, mock_get_scopes): credentials="credentials", ) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.google.cloud.utils.credentials_provider._get_scopes") @mock.patch("airflow.providers.google.cloud.utils.credentials_provider.get_credentials_and_project_id") @@ -282,7 +282,7 @@ def test_gcp_different_auth_mount_point(self, mock_hvac, mock_get_credentials, m mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.gcp.configure.assert_called_with(credentials="credentials", mount_point="other") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.google.cloud.utils.credentials_provider._get_scopes") @mock.patch("airflow.providers.google.cloud.utils.credentials_provider.get_credentials_and_project_id") @@ -310,7 +310,7 @@ def test_gcp_dict(self, mock_hvac, mock_get_credentials, mock_get_scopes): credentials="credentials", ) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_github(self, mock_hvac): @@ -323,7 +323,7 @@ def test_github(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.github.login.assert_called_with(token="s.7AU0I51yv1Q1lxOIg1F3ZRAS") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_github_different_auth_mount_point(self, mock_hvac): @@ -340,7 +340,7 @@ def test_github_different_auth_mount_point(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) 
client.auth.github.login.assert_called_with(token="s.7AU0I51yv1Q1lxOIg1F3ZRAS", mount_point="other") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_github_missing_token(self, mock_hvac): @@ -364,7 +364,7 @@ def test_kubernetes_default_path(self, mock_kubernetes, mock_hvac): mock_kubernetes.assert_called_with(mock_client.adapter) mock_kubernetes.return_value.login.assert_called_with(role="kube_role", jwt="data") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.Kubernetes") @@ -385,7 +385,7 @@ def test_kubernetes(self, mock_kubernetes, mock_hvac): mock_kubernetes.assert_called_with(mock_client.adapter) mock_kubernetes.return_value.login.assert_called_with(role="kube_role", jwt="data") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.Kubernetes") @@ -409,7 +409,7 @@ def test_kubernetes_different_auth_mount_point(self, mock_kubernetes, mock_hvac) role="kube_role", jwt="data", mount_point="other" ) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_kubernetes_missing_role(self, mock_hvac): @@ -441,7 +441,7 @@ def test_ldap(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.ldap.login.assert_called_with(username="user", password="pass") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_ldap_different_auth_mount_point(self, mock_hvac): @@ -459,7 +459,7 @@ def test_ldap_different_auth_mount_point(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.ldap.login.assert_called_with(username="user", password="pass", mount_point="other") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_radius_missing_host(self, mock_hvac): @@ -490,7 +490,7 @@ def test_radius(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.radius.configure.assert_called_with(host="radhost", secret="pass", port=None) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_radius_different_auth_mount_point(self, mock_hvac): @@ -510,7 +510,7 @@ def test_radius_different_auth_mount_point(self, mock_hvac): host="radhost", secret="pass", port=None, mount_point="other" ) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert 
vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_radius_port(self, mock_hvac): @@ -528,7 +528,7 @@ def test_radius_port(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.radius.configure.assert_called_with(host="radhost", secret="pass", port=8110) client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_token_missing_token(self, mock_hvac): @@ -547,9 +547,9 @@ def test_token(self, mock_hvac): client = vault_client.client mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.is_authenticated.assert_called_with() - assert "s.7AU0I51yv1Q1lxOIg1F3ZRAS" == client.token - assert 2 == vault_client.kv_engine_version - assert "secret" == vault_client.mount_point + assert client.token == "s.7AU0I51yv1Q1lxOIg1F3ZRAS" + assert vault_client.kv_engine_version == 2 + assert vault_client.mount_point == "secret" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_token_in_env(self, mock_hvac, monkeypatch): @@ -560,9 +560,9 @@ def test_token_in_env(self, mock_hvac, monkeypatch): client = vault_client.client mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.is_authenticated.assert_called_with() - assert "s.7AU0I51yv1Q1lxOIg1F3ZRAS" == client.token - assert 2 == vault_client.kv_engine_version - assert "secret" == vault_client.mount_point + assert client.token == "s.7AU0I51yv1Q1lxOIg1F3ZRAS" + assert vault_client.kv_engine_version == 2 + assert vault_client.mount_point == "secret" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_token_path(self, mock_hvac): @@ -576,9 +576,9 @@ def test_token_path(self, mock_hvac): client = vault_client.client mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.is_authenticated.assert_called_with() - assert "s.7AU0I51yv1Q1lxOIg1F3ZRAS" == client.token - assert 2 == vault_client.kv_engine_version - assert "secret" == vault_client.mount_point + assert client.token == "s.7AU0I51yv1Q1lxOIg1F3ZRAS" + assert vault_client.kv_engine_version == 2 + assert vault_client.mount_point == "secret" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_token_path_strip(self, mock_hvac): @@ -592,9 +592,9 @@ def test_token_path_strip(self, mock_hvac): client = vault_client.client mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.is_authenticated.assert_called_with() - assert "s.7AU0I51yv1Q1lxOIg1F3ZRAS" == client.token - assert 2 == vault_client.kv_engine_version - assert "secret" == vault_client.mount_point + assert client.token == "s.7AU0I51yv1Q1lxOIg1F3ZRAS" + assert vault_client.kv_engine_version == 2 + assert vault_client.mount_point == "secret" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_default_auth_type(self, mock_hvac): @@ -606,10 +606,10 @@ def test_default_auth_type(self, mock_hvac): client = vault_client.client mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.is_authenticated.assert_called_with() - assert "s.7AU0I51yv1Q1lxOIg1F3ZRAS" == client.token - assert "token" == vault_client.auth_type - assert 2 == vault_client.kv_engine_version - assert "secret" == 
vault_client.mount_point + assert client.token == "s.7AU0I51yv1Q1lxOIg1F3ZRAS" + assert vault_client.auth_type == "token" + assert vault_client.kv_engine_version == 2 + assert vault_client.mount_point == "secret" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_userpass(self, mock_hvac): @@ -622,7 +622,7 @@ def test_userpass(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.userpass.login.assert_called_with(username="user", password="pass") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_userpass_different_auth_mount_point(self, mock_hvac): @@ -640,7 +640,7 @@ def test_userpass_different_auth_mount_point(self, mock_hvac): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) client.auth.userpass.login.assert_called_with(username="user", password="pass", mount_point="other") client.is_authenticated.assert_called_with() - assert 2 == vault_client.kv_engine_version + assert vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_non_existing_key_v2(self, mock_hvac): @@ -672,7 +672,7 @@ def test_get_non_existing_key_v2_different_auth(self, mock_hvac): ) secret = vault_client.get_secret(secret_path="missing") assert secret is None - assert "secret" == vault_client.mount_point + assert vault_client.mount_point == "secret" mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point="secret", path="missing", version=None, raise_on_deleted_version=True ) @@ -727,7 +727,7 @@ def test_get_existing_key_v2(self, mock_hvac): url="http://localhost:8180", ) secret = vault_client.get_secret(secret_path="path/to/secret") - assert {"secret_key": "secret_value"} == secret + assert secret == {"secret_key": "secret_value"} mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point="secret", path="path/to/secret", version=None, raise_on_deleted_version=True ) @@ -765,7 +765,7 @@ def test_get_existing_key_v2_without_preconfigured_mount_point(self, mock_hvac): mount_point=None, ) secret = vault_client.get_secret(secret_path="mount_point/path/to/secret") - assert {"secret_key": "secret_value"} == secret + assert secret == {"secret_key": "secret_value"} mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point="mount_point", path="path/to/secret", version=None, raise_on_deleted_version=True ) @@ -802,7 +802,7 @@ def test_get_existing_key_v2_version(self, mock_hvac): url="http://localhost:8180", ) secret = vault_client.get_secret(secret_path="missing", secret_version=1) - assert {"secret_key": "secret_value"} == secret + assert secret == {"secret_key": "secret_value"} mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point="secret", path="missing", version=1, raise_on_deleted_version=True ) @@ -832,7 +832,7 @@ def test_get_existing_key_v1(self, mock_hvac): url="http://localhost:8180", ) secret = vault_client.get_secret(secret_path="/path/to/secret") - assert {"value": "world"} == secret + assert secret == {"value": "world"} mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point="secret", path="/path/to/secret" ) @@ -863,7 +863,7 @@ def test_get_existing_key_v1_ssl_verify_false(self, mock_hvac): verify=False, ) secret = 
vault_client.get_secret(secret_path="/path/to/secret") - assert {"value": "world"} == secret + assert secret == {"value": "world"} assert not vault_client.kwargs["session"].verify mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point="secret", path="/path/to/secret" @@ -895,8 +895,8 @@ def test_get_existing_key_v1_trust_private_ca(self, mock_hvac): verify="/etc/ssl/certificates/ca-bundle.pem", ) secret = vault_client.get_secret(secret_path="/path/to/secret") - assert {"value": "world"} == secret - assert "/etc/ssl/certificates/ca-bundle.pem" == vault_client.kwargs["session"].verify + assert secret == {"value": "world"} + assert vault_client.kwargs["session"].verify == "/etc/ssl/certificates/ca-bundle.pem" mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point="secret", path="/path/to/secret" ) @@ -927,7 +927,7 @@ def test_get_existing_key_v1_without_preconfigured_mount_point(self, mock_hvac): mount_point=None, ) secret = vault_client.get_secret(secret_path="mount_point/path/to/secret") - assert {"value": "world"} == secret + assert secret == {"value": "world"} mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point="mount_point", path="path/to/secret" ) @@ -958,7 +958,7 @@ def test_get_existing_key_v1_different_auth_mount_point(self, mock_hvac): url="http://localhost:8180", ) secret = vault_client.get_secret(secret_path="missing") - assert {"value": "world"} == secret + assert secret == {"value": "world"} mock_client.secrets.kv.v1.read_secret.assert_called_once_with(mount_point="secret", path="missing") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -1002,7 +1002,7 @@ def test_get_secret_metadata_v2(self, mock_hvac): auth_type="token", token="s.7AU0I51yv1Q1lxOIg1F3ZRAS", url="http://localhost:8180" ) metadata = vault_client.get_secret_metadata(secret_path="missing") - assert { + assert metadata == { "request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae", "lease_id": "", "renewable": False, @@ -1021,7 +1021,7 @@ def test_get_secret_metadata_v2(self, mock_hvac): "version": 2, }, ], - } == metadata + } mock_client.secrets.kv.v2.read_secret_metadata.assert_called_once_with( mount_point="secret", path="missing" ) @@ -1073,7 +1073,7 @@ def test_get_secret_including_metadata_v2(self, mock_hvac): url="http://localhost:8180", ) metadata = vault_client.get_secret_including_metadata(secret_path="missing") - assert { + assert metadata == { "request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae", "lease_id": "", "renewable": False, @@ -1090,7 +1090,7 @@ def test_get_secret_including_metadata_v2(self, mock_hvac): "wrap_info": None, "warnings": None, "auth": None, - } == metadata + } mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point="secret", path="missing", version=None, raise_on_deleted_version=True ) diff --git a/providers/tests/hashicorp/hooks/test_vault.py b/providers/tests/hashicorp/hooks/test_vault.py index ed9587cd2534c..f835382b1200a 100644 --- a/providers/tests/hashicorp/hooks/test_vault.py +++ b/providers/tests/hashicorp/hooks/test_vault.py @@ -102,7 +102,7 @@ def test_custom_mount_point_dejson(self, mock_hvac, mock_get_connection): "vault_conn_id": "vault_conn_id", } test_hook = VaultHook(**kwargs) - assert "custom" == test_hook.vault_client.mount_point + assert test_hook.vault_client.mount_point == "custom" @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -119,8 
+119,8 @@ def test_custom_auth_mount_point_init_params(self, mock_hvac, mock_get_connectio mock_connection.extra_dejson.get.side_effect = connection_dict.get kwargs = {"vault_conn_id": "vault_conn_id", "auth_mount_point": "custom"} test_hook = VaultHook(**kwargs) - assert "secret" == test_hook.vault_client.mount_point - assert "custom" == test_hook.vault_client.auth_mount_point + assert test_hook.vault_client.mount_point == "secret" + assert test_hook.vault_client.auth_mount_point == "custom" @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -137,8 +137,8 @@ def test_custom_auth_mount_point_dejson(self, mock_hvac, mock_get_connection): "vault_conn_id": "vault_conn_id", } test_hook = VaultHook(**kwargs) - assert "secret" == test_hook.vault_client.mount_point - assert "custom" == test_hook.vault_client.auth_mount_point + assert test_hook.vault_client.mount_point == "secret" + assert test_hook.vault_client.auth_mount_point == "custom" @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -158,7 +158,7 @@ def test_version_one_dejson(self, mock_hvac, mock_get_connection): "vault_conn_id": "vault_conn_id", } test_hook = VaultHook(**kwargs) - assert 1 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 1 @pytest.mark.parametrize( "protocol, expected_url", @@ -192,7 +192,7 @@ def test_protocol(self, mock_hvac, mock_get_connection, protocol, expected_url): mock_hvac.Client.assert_called_with(url=expected_url, session=None) test_client.auth.approle.login.assert_called_with(role_id="user", secret_id="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @pytest.mark.parametrize( "use_tls, expected_url", @@ -225,7 +225,7 @@ def test_protocol_via_use_tls(self, mock_hvac, mock_get_connection, use_tls, exp mock_hvac.Client.assert_called_with(url=expected_url, session=None) test_client.auth.approle.login.assert_called_with(role_id="user", secret_id="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -251,7 +251,7 @@ def test_approle_init_params(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.approle.login.assert_called_with(role_id="user", secret_id="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -275,7 +275,7 @@ def test_approle_dejson(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.approle.login.assert_called_with(role_id="user", secret_id="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @mock.patch.dict( @@ -288,7 +288,7 @@ def test_approle_uri(self, mock_hvac): mock_hvac.Client.assert_called_with(url="https://vault.example.com", session=None) test_client.auth.approle.login.assert_called_with(role_id="role", secret_id="secret") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -318,7 +318,7 @@ def test_aws_iam_init_params(self, mock_hvac, mock_get_connection): role="role", ) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -361,7 +361,7 @@ def test_aws_uri(self, mock_hvac): role="role", ) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -393,7 +393,7 @@ def test_azure_init_params(self, mock_hvac, mock_get_connection): client_secret="pass", ) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -426,7 +426,7 @@ def test_azure_dejson(self, mock_hvac, mock_get_connection): client_secret="pass", ) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.google.cloud.utils.credentials_provider._get_scopes") @mock.patch("airflow.providers.google.cloud.utils.credentials_provider.get_credentials_and_project_id") @@ -463,7 +463,7 @@ def test_gcp_init_params(self, mock_hvac, mock_get_connection, mock_get_credenti credentials="credentials", ) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.google.cloud.utils.credentials_provider._get_scopes") @mock.patch("airflow.providers.google.cloud.utils.credentials_provider.get_credentials_and_project_id") @@ -501,7 +501,7 @@ def test_gcp_dejson(self, mock_hvac, mock_get_connection, mock_get_credentials, credentials="credentials", ) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.google.cloud.utils.credentials_provider._get_scopes") @mock.patch("airflow.providers.google.cloud.utils.credentials_provider.get_credentials_and_project_id") @@ -539,7 +539,7 @@ def test_gcp_dict_dejson(self, mock_hvac, mock_get_connection, mock_get_credenti credentials="credentials", ) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert 
test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -564,7 +564,7 @@ def test_github_init_params(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.github.login.assert_called_with(token="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -590,7 +590,7 @@ def test_github_dejson(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.github.login.assert_called_with(token="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -620,7 +620,7 @@ def test_kubernetes_default_path(self, mock_kubernetes, mock_hvac, mock_get_conn mock_kubernetes.assert_called_with(mock_client.adapter) mock_kubernetes.return_value.login.assert_called_with(role="kube_role", jwt="data") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -651,7 +651,7 @@ def test_kubernetes_init_params(self, mock_kubernetes, mock_hvac, mock_get_conne mock_kubernetes.assert_called_with(mock_client.adapter) mock_kubernetes.return_value.login.assert_called_with(role="kube_role", jwt="data") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -681,7 +681,7 @@ def test_kubernetes_dejson(self, mock_kubernetes, mock_hvac, mock_get_connection mock_kubernetes.assert_called_with(mock_client.adapter) mock_kubernetes.return_value.login.assert_called_with(role="kube_role", jwt="data") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -712,7 +712,7 @@ def test_client_kwargs(self, mock_hvac, mock_get_connection): session=None, ) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -737,7 +737,7 @@ def test_ldap_init_params(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) 
test_client.auth.ldap.login.assert_called_with(username="user", password="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -763,7 +763,7 @@ def test_ldap_dejson(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.ldap.login.assert_called_with(username="user", password="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -789,7 +789,7 @@ def test_radius_init_params(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.radius.configure.assert_called_with(host="radhost", secret="pass", port=None) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -816,7 +816,7 @@ def test_radius_init_params_port(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.radius.configure.assert_called_with(host="radhost", secret="pass", port=8123) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -844,7 +844,7 @@ def test_radius_dejson(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.radius.configure.assert_called_with(host="radhost", secret="pass", port=8123) test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -888,9 +888,9 @@ def test_token_init_params(self, mock_hvac, mock_get_connection): test_client = test_hook.get_conn() mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.is_authenticated.assert_called_with() - assert "pass" == test_client.token - assert 2 == test_hook.vault_client.kv_engine_version - assert "secret" == test_hook.vault_client.mount_point + assert test_client.token == "pass" + assert test_hook.vault_client.kv_engine_version == 2 + assert test_hook.vault_client.mount_point == "secret" @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -915,8 +915,8 @@ def test_token_dejson(self, mock_hvac, mock_get_connection): test_client = test_hook.get_conn() 
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.is_authenticated.assert_called_with() - assert "pass" == test_client.token - assert 2 == test_hook.vault_client.kv_engine_version + assert test_client.token == "pass" + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -942,7 +942,7 @@ def test_userpass_init_params(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.userpass.login.assert_called_with(username="user", password="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -968,7 +968,7 @@ def test_userpass_dejson(self, mock_hvac, mock_get_connection): mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None) test_client.auth.userpass.login.assert_called_with(username="user", password="pass") test_client.is_authenticated.assert_called_with() - assert 2 == test_hook.vault_client.kv_engine_version + assert test_hook.vault_client.kv_engine_version == 2 @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @@ -1004,7 +1004,7 @@ def test_get_existing_key_v2(self, mock_hvac, mock_get_connection): test_hook = VaultHook(**kwargs) secret = test_hook.get_secret(secret_path="missing") - assert {"secret_key": "secret_value"} == secret + assert secret == {"secret_key": "secret_value"} mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point="secret", path="missing", version=None, raise_on_deleted_version=True ) @@ -1043,7 +1043,7 @@ def test_get_existing_key_v2_version(self, mock_hvac, mock_get_connection): test_hook = VaultHook(**kwargs) secret = test_hook.get_secret(secret_path="missing", secret_version=1) - assert {"secret_key": "secret_value"} == secret + assert secret == {"secret_key": "secret_value"} mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point="secret", path="missing", version=1, raise_on_deleted_version=True ) @@ -1074,7 +1074,7 @@ def test_get_existing_key_v1(self, mock_hvac, mock_get_connection): test_hook = VaultHook(**kwargs) secret = test_hook.get_secret(secret_path="missing") - assert {"value": "world"} == secret + assert secret == {"value": "world"} mock_client.secrets.kv.v1.read_secret.assert_called_once_with(mount_point="secret", path="missing") @mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection") @@ -1113,7 +1113,7 @@ def test_get_secret_metadata_v2(self, mock_hvac, mock_get_connection): test_hook = VaultHook(**kwargs) metadata = test_hook.get_secret_metadata(secret_path="missing") - assert { + assert metadata == { "request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae", "lease_id": "", "renewable": False, @@ -1132,7 +1132,7 @@ def test_get_secret_metadata_v2(self, mock_hvac, mock_get_connection): "version": 2, }, ], - } == metadata + } mock_client.secrets.kv.v2.read_secret_metadata.assert_called_once_with( mount_point="secret", path="missing" ) @@ -1171,7 +1171,7 @@ def test_get_secret_including_metadata_v2(self, 
mock_hvac, mock_get_connection): test_hook = VaultHook(**kwargs) metadata = test_hook.get_secret_including_metadata(secret_path="missing") - assert { + assert metadata == { "request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae", "lease_id": "", "renewable": False, @@ -1188,7 +1188,7 @@ def test_get_secret_including_metadata_v2(self, mock_hvac, mock_get_connection): "wrap_info": None, "warnings": None, "auth": None, - } == metadata + } mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point="secret", path="missing", version=None, raise_on_deleted_version=True ) @@ -1302,7 +1302,7 @@ def test_config_from_secret_backend(self, mock_hvac): ("test", "sql_alchemy_conn"), } - assert "sqlite:////Users/airflow/airflow/airflow.db" == test_conf.get("test", "sql_alchemy_conn") + assert test_conf.get("test", "sql_alchemy_conn") == "sqlite:////Users/airflow/airflow/airflow.db" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") @conf_vars( diff --git a/providers/tests/hashicorp/secrets/test_vault.py b/providers/tests/hashicorp/secrets/test_vault.py index d3d089553af2c..14834e2cbd07a 100644 --- a/providers/tests/hashicorp/secrets/test_vault.py +++ b/providers/tests/hashicorp/secrets/test_vault.py @@ -63,7 +63,7 @@ def test_get_conn_uri(self, mock_hvac): match="Method `VaultBackend.get_conn_uri` is deprecated and will be removed in a future release.", ): returned_uri = test_client.get_conn_uri(conn_id="test_postgres") - assert "postgresql://airflow:airflow@host:5432/airflow" == returned_uri + assert returned_uri == "postgresql://airflow:airflow@host:5432/airflow" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri_without_predefined_mount_point(self, mock_hvac): @@ -102,7 +102,7 @@ def test_get_conn_uri_without_predefined_mount_point(self, mock_hvac): match="Method `VaultBackend.get_conn_uri` is deprecated and will be removed in a future release.", ): returned_uri = test_client.get_conn_uri(conn_id="airflow/test_postgres") - assert "postgresql://airflow:airflow@host:5432/airflow" == returned_uri + assert returned_uri == "postgresql://airflow:airflow@host:5432/airflow" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_connection(self, mock_hvac): @@ -145,7 +145,7 @@ def test_get_connection(self, mock_hvac): test_client = VaultBackend(**kwargs) connection = test_client.get_connection(conn_id="test_postgres") - assert "postgresql://airflow:airflow@host:5432/airflow?foo=bar&baz=taz" == connection.get_uri() + assert connection.get_uri() == "postgresql://airflow:airflow@host:5432/airflow?foo=bar&baz=taz" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_connection_without_predefined_mount_point(self, mock_hvac): @@ -188,7 +188,7 @@ def test_get_connection_without_predefined_mount_point(self, mock_hvac): test_client = VaultBackend(**kwargs) connection = test_client.get_connection(conn_id="airflow/test_postgres") - assert "postgresql://airflow:airflow@host:5432/airflow?foo=bar&baz=taz" == connection.get_uri() + assert connection.get_uri() == "postgresql://airflow:airflow@host:5432/airflow?foo=bar&baz=taz" @pytest.mark.parametrize( "mount_point, connections_path, conn_id, expected_args", @@ -252,7 +252,7 @@ def test_get_conn_uri_engine_version_1( ): returned_uri = test_client.get_conn_uri(conn_id=conn_id) mock_client.secrets.kv.v1.read_secret.assert_called_once_with(**expected_args) - assert 
"postgresql://airflow:airflow@host:5432/airflow" == returned_uri + assert returned_uri == "postgresql://airflow:airflow@host:5432/airflow" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri_engine_version_1_custom_auth_mount_point(self, mock_hvac): @@ -280,7 +280,7 @@ def test_get_conn_uri_engine_version_1_custom_auth_mount_point(self, mock_hvac): } test_client = VaultBackend(**kwargs) - assert "custom" == test_client.vault_client.auth_mount_point + assert test_client.vault_client.auth_mount_point == "custom" with pytest.warns( AirflowProviderDeprecationWarning, match="Method `VaultBackend.get_conn_uri` is deprecated and will be removed in a future release.", @@ -289,7 +289,7 @@ def test_get_conn_uri_engine_version_1_custom_auth_mount_point(self, mock_hvac): mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point="airflow", path="connections/test_postgres" ) - assert "postgresql://airflow:airflow@host:5432/airflow" == returned_uri + assert returned_uri == "postgresql://airflow:airflow@host:5432/airflow" @mock.patch.dict( "os.environ", @@ -360,7 +360,7 @@ def test_get_variable_value(self, mock_hvac): test_client = VaultBackend(**kwargs) returned_uri = test_client.get_variable("hello") - assert "world" == returned_uri + assert returned_uri == "world" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_variable_value_without_predefined_mount_point(self, mock_hvac): @@ -395,7 +395,7 @@ def test_get_variable_value_without_predefined_mount_point(self, mock_hvac): test_client = VaultBackend(**kwargs) returned_uri = test_client.get_variable("airflow/hello") - assert "world" == returned_uri + assert returned_uri == "world" @pytest.mark.parametrize( "mount_point, variables_path, variable_key, expected_args", @@ -445,7 +445,7 @@ def test_get_variable_value_engine_version_1( test_client = VaultBackend(**kwargs) returned_uri = test_client.get_variable(variable_key) mock_client.secrets.kv.v1.read_secret.assert_called_once_with(**expected_args) - assert "world" == returned_uri + assert returned_uri == "world" @mock.patch.dict( "os.environ", @@ -541,7 +541,7 @@ def test_get_config_value(self, mock_hvac): test_client = VaultBackend(**kwargs) returned_uri = test_client.get_config("sql_alchemy_conn") - assert "sqlite:////Users/airflow/airflow/airflow.db" == returned_uri + assert returned_uri == "sqlite:////Users/airflow/airflow/airflow.db" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_config_value_without_predefined_mount_point(self, mock_hvac): @@ -576,7 +576,7 @@ def test_get_config_value_without_predefined_mount_point(self, mock_hvac): test_client = VaultBackend(**kwargs) returned_uri = test_client.get_config("airflow/sql_alchemy_conn") - assert "sqlite:////Users/airflow/airflow/airflow.db" == returned_uri + assert returned_uri == "sqlite:////Users/airflow/airflow/airflow.db" @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_connections_path_none_value(self, mock_hvac): diff --git a/providers/tests/integration/apache/pinot/hooks/test_pinot.py b/providers/tests/integration/apache/pinot/hooks/test_pinot.py index 432521f0aeeb4..50baba4409502 100644 --- a/providers/tests/integration/apache/pinot/hooks/test_pinot.py +++ b/providers/tests/integration/apache/pinot/hooks/test_pinot.py @@ -33,4 +33,4 @@ def test_should_return_records(self): hook = PinotDbApiHook() sql = "select playerName from baseballStats ORDER BY playerName 
limit 5" records = hook.get_records(sql) - assert [["A. Harry"], ["A. Harry"], ["Aaron"], ["Aaron Albert"], ["Aaron Albert"]] == records + assert records == [["A. Harry"], ["A. Harry"], ["Aaron"], ["Aaron Albert"], ["Aaron Albert"]] diff --git a/providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py b/providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py index 919d5387a2afd..02035ce24452d 100644 --- a/providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py +++ b/providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py @@ -79,10 +79,10 @@ def test_execute(self, gcs_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert JSON_FILENAME.format(0) == obj - assert "application/json" == mime_type - assert GZIP == gzip + assert mime_type == "application/json" + assert gzip == GZIP with open(tmp_filename, "rb") as file: assert b"".join(NDJSON_LINES) == file.read() diff --git a/providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py b/providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py index 05b16162b6945..3ae0fba70f844 100644 --- a/providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py +++ b/providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py @@ -67,9 +67,9 @@ def test_init(self): @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") def test_save_as_json(self, mock_gcs_hook, mock_trino_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert FILENAME.format(0) == obj - assert "application/json" == mime_type + assert mime_type == "application/json" assert not gzip with open(tmp_filename, "rb") as file: assert b"".join(NDJSON_LINES) == file.read() @@ -121,8 +121,8 @@ def test_save_as_json_with_file_splitting(self, mock_gcs_hook, mock_trino_hook): } def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket - assert "application/json" == mime_type + assert bucket == BUCKET + assert mime_type == "application/json" assert not gzip with open(tmp_filename, "rb") as file: assert expected_upload[obj] == file.read() @@ -163,7 +163,7 @@ def test_save_as_json_with_schema_file(self, mock_gcs_hook, mock_trino_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): if obj == SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: - assert SCHEMA_JSON == file.read() + assert file.read() == SCHEMA_JSON mock_gcs_hook.return_value.upload.side_effect = _assert_upload @@ -194,15 +194,15 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == mock_gcs_hook.return_value.upload.call_count + assert mock_gcs_hook.return_value.upload.call_count == 2 @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") @patch("airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoHook") def test_save_as_csv(self, mock_trino_hook, mock_gcs_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket + assert bucket == BUCKET assert FILENAME.format(0) == obj - assert "text/csv" == mime_type + assert mime_type == "text/csv" assert not gzip with open(tmp_filename, "rb") as file: assert b"".join(CSV_LINES) 
== file.read() @@ -255,8 +255,8 @@ def test_save_as_csv_with_file_splitting(self, mock_gcs_hook, mock_trino_hook): } def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): - assert BUCKET == bucket - assert "text/csv" == mime_type + assert bucket == BUCKET + assert mime_type == "text/csv" assert not gzip with open(tmp_filename, "rb") as file: assert expected_upload[obj] == file.read() @@ -298,7 +298,7 @@ def test_save_as_csv_with_schema_file(self, mock_gcs_hook, mock_trino_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): if obj == SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: - assert SCHEMA_JSON == file.read() + assert file.read() == SCHEMA_JSON mock_gcs_hook.return_value.upload.side_effect = _assert_upload @@ -327,4 +327,4 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip, metadata=None): op.execute(None) # once for the file and once for the schema - assert 2 == mock_gcs_hook.return_value.upload.call_count + assert mock_gcs_hook.return_value.upload.call_count == 2 diff --git a/providers/tests/integration/trino/hooks/test_trino.py b/providers/tests/integration/trino/hooks/test_trino.py index 1fc7cbc3d50b3..023989605f0b1 100644 --- a/providers/tests/integration/trino/hooks/test_trino.py +++ b/providers/tests/integration/trino/hooks/test_trino.py @@ -32,7 +32,7 @@ def test_should_record_records(self): hook = TrinoHook() sql = "SELECT name FROM tpch.sf1.customer ORDER BY custkey ASC LIMIT 3" records = hook.get_records(sql) - assert [["Customer#000000001"], ["Customer#000000002"], ["Customer#000000003"]] == records + assert records == [["Customer#000000001"], ["Customer#000000002"], ["Customer#000000003"]] @pytest.mark.integration("kerberos") def test_should_record_records_with_kerberos_auth(self): @@ -46,7 +46,7 @@ def test_should_record_records_with_kerberos_auth(self): hook = TrinoHook() sql = "SELECT name FROM tpch.sf1.customer ORDER BY custkey ASC LIMIT 3" records = hook.get_records(sql) - assert [["Customer#000000001"], ["Customer#000000002"], ["Customer#000000003"]] == records + assert records == [["Customer#000000001"], ["Customer#000000002"], ["Customer#000000003"]] @mock.patch.dict("os.environ", AIRFLOW_CONN_TRINO_DEFAULT="trino://airflow@trino:8080/") def test_openlineage_methods(self): diff --git a/providers/tests/microsoft/azure/secrets/test_key_vault.py b/providers/tests/microsoft/azure/secrets/test_key_vault.py index 2c791f87a387a..60040804e1000 100644 --- a/providers/tests/microsoft/azure/secrets/test_key_vault.py +++ b/providers/tests/microsoft/azure/secrets/test_key_vault.py @@ -75,7 +75,7 @@ def test_get_variable(self, mock_client): backend = AzureKeyVaultBackend() returned_uri = backend.get_variable("hello") mock_client.get_secret.assert_called_with(name="airflow-variables-hello") - assert "world" == returned_uri + assert returned_uri == "world" @mock.patch(f"{KEY_VAULT_MODULE}.AzureKeyVaultBackend.client") def test_get_variable_non_existent_key(self, mock_client): diff --git a/providers/tests/mongo/hooks/test_mongo.py b/providers/tests/mongo/hooks/test_mongo.py index 5a284b69cc2af..f02ad477b4de9 100644 --- a/providers/tests/mongo/hooks/test_mongo.py +++ b/providers/tests/mongo/hooks/test_mongo.py @@ -171,7 +171,7 @@ def test_update_one(self): self.hook.update_one(collection, filter_doc, update_doc) result_obj = collection.find_one(filter="1") - assert 123 == result_obj["field"] + assert result_obj["field"] == 123 def test_update_one_with_upsert(self): collection = 
mongomock.MongoClient().db.collection @@ -182,7 +182,7 @@ def test_update_one_with_upsert(self): self.hook.update_one(collection, filter_doc, update_doc, upsert=True) result_obj = collection.find_one(filter="1") - assert 123 == result_obj["field"] + assert result_obj["field"] == 123 def test_update_many(self): collection = mongomock.MongoClient().db.collection @@ -196,10 +196,10 @@ def test_update_many(self): self.hook.update_many(collection, filter_doc, update_doc) result_obj = collection.find_one(filter="1") - assert 123 == result_obj["field"] + assert result_obj["field"] == 123 result_obj = collection.find_one(filter="2") - assert 123 == result_obj["field"] + assert result_obj["field"] == 123 def test_update_many_with_upsert(self): collection = mongomock.MongoClient().db.collection @@ -210,7 +210,7 @@ def test_update_many_with_upsert(self): self.hook.update_many(collection, filter_doc, update_doc, upsert=True) result_obj = collection.find_one(filter="1") - assert 123 == result_obj["field"] + assert result_obj["field"] == 123 def test_replace_one(self): collection = mongomock.MongoClient().db.collection @@ -222,11 +222,11 @@ def test_replace_one(self): self.hook.replace_one(collection, obj1) result_obj = collection.find_one(filter="1") - assert "test_value_1_updated" == result_obj["field"] + assert result_obj["field"] == "test_value_1_updated" # Other document should stay intact result_obj = collection.find_one(filter="2") - assert "test_value_2" == result_obj["field"] + assert result_obj["field"] == "test_value_2" def test_replace_one_with_filter(self): collection = mongomock.MongoClient().db.collection @@ -238,11 +238,11 @@ def test_replace_one_with_filter(self): self.hook.replace_one(collection, obj1, {"field": "test_value_1"}) result_obj = collection.find_one(filter="1") - assert "test_value_1_updated" == result_obj["field"] + assert result_obj["field"] == "test_value_1_updated" # Other document should stay intact result_obj = collection.find_one(filter="2") - assert "test_value_2" == result_obj["field"] + assert result_obj["field"] == "test_value_2" def test_replace_one_with_upsert(self): collection = mongomock.MongoClient().db.collection @@ -251,7 +251,7 @@ def test_replace_one_with_upsert(self): self.hook.replace_one(collection, obj, upsert=True) result_obj = collection.find_one(filter="1") - assert "test_value_1" == result_obj["field"] + assert result_obj["field"] == "test_value_1" def test_replace_many(self): collection = mongomock.MongoClient().db.collection @@ -264,10 +264,10 @@ def test_replace_many(self): self.hook.replace_many(collection, [obj1, obj2]) result_obj = collection.find_one(filter="1") - assert "test_value_1_updated" == result_obj["field"] + assert result_obj["field"] == "test_value_1_updated" result_obj = collection.find_one(filter="2") - assert "test_value_2_updated" == result_obj["field"] + assert result_obj["field"] == "test_value_2_updated" def test_replace_many_with_upsert(self): collection = mongomock.MongoClient().db.collection @@ -277,10 +277,10 @@ def test_replace_many_with_upsert(self): self.hook.replace_many(collection, [obj1, obj2], upsert=True) result_obj = collection.find_one(filter="1") - assert "test_value_1" == result_obj["field"] + assert result_obj["field"] == "test_value_1" result_obj = collection.find_one(filter="2") - assert "test_value_2" == result_obj["field"] + assert result_obj["field"] == "test_value_2" def test_create_uri_with_all_creds(self): self.hook.connection.login = "test_user" @@ -312,7 +312,7 @@ def test_delete_one(self): 
self.hook.delete_one(collection, {"_id": "1"}) - assert 0 == collection.count_documents({}) + assert collection.count_documents({}) == 0 def test_delete_many(self): collection = mongomock.MongoClient().db.collection @@ -322,7 +322,7 @@ def test_delete_many(self): self.hook.delete_many(collection, {"field": "value"}) - assert 0 == collection.count_documents({}) + assert collection.count_documents({}) == 0 def test_find_one(self): collection = mongomock.MongoClient().db.collection diff --git a/providers/tests/mysql/hooks/test_mysql.py b/providers/tests/mysql/hooks/test_mysql.py index 6643c6f5b407d..0e21047b107b5 100644 --- a/providers/tests/mysql/hooks/test_mysql.py +++ b/providers/tests/mysql/hooks/test_mysql.py @@ -305,7 +305,7 @@ def test_bulk_dump(self): self.cur.execute.assert_called_once_with("SELECT * INTO OUTFILE %s FROM table", ("/tmp/file",)) def test_serialize_cell(self): - assert "foo" == self.db_hook._serialize_cell("foo", None) + assert self.db_hook._serialize_cell("foo", None) == "foo" def test_bulk_load_custom(self): self.db_hook.bulk_load_custom( diff --git a/providers/tests/opensearch/log/test_os_task_handler.py b/providers/tests/opensearch/log/test_os_task_handler.py index af534dfbd5902..97e3054ddef47 100644 --- a/providers/tests/opensearch/log/test_os_task_handler.py +++ b/providers/tests/opensearch/log/test_os_task_handler.py @@ -196,7 +196,7 @@ def test_read(self, ti): ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert len(logs[0]) == 1 assert ( @@ -215,7 +215,7 @@ def test_read_with_patterns(self, ti): ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert len(logs[0]) == 1 assert ( @@ -244,11 +244,11 @@ def test_read_with_patterns_no_match(self, ti): ti, 1, {"offset": 0, "last_log_timestamp": str(ts), "end_of_log": False} ) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) - assert [[]] == logs + assert logs == [[]] assert not metadatas[0]["end_of_log"] - assert "0" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "0" # last_log_timestamp won't change if no log lines read. 
assert timezone.parse(metadatas[0]["last_log_timestamp"]) == ts @@ -288,7 +288,7 @@ def test_read_missing_logs(self, seconds, create_task_instance): ): logs, metadatas = self.os_task_handler.read(ti, 1, {"offset": 0, "last_log_timestamp": str(ts)}) - assert 1 == len(logs) + assert len(logs) == 1 if seconds > 5: # we expect a log not found message when checking began more than 5 seconds ago assert len(logs[0]) == 1 @@ -302,12 +302,12 @@ def test_read_missing_logs(self, seconds, create_task_instance): assert logs == [[]] assert metadatas[0]["end_of_log"] is False assert len(logs) == len(metadatas) - assert "0" == metadatas[0]["offset"] + assert metadatas[0]["offset"] == "0" assert timezone.parse(metadatas[0]["last_log_timestamp"]) == ts def test_read_with_none_metadata(self, ti): logs, metadatas = self.os_task_handler.read(ti, 1) - assert 1 == len(logs) + assert len(logs) == 1 assert len(logs) == len(metadatas) assert ( logs[0][0][-1] == "Dependencies all met for dep_context=non-requeueable" @@ -362,7 +362,7 @@ def test_close_closed(self, ti): with open( os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: - assert 0 == len(log_file.read()) + assert len(log_file.read()) == 0 def test_close_with_no_handler(self, ti): self.os_task_handler.set_context(ti) @@ -371,7 +371,7 @@ def test_close_with_no_handler(self, ti): with open( os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: - assert 0 == len(log_file.read()) + assert len(log_file.read()) == 0 assert self.os_task_handler.closed def test_close_with_no_stream(self, ti): @@ -394,15 +394,15 @@ def test_close_with_no_stream(self, ti): assert self.os_task_handler.closed def test_render_log_id(self, ti): - assert self.LOG_ID == self.os_task_handler._render_log_id(ti, 1) + assert self.os_task_handler._render_log_id(ti, 1) == self.LOG_ID self.os_task_handler.json_format = True - assert self.JSON_LOG_ID == self.os_task_handler._render_log_id(ti, 1) + assert self.os_task_handler._render_log_id(ti, 1) == self.JSON_LOG_ID # def test_clean_date(self): clean_execution_date = self.os_task_handler._clean_date(datetime(2016, 7, 8, 9, 10, 11, 12)) - assert "2016_07_08T09_10_11_000012" == clean_execution_date + assert clean_execution_date == "2016_07_08T09_10_11_000012" @mock.patch("sys.__stdout__", new_callable=StringIO) def test_dynamic_offset(self, stdout_mock, ti, time_machine): diff --git a/providers/tests/opsgenie/hooks/test_opsgenie.py b/providers/tests/opsgenie/hooks/test_opsgenie.py index 1cdd024ced674..deadb98343b9e 100644 --- a/providers/tests/opsgenie/hooks/test_opsgenie.py +++ b/providers/tests/opsgenie/hooks/test_opsgenie.py @@ -81,11 +81,11 @@ def setup_method(self): def test_get_api_key(self): hook = OpsgenieAlertHook(opsgenie_conn_id=self.conn_id) api_key = hook._get_api_key() - assert "eb243592-faa2-4ba2-a551q-1afdf565c889" == api_key + assert api_key == "eb243592-faa2-4ba2-a551q-1afdf565c889" def test_get_conn_defaults_host(self): hook = OpsgenieAlertHook() - assert "https://api.opsgenie.com" == hook.get_conn().api_client.configuration.host + assert hook.get_conn().api_client.configuration.host == "https://api.opsgenie.com" def test_get_conn_custom_host(self): conn_id = "custom_host_opsgenie_test" @@ -99,7 +99,7 @@ def test_get_conn_custom_host(self): ) hook = OpsgenieAlertHook(conn_id) - assert "https://app.eu.opsgenie.com" == hook.get_conn().api_client.configuration.host + assert hook.get_conn().api_client.configuration.host == 
"https://app.eu.opsgenie.com" def test_verify_api_key_set(self): hook = OpsgenieAlertHook(opsgenie_conn_id=self.conn_id) diff --git a/providers/tests/opsgenie/operators/test_opsgenie.py b/providers/tests/opsgenie/operators/test_opsgenie.py index 33a1766025979..4fda7e1b60412 100644 --- a/providers/tests/opsgenie/operators/test_opsgenie.py +++ b/providers/tests/opsgenie/operators/test_opsgenie.py @@ -94,7 +94,7 @@ def test_properties(self): # Given / When operator = OpsgenieCreateAlertOperator(task_id="opsgenie_alert_job", dag=self.dag, **self._config) - assert "opsgenie_default" == operator.opsgenie_conn_id + assert operator.opsgenie_conn_id == "opsgenie_default" assert self._config["message"] == operator.message assert self._config["alias"] == operator.alias assert self._config["description"] == operator.description @@ -138,7 +138,7 @@ def test_properties(self): task_id="opsgenie_test_properties_job", identifier="id", dag=self.dag, **self._config ) - assert "opsgenie_default" == operator.opsgenie_conn_id + assert operator.opsgenie_conn_id == "opsgenie_default" assert self._config["user"] == operator.user assert self._config["note"] == operator.note assert self._config["source"] == operator.source diff --git a/providers/tests/papermill/operators/test_papermill.py b/providers/tests/papermill/operators/test_papermill.py index 5c51e814a9b8f..94d0b980ef641 100644 --- a/providers/tests/papermill/operators/test_papermill.py +++ b/providers/tests/papermill/operators/test_papermill.py @@ -208,5 +208,5 @@ def test_render_template(self, create_task_instance_of_operator): assert task.output_nb == "/tmp/out-test_render_template.ipynb" # Test render other templated attributes - assert "python3" == task.kernel_name - assert "python" == task.language_name + assert task.kernel_name == "python3" + assert task.language_name == "python" diff --git a/providers/tests/postgres/hooks/test_postgres.py b/providers/tests/postgres/hooks/test_postgres.py index fa65bf59f9e00..23611a9c70fca 100644 --- a/providers/tests/postgres/hooks/test_postgres.py +++ b/providers/tests/postgres/hooks/test_postgres.py @@ -324,7 +324,7 @@ def test_get_uri_from_connection_without_database_override(self): port=1, ) ) - assert "postgresql://login:password@host:1/database" == self.db_hook.get_uri() + assert self.db_hook.get_uri() == "postgresql://login:password@host:1/database" def test_get_uri_from_connection_with_database_override(self): hook = PostgresHook(database="database-override") @@ -338,7 +338,7 @@ def test_get_uri_from_connection_with_database_override(self): port=1, ) ) - assert "postgresql://login:password@host:1/database-override" == hook.get_uri() + assert hook.get_uri() == "postgresql://login:password@host:1/database-override" def test_schema_kwarg_database_kwarg_compatibility(self): database = "database-override" diff --git a/providers/tests/presto/hooks/test_presto.py b/providers/tests/presto/hooks/test_presto.py index cec977b64124b..bd4e2e589f0dd 100644 --- a/providers/tests/presto/hooks/test_presto.py +++ b/providers/tests/presto/hooks/test_presto.py @@ -292,5 +292,5 @@ def test_split_sql_string(self): ) def test_serialize_cell(self): - assert "foo" == self.db_hook._serialize_cell("foo", None) - assert 1 == self.db_hook._serialize_cell(1, None) + assert self.db_hook._serialize_cell("foo", None) == "foo" + assert self.db_hook._serialize_cell(1, None) == 1 diff --git a/providers/tests/smtp/hooks/test_smtp.py b/providers/tests/smtp/hooks/test_smtp.py index 06fbb246c215e..ead0d02229b0d 100644 --- 
a/providers/tests/smtp/hooks/test_smtp.py +++ b/providers/tests/smtp/hooks/test_smtp.py @@ -179,8 +179,8 @@ def test_send_smtp(self, mock_smtplib): ) assert mock_send_mime.called _, call_args = mock_send_mime.call_args - assert "from" == call_args["from_addr"] - assert ["to"] == call_args["to_addrs"] + assert call_args["from_addr"] == "from" + assert call_args["to_addrs"] == ["to"] msg = call_args["msg"] assert "Subject: subject" in msg assert "From: from" in msg diff --git a/providers/tests/snowflake/hooks/test_snowflake.py b/providers/tests/snowflake/hooks/test_snowflake.py index d75f1a4baf14c..f8696f81f249c 100644 --- a/providers/tests/snowflake/hooks/test_snowflake.py +++ b/providers/tests/snowflake/hooks/test_snowflake.py @@ -541,7 +541,7 @@ def test_hook_parameters_should_take_precedence(self): authenticator="TEST_AUTH", session_parameters={"AA": "AAA"}, ) - assert { + assert hook._get_conn_params == { "account": "TEST_ACCOUNT", "application": "AIRFLOW", "authenticator": "TEST_AUTH", @@ -553,11 +553,11 @@ def test_hook_parameters_should_take_precedence(self): "session_parameters": {"AA": "AAA"}, "user": "user", "warehouse": "TEST_WAREHOUSE", - } == hook._get_conn_params - assert ( + } + assert hook.get_uri() == ( "snowflake://user:pw@TEST_ACCOUNT.TEST_REGION/TEST_DATABASE/TEST_SCHEMA" "?application=AIRFLOW&authenticator=TEST_AUTH&role=TEST_ROLE&warehouse=TEST_WAREHOUSE" - ) == hook.get_uri() + ) @pytest.mark.parametrize( "sql,expected_sql,expected_query_ids", diff --git a/providers/tests/ssh/hooks/test_ssh.py b/providers/tests/ssh/hooks/test_ssh.py index ac40d01c6f68c..65e1858a6148d 100644 --- a/providers/tests/ssh/hooks/test_ssh.py +++ b/providers/tests/ssh/hooks/test_ssh.py @@ -541,7 +541,7 @@ def test_tunnel(self): hook.get_tunnel(local_port=2135, remote_port=2134), ): server_output = server_handle.stdout.read(5) - assert b"ready" == server_output + assert server_output == b"ready" socket = socket.socket() socket.connect(("localhost", 2135)) response = socket.recv(5) diff --git a/providers/tests/ssh/operators/test_ssh.py b/providers/tests/ssh/operators/test_ssh.py index bedd6c5373b42..06ea865fc4481 100644 --- a/providers/tests/ssh/operators/test_ssh.py +++ b/providers/tests/ssh/operators/test_ssh.py @@ -95,7 +95,7 @@ def test_hook_created_correctly(self, cmd_timeout, cmd_timeout_expected): ) assert conn_timeout == task.hook.conn_timeout assert cmd_timeout_expected == task.hook.cmd_timeout - assert "ssh_default" == task.hook.ssh_conn_id + assert task.hook.ssh_conn_id == "ssh_default" @pytest.mark.parametrize( ("enable_xcom_pickling", "output", "expected"), diff --git a/providers/tests/standard/operators/test_python.py b/providers/tests/standard/operators/test_python.py index a72073537aa4f..67af558e7ce1f 100644 --- a/providers/tests/standard/operators/test_python.py +++ b/providers/tests/standard/operators/test_python.py @@ -292,7 +292,7 @@ def test_provide_context_does_not_fail(self): """Ensures that provide_context doesn't break dags in 2.0.""" def func(custom, dag): - assert 1 == custom, "custom should be 1" + assert custom == 1, "custom should be 1" assert dag is not None, "dag should be set" error_message = "Invalid arguments were passed to PythonOperator \\(task_id: task_test-provide-context-does-not-fail\\). 
Invalid arguments were:\n\\*\\*kwargs: {'provide_context': True}" @@ -302,7 +302,7 @@ def func(custom, dag): def test_context_with_conflicting_op_args(self): def func(custom, dag): - assert 1 == custom, "custom should be 1" + assert custom == 1, "custom should be 1" assert dag is not None, "dag should be set" self.run_as_task(func, op_kwargs={"custom": 1}) diff --git a/providers/tests/standard/utils/test_python_virtualenv.py b/providers/tests/standard/utils/test_python_virtualenv.py index b5d31679aa5bb..378261a4e75ed 100644 --- a/providers/tests/standard/utils/test_python_virtualenv.py +++ b/providers/tests/standard/utils/test_python_virtualenv.py @@ -83,7 +83,7 @@ def test_should_create_virtualenv_pip(self, mock_execute_in_subprocess): python_bin = prepare_virtualenv( venv_directory="/VENV", python_bin="pythonVER", system_site_packages=False, requirements=[] ) - assert "/VENV/bin/python" == python_bin + assert python_bin == "/VENV/bin/python" mock_execute_in_subprocess.assert_called_once_with(["pythonVER", "-m", "venv", "/VENV"]) @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") @@ -92,7 +92,7 @@ def test_should_create_virtualenv_uv(self, mock_execute_in_subprocess): python_bin = prepare_virtualenv( venv_directory="/VENV", python_bin="pythonVER", system_site_packages=False, requirements=[] ) - assert "/VENV/bin/python" == python_bin + assert python_bin == "/VENV/bin/python" mock_execute_in_subprocess.assert_called_once_with( ["uv", "venv", "--allow-existing", "--seed", "--python", "pythonVER", "/VENV"] ) @@ -103,7 +103,7 @@ def test_should_create_virtualenv_with_system_packages_pip(self, mock_execute_in python_bin = prepare_virtualenv( venv_directory="/VENV", python_bin="pythonVER", system_site_packages=True, requirements=[] ) - assert "/VENV/bin/python" == python_bin + assert python_bin == "/VENV/bin/python" mock_execute_in_subprocess.assert_called_once_with( ["pythonVER", "-m", "venv", "/VENV", "--system-site-packages"] ) @@ -114,7 +114,7 @@ def test_should_create_virtualenv_with_system_packages_uv(self, mock_execute_in_ python_bin = prepare_virtualenv( venv_directory="/VENV", python_bin="pythonVER", system_site_packages=True, requirements=[] ) - assert "/VENV/bin/python" == python_bin + assert python_bin == "/VENV/bin/python" mock_execute_in_subprocess.assert_called_once_with( [ "uv", @@ -140,7 +140,7 @@ def test_pip_install_options_pip(self, mock_execute_in_subprocess): pip_install_options=pip_install_options, ) - assert "/VENV/bin/python" == python_bin + assert python_bin == "/VENV/bin/python" mock_execute_in_subprocess.assert_called_with( ["/VENV/bin/pip", "install", *pip_install_options, "apache-beam[gcp]"] ) @@ -157,7 +157,7 @@ def test_pip_install_options_uv(self, mock_execute_in_subprocess): pip_install_options=pip_install_options, ) - assert "/VENV/bin/python" == python_bin + assert python_bin == "/VENV/bin/python" mock_execute_in_subprocess.assert_called_with( ["uv", "pip", "install", "--python", "/VENV/bin/python", *pip_install_options, "apache-beam[gcp]"] ) @@ -171,7 +171,7 @@ def test_should_create_virtualenv_with_extra_packages_pip(self, mock_execute_in_ system_site_packages=False, requirements=["apache-beam[gcp]"], ) - assert "/VENV/bin/python" == python_bin + assert python_bin == "/VENV/bin/python" mock_execute_in_subprocess.assert_any_call(["pythonVER", "-m", "venv", "/VENV"]) @@ -186,7 +186,7 @@ def test_should_create_virtualenv_with_extra_packages_uv(self, mock_execute_in_s system_site_packages=False, 
requirements=["apache-beam[gcp]"], ) - assert "/VENV/bin/python" == python_bin + assert python_bin == "/VENV/bin/python" mock_execute_in_subprocess.assert_called_with( ["uv", "pip", "install", "--python", "/VENV/bin/python", "apache-beam[gcp]"] diff --git a/providers/tests/telegram/hooks/test_telegram.py b/providers/tests/telegram/hooks/test_telegram.py index f1ec25dafb008..0702a10e74543 100644 --- a/providers/tests/telegram/hooks/test_telegram.py +++ b/providers/tests/telegram/hooks/test_telegram.py @@ -77,13 +77,13 @@ def test_should_raise_exception_if_conn_id_doesnt_exist(self): with pytest.raises(airflow.exceptions.AirflowNotFoundException) as ctx: TelegramHook(telegram_conn_id="telegram-webhook-non-existent") - assert "The conn_id `telegram-webhook-non-existent` isn't defined" == str(ctx.value) + assert str(ctx.value) == "The conn_id `telegram-webhook-non-existent` isn't defined" def test_should_raise_exception_if_conn_id_doesnt_contain_token(self): with pytest.raises(airflow.exceptions.AirflowException) as ctx: TelegramHook(telegram_conn_id="telegram-webhook-without-token") - assert "Missing token(password) in Telegram connection" == str(ctx.value) + assert str(ctx.value) == "Missing token(password) in Telegram connection" @mock.patch("airflow.providers.telegram.hooks.telegram.TelegramHook.get_conn") def test_should_raise_exception_if_chat_id_is_not_provided_anywhere(self, mock_get_conn): diff --git a/providers/tests/telegram/operators/test_telegram.py b/providers/tests/telegram/operators/test_telegram.py index 46916ce56763a..353f4b1f44aac 100644 --- a/providers/tests/telegram/operators/test_telegram.py +++ b/providers/tests/telegram/operators/test_telegram.py @@ -76,7 +76,7 @@ def test_should_throw_exception_if_connection_id_is_none(self): with pytest.raises(airflow.exceptions.AirflowException) as ctx: TelegramOperator(task_id="telegram", telegram_conn_id=None) - assert "No valid Telegram connection id supplied." == str(ctx.value) + assert str(ctx.value) == "No valid Telegram connection id supplied." @mock.patch("airflow.providers.telegram.operators.telegram.TelegramHook") def test_should_throw_exception_if_telegram_hook_throws_any_exception(self, mock_telegram_hook): @@ -148,7 +148,7 @@ def test_should_return_template_fields(self): text="some non empty text - higher precedence", telegram_kwargs={"custom_arg": "value", "text": "some text, that will be ignored"}, ) - assert ("text", "chat_id") == hook.template_fields + assert hook.template_fields == ("text", "chat_id") @mock.patch("airflow.providers.telegram.operators.telegram.TelegramHook") def test_should_return_templatized_text_field(self, mock_hook): diff --git a/providers/tests/teradata/utils/test_constants.py b/providers/tests/teradata/utils/test_constants.py index f4410f3b2c4d1..0760337537a11 100644 --- a/providers/tests/teradata/utils/test_constants.py +++ b/providers/tests/teradata/utils/test_constants.py @@ -54,52 +54,52 @@ def test_resume_db_status(): def test_operation_success_message(): expected_msg = "Compute Cluster %s %s operation completed successfully." - assert Constants.CC_OPR_SUCCESS_STATUS_MSG == expected_msg + assert expected_msg == Constants.CC_OPR_SUCCESS_STATUS_MSG def test_operation_failure_message(): expected_msg = "Compute Cluster %s %s operation has failed." - assert Constants.CC_OPR_FAILURE_STATUS_MSG == expected_msg + assert expected_msg == Constants.CC_OPR_FAILURE_STATUS_MSG def test_initializing_status_message(): expected_msg = "The environment is currently initializing. Please wait." 
- assert Constants.CC_OPR_INITIALIZING_STATUS_MSG == expected_msg + assert expected_msg == Constants.CC_OPR_INITIALIZING_STATUS_MSG def test_empty_profile_error_message(): expected_msg = "Please provide a valid name for the compute cluster profile." - assert Constants.CC_OPR_EMPTY_PROFILE_ERROR_MSG == expected_msg + assert expected_msg == Constants.CC_OPR_EMPTY_PROFILE_ERROR_MSG def test_non_exists_message(): expected_msg = "The specified Compute cluster is not present or The user doesn't have permission to access compute cluster." - assert Constants.CC_GRP_PRP_NON_EXISTS_MSG == expected_msg + assert expected_msg == Constants.CC_GRP_PRP_NON_EXISTS_MSG def test_unauthorized_message(): expected_msg = "The %s operation is not authorized for the user." - assert Constants.CC_GRP_PRP_UN_AUTHORIZED_MSG == expected_msg + assert expected_msg == Constants.CC_GRP_PRP_UN_AUTHORIZED_MSG def test_lake_support_only_message(): expected_msg = "Compute Groups is supported only on Vantage Cloud Lake." - assert Constants.CC_GRP_LAKE_SUPPORT_ONLY_MSG == expected_msg + assert expected_msg == Constants.CC_GRP_LAKE_SUPPORT_ONLY_MSG def test_timeout_error_message(): expected_msg = "There is an issue with the %s operation. Kindly consult the administrator for assistance." - assert Constants.CC_OPR_TIMEOUT_ERROR == expected_msg + assert expected_msg == Constants.CC_OPR_TIMEOUT_ERROR def test_exists_message(): expected_msg = "The specified Compute cluster is already exists." - assert Constants.CC_GRP_PRP_EXISTS_MSG == expected_msg + assert expected_msg == Constants.CC_GRP_PRP_EXISTS_MSG def test_empty_copy_profile_error_message(): expected_msg = "Please provide a valid name for the source and target compute profile." - assert Constants.CC_OPR_EMPTY_COPY_PROFILE_ERROR_MSG == expected_msg + assert expected_msg == Constants.CC_OPR_EMPTY_COPY_PROFILE_ERROR_MSG def test_timeout_value(): diff --git a/providers/tests/trino/hooks/test_trino.py b/providers/tests/trino/hooks/test_trino.py index 696cf28bd8bf2..1cebc4eaaa469 100644 --- a/providers/tests/trino/hooks/test_trino.py +++ b/providers/tests/trino/hooks/test_trino.py @@ -381,5 +381,5 @@ def test_connection_failure(self, mock_conn): assert msg == "Test" def test_serialize_cell(self): - assert "foo" == self.db_hook._serialize_cell("foo", None) - assert 1 == self.db_hook._serialize_cell(1, None) + assert self.db_hook._serialize_cell("foo", None) == "foo" + assert self.db_hook._serialize_cell(1, None) == 1 diff --git a/pyproject.toml b/pyproject.toml index 46230e5dc8fb8..47d1d177fd3db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -244,6 +244,8 @@ extend-select = [ "PGH004", # Use specific rule codes when using noqa "PGH005", # Invalid unittest.mock.Mock methods/attributes/properties "S101", # Checks use `assert` outside the test cases, test cases should be added into the exclusions + "SIM300", # Checks for conditions that position a constant on the left-hand side of the comparison + # operator, rather than the right-hand side. "B004", # Checks for use of hasattr(x, "__call__") and replaces it with callable(x) "B006", # Checks for uses of mutable objects as function argument defaults. 
"B007", # Checks for unused variables in the loop diff --git a/task_sdk/tests/defintions/test_dag.py b/task_sdk/tests/defintions/test_dag.py index d2250194aab0a..49699b673534d 100644 --- a/task_sdk/tests/defintions/test_dag.py +++ b/task_sdk/tests/defintions/test_dag.py @@ -34,7 +34,7 @@ class TestDag: def test_dag_topological_sort_dag_without_tasks(self): dag = DAG("dag", schedule=None, start_date=DEFAULT_DATE, default_args={"owner": "owner1"}) - assert () == dag.topological_sort() + assert dag.topological_sort() == () def test_dag_naive_start_date_string(self): DAG("DAG", schedule=None, default_args={"start_date": "2019-06-01"}) @@ -120,7 +120,7 @@ def test_params_not_passed_is_empty_dict(self): dag = DAG("test-dag", schedule=None) assert isinstance(dag.params, ParamsDict) - assert 0 == len(dag.params) + assert len(dag.params) == 0 def test_params_passed_and_params_in_default_args_no_override(self): """ diff --git a/tests/always/test_connection.py b/tests/always/test_connection.py index 3cc24b6d9ce38..86037bcd4b825 100644 --- a/tests/always/test_connection.py +++ b/tests/always/test_connection.py @@ -592,11 +592,11 @@ def test_from_json_special_characters(self, val, expected): ) def test_using_env_var(self): conn = SqliteHook.get_connection(conn_id="test_uri") - assert "ec2.compute.com" == conn.host - assert "the_database" == conn.schema - assert "username" == conn.login - assert "password!" == conn.password - assert 5432 == conn.port + assert conn.host == "ec2.compute.com" + assert conn.schema == "the_database" + assert conn.login == "username" + assert conn.password == "password!" + assert conn.port == 5432 self.mask_secret.assert_has_calls([mock.call("password!"), mock.call(quote("password!"))]) @@ -608,8 +608,8 @@ def test_using_env_var(self): ) def test_using_unix_socket_env_var(self): conn = SqliteHook.get_connection(conn_id="test_uri_no_creds") - assert "ec2.compute.com" == conn.host - assert "the_database" == conn.schema + assert conn.host == "ec2.compute.com" + assert conn.schema == "the_database" assert conn.login is None assert conn.password is None assert conn.port is None @@ -623,16 +623,16 @@ def test_param_setup(self): password="airflow", schema="airflow", ) - assert "localhost" == conn.host - assert "airflow" == conn.schema - assert "airflow" == conn.login - assert "airflow" == conn.password + assert conn.host == "localhost" + assert conn.schema == "airflow" + assert conn.login == "airflow" + assert conn.password == "airflow" assert conn.port is None @pytest.mark.db_test def test_env_var_priority(self): conn = SqliteHook.get_connection(conn_id="airflow_db") - assert "ec2.compute.com" != conn.host + assert conn.host != "ec2.compute.com" with mock.patch.dict( "os.environ", @@ -641,11 +641,11 @@ def test_env_var_priority(self): }, ): conn = SqliteHook.get_connection(conn_id="airflow_db") - assert "ec2.compute.com" == conn.host - assert "the_database" == conn.schema - assert "username" == conn.login - assert "password" == conn.password - assert 5432 == conn.port + assert conn.host == "ec2.compute.com" + assert conn.schema == "the_database" + assert conn.login == "username" + assert conn.password == "password" + assert conn.port == 5432 @mock.patch.dict( "os.environ", @@ -657,10 +657,10 @@ def test_env_var_priority(self): def test_dbapi_get_uri(self): conn = BaseHook.get_connection(conn_id="test_uri") hook = conn.get_hook() - assert "postgresql://username:password@ec2.compute.com:5432/the_database" == hook.get_uri() + assert hook.get_uri() == 
"postgresql://username:password@ec2.compute.com:5432/the_database" conn2 = BaseHook.get_connection(conn_id="test_uri_no_creds") hook2 = conn2.get_hook() - assert "postgresql://ec2.compute.com/the_database" == hook2.get_uri() + assert hook2.get_uri() == "postgresql://ec2.compute.com/the_database" @mock.patch.dict( "os.environ", @@ -674,7 +674,7 @@ def test_dbapi_get_sqlalchemy_engine(self): hook = conn.get_hook() engine = hook.get_sqlalchemy_engine() assert isinstance(engine, sqlalchemy.engine.Engine) - assert "postgresql://username:password@ec2.compute.com:5432/the_database" == str(engine.url) + assert str(engine.url) == "postgresql://username:password@ec2.compute.com:5432/the_database" @mock.patch.dict( "os.environ", diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py index 1960f403b04ec..140e5d2d15098 100644 --- a/tests/always/test_project_structure.py +++ b/tests/always/test_project_structure.py @@ -281,7 +281,7 @@ def test_missing_examples(self): are used in any of the example dags """ classes = self.list_of_classes() - assert 0 != len(classes), "Failed to retrieve operators, override class_paths if needed" + assert len(classes) != 0, "Failed to retrieve operators, override class_paths if needed" classes = set(classes.keys()) for example in self.example_paths(): classes -= get_imports_from_file(example) @@ -566,4 +566,4 @@ def test_no_illegal_suffixes(self): invalid_files = [f for f in files if f.endswith(tuple(illegal_suffixes))] - assert [] == invalid_files + assert invalid_files == [] diff --git a/tests/always/test_providers_manager.py b/tests/always/test_providers_manager.py index 3558a8cbdd760..4af0c72971363 100644 --- a/tests/always/test_providers_manager.py +++ b/tests/always/test_providers_manager.py @@ -71,7 +71,7 @@ def test_providers_are_loaded(self): # just a coherence check - no exact number as otherwise we would have to update # several tests if we add new connections/provider which is not ideal assert len(provider_list) > 65 - assert [] == self._caplog.records + assert self._caplog.records == [] def test_hooks_deprecation_warnings_generated(self): with pytest.warns(expected_warning=DeprecationWarning, match="hook-class-names") as warning_records: @@ -101,7 +101,7 @@ def test_hooks_deprecation_warnings_not_generated(self): package_or_source="package", ) providers_manager._discover_hooks() - assert [] == [w.message for w in warning_records if "hook-class-names" in str(w.message)] + assert [w.message for w in warning_records if "hook-class-names" in str(w.message)] == [] def test_warning_logs_generated(self): providers_manager = ProvidersManager() @@ -235,7 +235,7 @@ def test_hooks(self): print(record.message, file=sys.stderr) print(record.exc_info, file=sys.stderr) raise AssertionError("There are warnings generated during hook imports. Please fix them") - assert [] == [w.message for w in warning_records if "hook-class-names" in str(w.message)] + assert [w.message for w in warning_records if "hook-class-names" in str(w.message)] == [] @pytest.mark.execution_timeout(150) def test_hook_values(self): @@ -264,7 +264,7 @@ def test_hook_values(self): real_warning_count += 1 if real_warning_count: raise AssertionError("There are warnings generated during hook imports. 
Please fix them") - assert [] == [w.message for w in warning_records if "hook-class-names" in str(w.message)] + assert [w.message for w in warning_records if "hook-class-names" in str(w.message)] == [] def test_connection_form_widgets(self): provider_manager = ProvidersManager() @@ -459,7 +459,7 @@ def test_optional_feature_no_warning(self, mock_importlib_import_string): providers_manager._import_hook( hook_class_name=None, provider_info=None, package_name=None, connection_type="test_connection" ) - assert [] == self._caplog.messages + assert self._caplog.messages == [] @patch("airflow.providers_manager.import_string") def test_optional_feature_debug(self, mock_importlib_import_string): @@ -472,9 +472,9 @@ def test_optional_feature_debug(self, mock_importlib_import_string): providers_manager._import_hook( hook_class_name=None, provider_info=None, package_name=None, connection_type="test_connection" ) - assert [ + assert self._caplog.messages == [ "Optional provider feature disabled when importing 'HookClass' from 'test_package' package" - ] == self._caplog.messages + ] @pytest.mark.parametrize( diff --git a/tests/always/test_secrets.py b/tests/always/test_secrets.py index e58c8ccac6ec3..347956a891a0b 100644 --- a/tests/always/test_secrets.py +++ b/tests/always/test_secrets.py @@ -62,7 +62,7 @@ def test_initialize_secrets_backends(self): backends = initialize_secrets_backends() backend_classes = [backend.__class__.__name__ for backend in backends] - assert 3 == len(backends) + assert len(backends) == 3 assert "SystemsManagerParameterStoreBackend" in backend_classes @conf_vars( @@ -115,7 +115,7 @@ def test_backend_fallback_to_env_var(self, mock_get_connection): # Assert that SystemsManagerParameterStoreBackend.get_conn_uri was called mock_get_connection.assert_called_once_with(conn_id="test_mysql") - assert "mysql://airflow:airflow@host:5432/airflow" == conn.get_uri() + assert conn.get_uri() == "mysql://airflow:airflow@host:5432/airflow" @pytest.mark.db_test @@ -160,7 +160,7 @@ def test_backend_fallback_to_default_var(self): the value returned is default_var """ variable_value = Variable.get(key="test_var", default_var="new") - assert "new" == variable_value + assert variable_value == "new" @conf_vars( { @@ -190,14 +190,14 @@ def test_backend_variable_order(self, mock_secret_get, mock_meta_get): mock_secret_get.return_value = None mock_meta_get.return_value = None - assert "a_venv_value" == Variable.get(key="MYVAR") + assert Variable.get(key="MYVAR") == "a_venv_value" mock_secret_get.assert_called_with(key="MYVAR") mock_meta_get.assert_not_called() mock_secret_get.return_value = None mock_meta_get.return_value = "a_metastore_value" - assert "a_metastore_value" == Variable.get(key="not_myvar") + assert Variable.get(key="not_myvar") == "a_metastore_value" mock_meta_get.assert_called_once_with(key="not_myvar") mock_secret_get.return_value = "a_secret_value" - assert "a_secret_value" == Variable.get(key="not_myvar") + assert Variable.get(key="not_myvar") == "a_secret_value" diff --git a/tests/always/test_secrets_backends.py b/tests/always/test_secrets_backends.py index 9a87de3f3c3b4..87cbd18f0478b 100644 --- a/tests/always/test_secrets_backends.py +++ b/tests/always/test_secrets_backends.py @@ -91,15 +91,15 @@ def test_connection_metastore_secrets_backend(self): def test_variable_env_secrets_backend(self): env_secrets_backend = EnvironmentVariablesBackend() variable_value = env_secrets_backend.get_variable(key="hello") - assert "World" == variable_value + assert variable_value == "World" assert 
env_secrets_backend.get_variable(key="non_existent_key") is None - assert "" == env_secrets_backend.get_variable(key="empty_str") + assert env_secrets_backend.get_variable(key="empty_str") == "" def test_variable_metastore_secrets_backend(self): Variable.set(key="hello", value="World") Variable.set(key="empty_str", value="") metastore_backend = MetastoreBackend() variable_value = metastore_backend.get_variable(key="hello") - assert "World" == variable_value + assert variable_value == "World" assert metastore_backend.get_variable(key="non_existent_key") is None - assert "" == metastore_backend.get_variable(key="empty_str") + assert metastore_backend.get_variable(key="empty_str") == "" diff --git a/tests/always/test_secrets_local_filesystem.py b/tests/always/test_secrets_local_filesystem.py index e8b179787c1b8..7fca7a8dca129 100644 --- a/tests/always/test_secrets_local_filesystem.py +++ b/tests/always/test_secrets_local_filesystem.py @@ -426,7 +426,7 @@ def test_should_read_variable(self, tmp_path): path = tmp_path / "testfile.var.env" path.write_text("KEY_A=VAL_A") backend = LocalFilesystemBackend(variables_file_path=os.fspath(path)) - assert "VAL_A" == backend.get_variable("KEY_A") + assert backend.get_variable("KEY_A") == "VAL_A" assert backend.get_variable("KEY_B") is None @conf_vars( @@ -450,7 +450,7 @@ def test_should_read_connection(self, tmp_path): path = tmp_path / "testfile.env" path.write_text("CONN_A=mysql://host_a") backend = LocalFilesystemBackend(connections_file_path=os.fspath(path)) - assert "mysql://host_a" == backend.get_connection("CONN_A").get_uri() + assert backend.get_connection("CONN_A").get_uri() == "mysql://host_a" assert backend.get_variable("CONN_B") is None def test_files_are_optional(self): diff --git a/tests/api_connexion/endpoints/test_asset_endpoint.py b/tests/api_connexion/endpoints/test_asset_endpoint.py index 502fb8b9e873a..db064ac5b443e 100644 --- a/tests/api_connexion/endpoints/test_asset_endpoint.py +++ b/tests/api_connexion/endpoints/test_asset_endpoint.py @@ -117,12 +117,12 @@ def test_should_respond_404(self): environ_overrides={"REMOTE_USER": "test"}, ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "The Asset with uri: `s3://bucket/key` was not found", "status": 404, "title": "Asset not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self, session): self._create_asset(session) diff --git a/tests/api_connexion/endpoints/test_connection_endpoint.py b/tests/api_connexion/endpoints/test_connection_endpoint.py index 01f02db26ad02..3f27028aa8dfc 100644 --- a/tests/api_connexion/endpoints/test_connection_endpoint.py +++ b/tests/api_connexion/endpoints/test_connection_endpoint.py @@ -160,12 +160,12 @@ def test_should_respond_404(self): "/api/v1/connections/invalid-connection", environ_overrides={"REMOTE_USER": "test"} ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "The Connection with connection_id: `invalid-connection` was not found", "status": 404, "title": "Connection not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self): response = self.client.get("/api/v1/connections/test-connection-id") @@ -506,12 +506,12 @@ def test_patch_should_respond_404_not_found(self): "/api/v1/connections/test-connection-id", json=payload, environ_overrides={"REMOTE_USER": "test"} ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "The 
Connection with connection_id: `test-connection-id` was not found", "status": 404, "title": "Connection not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self, session): self._create_connection(session) diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py index d025bbf6d396d..1c35f46754299 100644 --- a/tests/api_connexion/endpoints/test_dag_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_endpoint.py @@ -165,7 +165,7 @@ def test_should_respond_200(self): self._create_dag_models(1) response = self.client.get("/api/v1/dags/TEST_DAG_1", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 200 - assert { + assert response.json == { "dag_id": "TEST_DAG_1", "dag_display_name": "TEST_DAG_1", "description": None, @@ -189,7 +189,7 @@ def test_should_respond_200(self): "last_parsed_time": None, "timetable_description": None, "has_import_errors": False, - } == response.json + } @conf_vars({("webserver", "secret_key"): "mysecret"}) def test_should_respond_200_with_schedule_none(self, session): @@ -203,7 +203,7 @@ def test_should_respond_200_with_schedule_none(self, session): session.commit() response = self.client.get("/api/v1/dags/TEST_DAG_1", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 200 - assert { + assert response.json == { "dag_id": "TEST_DAG_1", "dag_display_name": "TEST_DAG_1", "description": None, @@ -227,7 +227,7 @@ def test_should_respond_200_with_schedule_none(self, session): "last_parsed_time": None, "timetable_description": None, "has_import_errors": False, - } == response.json + } def test_should_respond_404(self): response = self.client.get("/api/v1/dags/INVALID_DAG", environ_overrides={"REMOTE_USER": "test"}) @@ -669,7 +669,7 @@ def test_should_respond_200(self, session, url_safe_serializer): file_token2 = url_safe_serializer.dumps("/tmp/dag_2.py") assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -723,7 +723,7 @@ def test_should_respond_200(self, session, url_safe_serializer): }, ], "total_entries": 2, - } == response.json + } def test_only_active_true_returns_active_dags(self, url_safe_serializer): self._create_dag_models(1) @@ -731,7 +731,7 @@ def test_only_active_true_returns_active_dags(self, url_safe_serializer): response = self.client.get("api/v1/dags?only_active=True", environ_overrides={"REMOTE_USER": "test"}) file_token = url_safe_serializer.dumps("/tmp/dag_1.py") assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -760,7 +760,7 @@ def test_only_active_true_returns_active_dags(self, url_safe_serializer): } ], "total_entries": 1, - } == response.json + } def test_only_active_false_returns_all_dags(self, url_safe_serializer): self._create_dag_models(1) @@ -769,7 +769,7 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer): file_token = url_safe_serializer.dumps("/tmp/dag_1.py") file_token_2 = url_safe_serializer.dumps("/tmp/dag_del_1.py") assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -823,7 +823,7 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer): }, ], "total_entries": 2, - } == response.json + } @pytest.mark.parametrize( "url, expected_dag_ids", @@ -918,7 +918,7 @@ def test_should_respond_200_and_handle_pagination(self, url, expected_dag_ids): dag_ids = 
[dag["dag_id"] for dag in response.json["dags"]] assert expected_dag_ids == dag_ids - assert 10 == response.json["total_entries"] + assert response.json["total_entries"] == 10 def test_should_respond_200_default_limit(self): self._create_dag_models(101) @@ -927,8 +927,8 @@ def test_should_respond_200_default_limit(self): assert response.status_code == 200 - assert 100 == len(response.json["dags"]) - assert 101 == response.json["total_entries"] + assert len(response.json["dags"]) == 100 + assert response.json["total_entries"] == 101 def test_should_raises_401_unauthenticated(self): response = self.client.get("api/v1/dags") @@ -948,7 +948,7 @@ def test_paused_true_returns_paused_dags(self, url_safe_serializer): response = self.client.get("api/v1/dags?paused=True", environ_overrides={"REMOTE_USER": "test"}) file_token = url_safe_serializer.dumps("/tmp/dag_1.py") assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_PAUSED_1", @@ -977,7 +977,7 @@ def test_paused_true_returns_paused_dags(self, url_safe_serializer): } ], "total_entries": 1, - } == response.json + } def test_paused_false_returns_unpaused_dags(self, url_safe_serializer): self._create_dag_models(1, dag_id_prefix="TEST_DAG_PAUSED", is_paused=True) @@ -985,7 +985,7 @@ def test_paused_false_returns_unpaused_dags(self, url_safe_serializer): response = self.client.get("api/v1/dags?paused=False", environ_overrides={"REMOTE_USER": "test"}) file_token = url_safe_serializer.dumps("/tmp/dag_1.py") assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_UNPAUSED_1", @@ -1014,7 +1014,7 @@ def test_paused_false_returns_unpaused_dags(self, url_safe_serializer): } ], "total_entries": 1, - } == response.json + } def test_paused_none_returns_all_dags(self, url_safe_serializer): self._create_dag_models(1, dag_id_prefix="TEST_DAG_PAUSED", is_paused=True) @@ -1022,7 +1022,7 @@ def test_paused_none_returns_all_dags(self, url_safe_serializer): response = self.client.get("api/v1/dags", environ_overrides={"REMOTE_USER": "test"}) file_token = url_safe_serializer.dumps("/tmp/dag_1.py") assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_PAUSED_1", @@ -1076,7 +1076,7 @@ def test_paused_none_returns_all_dags(self, url_safe_serializer): }, ], "total_entries": 2, - } == response.json + } def test_should_return_specified_fields(self): self._create_dag_models(2) @@ -1322,7 +1322,7 @@ def test_should_respond_200_on_patch_is_paused(self, session, url_safe_serialize ) assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -1376,7 +1376,7 @@ def test_should_respond_200_on_patch_is_paused(self, session, url_safe_serialize }, ], "total_entries": 2, - } == response.json + } _check_last_log(session, dag_id=None, event="api.patch_dags", logical_date=None) def test_should_respond_200_on_patch_is_paused_using_update_mask(self, session, url_safe_serializer): @@ -1397,7 +1397,7 @@ def test_should_respond_200_on_patch_is_paused_using_update_mask(self, session, ) assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -1451,7 +1451,7 @@ def test_should_respond_200_on_patch_is_paused_using_update_mask(self, session, }, ], "total_entries": 2, - } == response.json + } _check_last_log(session, dag_id=None, event="api.patch_dags", logical_date=None) def test_wrong_value_as_update_mask_rasise(self, session): @@ 
-1512,7 +1512,7 @@ def test_only_active_true_returns_active_dags(self, url_safe_serializer, session environ_overrides={"REMOTE_USER": "test"}, ) assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -1541,7 +1541,7 @@ def test_only_active_true_returns_active_dags(self, url_safe_serializer, session } ], "total_entries": 1, - } == response.json + } _check_last_log(session, dag_id=None, event="api.patch_dags", logical_date=None) def test_only_active_false_returns_all_dags(self, url_safe_serializer, session): @@ -1558,7 +1558,7 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer, session): file_token_2 = url_safe_serializer.dumps("/tmp/dag_del_1.py") assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -1612,7 +1612,7 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer, session): }, ], "total_entries": 2, - } == response.json + } _check_last_log(session, dag_id=None, event="api.patch_dags", logical_date=None) @pytest.mark.parametrize( @@ -1725,7 +1725,7 @@ def test_should_respond_200_and_handle_pagination(self, url, expected_dag_ids): dag_ids = [dag["dag_id"] for dag in response.json["dags"]] assert expected_dag_ids == dag_ids - assert 10 == response.json["total_entries"] + assert response.json["total_entries"] == 10 def test_should_respond_200_default_limit(self): self._create_dag_models(101) @@ -1740,8 +1740,8 @@ def test_should_respond_200_default_limit(self): assert response.status_code == 200 - assert 100 == len(response.json["dags"]) - assert 101 == response.json["total_entries"] + assert len(response.json["dags"]) == 100 + assert response.json["total_entries"] == 101 def test_should_raises_401_unauthenticated(self): response = self.client.patch( @@ -1779,7 +1779,7 @@ def test_should_respond_200_and_pause_dags(self, url_safe_serializer): ) assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -1833,7 +1833,7 @@ def test_should_respond_200_and_pause_dags(self, url_safe_serializer): }, ], "total_entries": 2, - } == response.json + } @provide_session def test_should_respond_200_and_pause_dag_pattern(self, session, url_safe_serializer): @@ -1850,7 +1850,7 @@ def test_should_respond_200_and_pause_dag_pattern(self, session, url_safe_serial ) assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_1", @@ -1904,7 +1904,7 @@ def test_should_respond_200_and_pause_dag_pattern(self, session, url_safe_serial }, ], "total_entries": 2, - } == response.json + } dags_not_updated = session.query(DagModel).filter(~DagModel.is_paused) assert len(dags_not_updated.all()) == 8 @@ -1923,7 +1923,7 @@ def test_should_respond_200_and_reverse_ordering(self, session, url_safe_seriali ) assert response.status_code == 200 - assert { + assert response.json == { "dags": [ { "dag_id": "TEST_DAG_2", @@ -1977,7 +1977,7 @@ def test_should_respond_200_and_reverse_ordering(self, session, url_safe_seriali }, ], "total_entries": 2, - } == response.json + } def test_should_respons_400_dag_id_pattern_missing(self): self._create_dag_models(1) diff --git a/tests/api_connexion/endpoints/test_dag_parsing.py b/tests/api_connexion/endpoints/test_dag_parsing.py index 4a0a68879933d..1df80a905d92e 100644 --- a/tests/api_connexion/endpoints/test_dag_parsing.py +++ b/tests/api_connexion/endpoints/test_dag_parsing.py @@ -80,7 +80,7 @@ def test_201_and_400_requests(self, 
url_safe_serializer, session): response = self.client.put( url, headers={"Accept": "application/json"}, environ_overrides={"REMOTE_USER": "test"} ) - assert 201 == response.status_code + assert response.status_code == 201 parsing_requests = session.scalars(select(DagPriorityParsingRequest)).all() assert parsing_requests[0].fileloc == test_dag.fileloc @@ -88,7 +88,7 @@ def test_201_and_400_requests(self, url_safe_serializer, session): response = self.client.put( url, headers={"Accept": "application/json"}, environ_overrides={"REMOTE_USER": "test"} ) - assert 201 == response.status_code + assert response.status_code == 201 parsing_requests = session.scalars(select(DagPriorityParsingRequest)).all() assert parsing_requests[0].fileloc == test_dag.fileloc diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py index fbd946ea32e83..5b4133c683958 100644 --- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py @@ -1237,12 +1237,12 @@ def test_should_respond_400_if_a_dag_has_import_errors(self, session): json={}, environ_overrides={"REMOTE_USER": "test"}, ) - assert { + assert response.json == { "detail": "DAG with dag_id: 'TEST_DAG_ID' has import errors", "status": 400, "title": "DAG cannot be triggered", "type": EXCEPTIONS_LINK_MAP[400], - } == response.json + } def test_should_response_200_for_matching_logical_date(self): logical_date = "2020-11-10T08:25:56.939143+00:00" @@ -1366,12 +1366,12 @@ def test_response_404(self): environ_overrides={"REMOTE_USER": "test"}, ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "DAG with dag_id: 'TEST_DAG_ID' not found", "status": 404, "title": "DAG not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } @pytest.mark.parametrize( "url, request_json, expected_response", diff --git a/tests/api_connexion/endpoints/test_dag_source_endpoint.py b/tests/api_connexion/endpoints/test_dag_source_endpoint.py index f926ecdc9d91e..800843dc927f7 100644 --- a/tests/api_connexion/endpoints/test_dag_source_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_source_endpoint.py @@ -80,9 +80,9 @@ def test_should_respond_200_text(self, test_dag): url, headers={"Accept": "text/plain"}, environ_overrides={"REMOTE_USER": "test"} ) - assert 200 == response.status_code + assert response.status_code == 200 assert dag_content == response.data.decode() - assert "text/plain" == response.headers["Content-Type"] + assert response.headers["Content-Type"] == "text/plain" def test_should_respond_200_json(self, session, test_dag): dag_content = self._get_dag_file_code(test_dag.fileloc) @@ -92,13 +92,13 @@ def test_should_respond_200_json(self, session, test_dag): url, headers={"Accept": "application/json"}, environ_overrides={"REMOTE_USER": "test"} ) - assert 200 == response.status_code + assert response.status_code == 200 assert response.json == { "content": dag_content, "dag_id": TEST_DAG_ID, "version_number": 1, } - assert "application/json" == response.headers["Content-Type"] + assert response.headers["Content-Type"] == "application/json" @pytest.mark.parametrize("accept", ["application/json", "text/plain"]) def test_should_respond_200_version(self, accept, session, test_dag): @@ -123,15 +123,15 @@ def test_should_respond_200_version(self, accept, session, test_dag): url = f"/api/v1/dagSources/{TEST_DAG_ID}" response = self.client.get(url, headers={"Accept": accept}, environ_overrides={"REMOTE_USER": 
"test"}) - assert 200 == response.status_code + assert response.status_code == 200 if accept == "text/plain": assert dag_content2 == response.data.decode() assert dag_content != response.data.decode() - assert "text/plain" == response.headers["Content-Type"] + assert response.headers["Content-Type"] == "text/plain" else: assert dag_content2 == response.json["content"] assert dag_content != response.json["content"] - assert "application/json" == response.headers["Content-Type"] + assert response.headers["Content-Type"] == "application/json" assert response.json == { "content": dag_content2, "dag_id": TEST_DAG_ID, @@ -145,7 +145,7 @@ def test_should_respond_404(self): url, headers={"Accept": "application/json"}, environ_overrides={"REMOTE_USER": "test"} ) - assert 404 == response.status_code + assert response.status_code == 404 def test_should_raises_401_unauthenticated(self): response = self.client.get( diff --git a/tests/api_connexion/endpoints/test_event_log_endpoint.py b/tests/api_connexion/endpoints/test_event_log_endpoint.py index 8d598a3259cd1..8574ae9f3bf2e 100644 --- a/tests/api_connexion/endpoints/test_event_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_event_log_endpoint.py @@ -120,12 +120,12 @@ def test_should_respond_200(self, log_model): def test_should_respond_404(self): response = self.client.get("/api/v1/eventLogs/1", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 404 - assert { + assert response.json == { "detail": None, "status": 404, "title": "Event Log not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self, log_model): event_log_id = log_model.id diff --git a/tests/api_connexion/endpoints/test_extra_link_endpoint.py b/tests/api_connexion/endpoints/test_extra_link_endpoint.py index 4a14ef6f481a1..9fe083d824edb 100644 --- a/tests/api_connexion/endpoints/test_extra_link_endpoint.py +++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py @@ -128,13 +128,13 @@ def _create_dag(self): def test_should_respond_404(self, url, expected_title, expected_detail): response = self.client.get(url, environ_overrides={"REMOTE_USER": "test"}) - assert 404 == response.status_code - assert { + assert response.status_code == 404 + assert response.json == { "detail": expected_detail, "status": 404, "title": expected_title, "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raise_403_forbidden(self): response = self.client.get( @@ -157,8 +157,8 @@ def test_should_respond_200(self): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code, response.data - assert {"Google Custom": "http://google.com/custom_base_link?search=TEST_LINK_VALUE"} == response.json + assert response.status_code == 200, response.data + assert response.json == {"Google Custom": "http://google.com/custom_base_link?search=TEST_LINK_VALUE"} @mock_plugin_manager(plugins=[]) def test_should_respond_200_missing_xcom(self): @@ -167,8 +167,8 @@ def test_should_respond_200_missing_xcom(self): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code, response.data - assert {"Google Custom": None} == response.json + assert response.status_code == 200, response.data + assert response.json == {"Google Custom": None} @mock_plugin_manager(plugins=[]) def test_should_respond_200_multiple_links(self): @@ -184,11 +184,11 @@ def test_should_respond_200_multiple_links(self): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code, 
response.data - assert { + assert response.status_code == 200, response.data + assert response.json == { "BigQuery Console #1": "https://console.cloud.google.com/bigquery?j=TEST_LINK_VALUE_1", "BigQuery Console #2": "https://console.cloud.google.com/bigquery?j=TEST_LINK_VALUE_2", - } == response.json + } @mock_plugin_manager(plugins=[]) def test_should_respond_200_multiple_links_missing_xcom(self): @@ -197,8 +197,8 @@ def test_should_respond_200_multiple_links_missing_xcom(self): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code, response.data - assert {"BigQuery Console #1": None, "BigQuery Console #2": None} == response.json + assert response.status_code == 200, response.data + assert response.json == {"BigQuery Console #1": None, "BigQuery Console #2": None} def test_should_respond_200_support_plugins(self): class GoogleLink(BaseOperatorLink): @@ -232,12 +232,12 @@ class AirflowTestPlugin(AirflowPlugin): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code, response.data - assert { + assert response.status_code == 200, response.data + assert response.json == { "Google Custom": None, "Google": "https://www.google.com", "S3": ( "https://s3.amazonaws.com/airflow-logs/" "TEST_DAG_ID/TEST_SINGLE_LINK/2020-01-01T00%3A00%3A00%2B00%3A00" ), - } == response.json + } diff --git a/tests/api_connexion/endpoints/test_health_endpoint.py b/tests/api_connexion/endpoints/test_health_endpoint.py index 7d73b338e5105..a04a3be751950 100644 --- a/tests/api_connexion/endpoints/test_health_endpoint.py +++ b/tests/api_connexion/endpoints/test_health_endpoint.py @@ -55,8 +55,8 @@ def test_healthy_scheduler_status(self, session): session.add(job) session.commit() resp_json = self.client.get("/api/v1/health").json - assert "healthy" == resp_json["metadatabase"]["status"] - assert "healthy" == resp_json["scheduler"]["status"] + assert resp_json["metadatabase"]["status"] == "healthy" + assert resp_json["scheduler"]["status"] == "healthy" assert ( last_scheduler_heartbeat_for_testing_1.isoformat() == resp_json["scheduler"]["latest_scheduler_heartbeat"] @@ -70,8 +70,8 @@ def test_unhealthy_scheduler_is_slow(self, session): session.add(job) session.commit() resp_json = self.client.get("/api/v1/health").json - assert "healthy" == resp_json["metadatabase"]["status"] - assert "unhealthy" == resp_json["scheduler"]["status"] + assert resp_json["metadatabase"]["status"] == "healthy" + assert resp_json["scheduler"]["status"] == "unhealthy" assert ( last_scheduler_heartbeat_for_testing_2.isoformat() == resp_json["scheduler"]["latest_scheduler_heartbeat"] @@ -79,13 +79,13 @@ def test_unhealthy_scheduler_is_slow(self, session): def test_unhealthy_scheduler_no_job(self): resp_json = self.client.get("/api/v1/health").json - assert "healthy" == resp_json["metadatabase"]["status"] - assert "unhealthy" == resp_json["scheduler"]["status"] + assert resp_json["metadatabase"]["status"] == "healthy" + assert resp_json["scheduler"]["status"] == "unhealthy" assert resp_json["scheduler"]["latest_scheduler_heartbeat"] is None @mock.patch.object(SchedulerJobRunner, "most_recent_job") def test_unhealthy_metadatabase_status(self, most_recent_job_mock): most_recent_job_mock.side_effect = Exception resp_json = self.client.get("/api/v1/health").json - assert "unhealthy" == resp_json["metadatabase"]["status"] + assert resp_json["metadatabase"]["status"] == "unhealthy" assert resp_json["scheduler"]["latest_scheduler_heartbeat"] is None diff --git 
a/tests/api_connexion/endpoints/test_import_error_endpoint.py b/tests/api_connexion/endpoints/test_import_error_endpoint.py index daac44a597ecd..6697c776d9052 100644 --- a/tests/api_connexion/endpoints/test_import_error_endpoint.py +++ b/tests/api_connexion/endpoints/test_import_error_endpoint.py @@ -88,22 +88,22 @@ def test_response_200(self, session): assert response.status_code == 200 response_data = response.json response_data["import_error_id"] = 1 - assert { + assert response_data == { "filename": "Lorem_ipsum.py", "import_error_id": 1, "stack_trace": "Lorem ipsum", "timestamp": "2020-06-10T12:00:00+00:00", - } == response_data + } def test_response_404(self): response = self.client.get("/api/v1/importErrors/2", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 404 - assert { + assert response.json == { "detail": "The ImportError with import_error_id: `2` was not found", "status": 404, "title": "Import error not found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self, session): import_error = ParseImportError( @@ -143,7 +143,7 @@ def test_get_import_errors(self, session): assert response.status_code == 200 response_data = response.json self._normalize_import_errors(response_data["import_errors"]) - assert { + assert response_data == { "import_errors": [ { "filename": "Lorem_ipsum.py", @@ -159,7 +159,7 @@ def test_get_import_errors(self, session): }, ], "total_entries": 2, - } == response_data + } def test_get_import_errors_order_by(self, session): import_error = [ @@ -180,7 +180,7 @@ def test_get_import_errors_order_by(self, session): assert response.status_code == 200 response_data = response.json self._normalize_import_errors(response_data["import_errors"]) - assert { + assert response_data == { "import_errors": [ { "filename": "Lorem_ipsum1.py", @@ -196,7 +196,7 @@ def test_get_import_errors_order_by(self, session): }, ], "total_entries": 2, - } == response_data + } def test_order_by_raises_400_for_invalid_attr(self, session): import_error = [ diff --git a/tests/api_connexion/endpoints/test_log_endpoint.py b/tests/api_connexion/endpoints/test_log_endpoint.py index cce496a1a6548..4be804424ed7e 100644 --- a/tests/api_connexion/endpoints/test_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_log_endpoint.py @@ -190,7 +190,7 @@ def test_should_respond_200_json(self, try_number): info = serializer.loads(response.json["continuation_token"]) assert info == {"end_of_log": True, "log_pos": 16 if try_number == 1 else 18} - assert 200 == response.status_code + assert response.status_code == 200 @pytest.mark.parametrize( "request_url, expected_filename, extra_query_string, try_number", @@ -237,7 +237,7 @@ def test_should_respond_200_text_plain( headers={"Accept": "text/plain"}, environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code + assert response.status_code == 200 log_content = "Log for testing." if try_number == 1 else "Log for testing 2." assert "localhost\n" in response.data.decode("utf-8") @@ -293,7 +293,7 @@ def test_get_logs_of_removed_task(self, request_url, expected_filename, extra_qu environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code + assert response.status_code == 200 log_content = "Log for testing." if try_number == 1 else "Log for testing 2." 
assert "localhost\n" in response.data.decode("utf-8") @@ -356,7 +356,7 @@ def test_get_logs_for_handler_without_read_method(self, mock_log_reader, try_num headers={"Content-Type": "application/jso"}, environ_overrides={"REMOTE_USER": "test"}, ) - assert 400 == response.status_code + assert response.status_code == 400 assert "Task log handler does not support read logs." in response.data.decode("utf-8") def test_bad_signature_raises(self): diff --git a/tests/api_connexion/endpoints/test_pool_endpoint.py b/tests/api_connexion/endpoints/test_pool_endpoint.py index 6805cefd2f4da..13f87b96c3468 100644 --- a/tests/api_connexion/endpoints/test_pool_endpoint.py +++ b/tests/api_connexion/endpoints/test_pool_endpoint.py @@ -67,7 +67,7 @@ def test_response_200(self, session): assert len(result) == 2 # accounts for the default pool as well response = self.client.get("/api/v1/pools", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 200 - assert { + assert response.json == { "pools": [ { "name": "default_pool", @@ -95,7 +95,7 @@ def test_response_200(self, session): }, ], "total_entries": 2, - } == response.json + } def test_response_200_with_order_by(self, session): pool_model = Pool(pool="test_pool_a", slots=3, include_deferred=True) @@ -105,7 +105,7 @@ def test_response_200_with_order_by(self, session): assert len(result) == 2 # accounts for the default pool as well response = self.client.get("/api/v1/pools?order_by=slots", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 200 - assert { + assert response.json == { "pools": [ { "name": "test_pool_a", @@ -133,7 +133,7 @@ def test_response_200_with_order_by(self, session): }, ], "total_entries": 2, - } == response.json + } def test_should_raises_401_unauthenticated(self): response = self.client.get("/api/v1/pools") @@ -221,7 +221,7 @@ def test_response_200(self, session): session.commit() response = self.client.get("/api/v1/pools/test_pool_a", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 200 - assert { + assert response.json == { "name": "test_pool_a", "slots": 3, "occupied_slots": 0, @@ -232,17 +232,17 @@ def test_response_200(self, session): "open_slots": 3, "description": None, "include_deferred": True, - } == response.json + } def test_response_404(self): response = self.client.get("/api/v1/pools/invalid_pool", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Pool with name:'invalid_pool' not found", "status": 404, "title": "Not Found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self): response = self.client.get("/api/v1/pools/default_pool") @@ -267,12 +267,12 @@ def test_response_204(self, session): def test_response_404(self): response = self.client.delete("api/v1/pools/invalid_pool", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Pool with name:'invalid_pool' not found", "status": 404, "title": "Not Found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self, session): pool_name = "test_pool" @@ -297,7 +297,7 @@ def test_response_200(self, session): environ_overrides={"REMOTE_USER": "test"}, ) assert response.status_code == 200 - assert { + assert response.json == { "name": "test_pool_a", "slots": 3, "occupied_slots": 0, @@ -308,7 +308,7 @@ def test_response_200(self, session): "open_slots": 3, 
"description": "test pool", "include_deferred": True, - } == response.json + } _check_last_log(session, dag_id=None, event="api.post_pool", logical_date=None) def test_response_409(self, session): @@ -322,12 +322,12 @@ def test_response_409(self, session): environ_overrides={"REMOTE_USER": "test"}, ) assert response.status_code == 409 - assert { + assert response.json == { "detail": f"Pool: {pool_name} already exists", "status": 409, "title": "Conflict", "type": EXCEPTIONS_LINK_MAP[409], - } == response.json + } @pytest.mark.parametrize( "request_json, error_detail", @@ -359,12 +359,12 @@ def test_response_400(self, request_json, error_detail): "api/v1/pools", json=request_json, environ_overrides={"REMOTE_USER": "test"} ) assert response.status_code == 400 - assert { + assert response.json == { "detail": error_detail, "status": 400, "title": "Bad Request", "type": EXCEPTIONS_LINK_MAP[400], - } == response.json + } def test_should_raises_401_unauthenticated(self): response = self.client.post("api/v1/pools", json={"name": "test_pool_a", "slots": 3}) @@ -383,7 +383,7 @@ def test_response_200(self, session): environ_overrides={"REMOTE_USER": "test"}, ) assert response.status_code == 200 - assert { + assert response.json == { "occupied_slots": 0, "queued_slots": 0, "name": "test_pool_a", @@ -394,7 +394,7 @@ def test_response_200(self, session): "slots": 3, "description": None, "include_deferred": False, - } == response.json + } _check_last_log(session, dag_id=None, event="api.patch_pool", logical_date=None) @pytest.mark.parametrize( @@ -420,12 +420,12 @@ def test_response_400(self, error_detail, request_json, session): "api/v1/pools/test_pool", json=request_json, environ_overrides={"REMOTE_USER": "test"} ) assert response.status_code == 400 - assert { + assert response.json == { "detail": error_detail, "status": 400, "title": "Bad Request", "type": EXCEPTIONS_LINK_MAP[400], - } == response.json + } def test_not_found_when_no_pool_available(self): response = self.client.patch( @@ -434,12 +434,12 @@ def test_not_found_when_no_pool_available(self): environ_overrides={"REMOTE_USER": "test"}, ) assert response.status_code == 404 - assert { + assert response.json == { "detail": "Pool with name:'test_pool' not found", "status": 404, "title": "Not Found", "type": EXCEPTIONS_LINK_MAP[404], - } == response.json + } def test_should_raises_401_unauthenticated(self, session): pool = Pool(pool="test_pool", slots=2, include_deferred=False) @@ -458,12 +458,12 @@ class TestModifyDefaultPool(TestBasePoolEndpoints): def test_delete_400(self): response = self.client.delete("api/v1/pools/default_pool", environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 400 - assert { + assert response.json == { "detail": "Default Pool can't be deleted", "status": 400, "title": "Bad Request", "type": EXCEPTIONS_LINK_MAP[400], - } == response.json + } @pytest.mark.parametrize( "status_code, url, json, expected_response", @@ -645,7 +645,7 @@ def test_response_200( session.commit() response = self.client.patch(url, json=patch_json, environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 200 - assert { + assert response.json == { "name": expected_name, "slots": expected_slots, "occupied_slots": 0, @@ -656,7 +656,7 @@ def test_response_200( "open_slots": expected_slots, "description": None, "include_deferred": expected_include_deferred, - } == response.json + } _check_last_log(session, dag_id=None, event="api.patch_pool", logical_date=None) @pytest.mark.parametrize( @@ -695,9 +695,9 @@ def 
test_response_400(self, error_detail, url, patch_json, session): session.commit() response = self.client.patch(url, json=patch_json, environ_overrides={"REMOTE_USER": "test"}) assert response.status_code == 400 - assert { + assert response.json == { "detail": error_detail, "status": 400, "title": "Bad Request", "type": EXCEPTIONS_LINK_MAP[400], - } == response.json + } diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py index 18108edfe284f..f39cff8ae7650 100644 --- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py @@ -1325,7 +1325,7 @@ def test_should_respond_200_with_reset_dag_run(self, session): ) failed_dag_runs = session.query(DagRun).filter(DagRun.state == "failed").count() - assert 200 == response.status_code + assert response.status_code == 200 expected_response = [ { "dag_id": "example_python_operator", @@ -1415,7 +1415,7 @@ def test_should_respond_200_with_dag_run_id(self, session): environ_overrides={"REMOTE_USER": "test"}, json=payload, ) - assert 200 == response.status_code + assert response.status_code == 200 expected_response = [ { "dag_id": "example_python_operator", @@ -1425,7 +1425,7 @@ def test_should_respond_200_with_dag_run_id(self, session): }, ] assert response.json["task_instances"] == expected_response - assert 1 == len(response.json["task_instances"]) + assert len(response.json["task_instances"]) == 1 _check_last_log(session, dag_id=dag_id, event="api.post_clear_task_instances", logical_date=None) def test_should_respond_200_with_include_past(self, session): @@ -1473,7 +1473,7 @@ def test_should_respond_200_with_include_past(self, session): environ_overrides={"REMOTE_USER": "test"}, json=payload, ) - assert 200 == response.status_code + assert response.status_code == 200 expected_response = [ { "dag_id": "example_python_operator", @@ -1514,7 +1514,7 @@ def test_should_respond_200_with_include_past(self, session): ] for task_instance in expected_response: assert task_instance in response.json["task_instances"] - assert 6 == len(response.json["task_instances"]) + assert len(response.json["task_instances"]) == 6 _check_last_log(session, dag_id=dag_id, event="api.post_clear_task_instances", logical_date=None) def test_should_respond_200_with_include_future(self, session): @@ -1563,7 +1563,7 @@ def test_should_respond_200_with_include_future(self, session): json=payload, ) - assert 200 == response.status_code + assert response.status_code == 200 expected_response = [ { "dag_id": "example_python_operator", @@ -1604,7 +1604,7 @@ def test_should_respond_200_with_include_future(self, session): ] for task_instance in expected_response: assert task_instance in response.json["task_instances"] - assert 6 == len(response.json["task_instances"]) + assert len(response.json["task_instances"]) == 6 _check_last_log(session, dag_id=dag_id, event="api.post_clear_task_instances", logical_date=None) def test_should_respond_404_for_nonexistent_dagrun_id(self, session): @@ -1637,7 +1637,7 @@ def test_should_respond_404_for_nonexistent_dagrun_id(self, session): json=payload, ) - assert 404 == response.status_code + assert response.status_code == 404 assert ( response.json["title"] == "Dag Run id TEST_DAG_RUN_ID_100 not found in dag example_python_operator" diff --git a/tests/api_connexion/endpoints/test_version_endpoint.py b/tests/api_connexion/endpoints/test_version_endpoint.py index 6c21985a73584..fd068cb291bee 100644 --- 
a/tests/api_connexion/endpoints/test_version_endpoint.py +++ b/tests/api_connexion/endpoints/test_version_endpoint.py @@ -39,6 +39,6 @@ def setup_attrs(self, minimal_app_for_api) -> None: def test_should_respond_200(self, mock_get_airflow_get_commit): response = self.client.get("/api/v1/version") - assert 200 == response.status_code - assert {"git_version": "GIT_COMMIT", "version": "MOCK_VERSION"} == response.json + assert response.status_code == 200 + assert response.json == {"git_version": "GIT_COMMIT", "version": "MOCK_VERSION"} mock_get_airflow_get_commit.assert_called_once_with() diff --git a/tests/api_connexion/endpoints/test_xcom_endpoint.py b/tests/api_connexion/endpoints/test_xcom_endpoint.py index 312e21a3ab118..4f0072860fd3e 100644 --- a/tests/api_connexion/endpoints/test_xcom_endpoint.py +++ b/tests/api_connexion/endpoints/test_xcom_endpoint.py @@ -118,7 +118,7 @@ def test_should_respond_200_stringify(self): f"/api/v1/dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}", environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code + assert response.status_code == 200 current_data = response.json current_data["timestamp"] = "TIMESTAMP" @@ -144,7 +144,7 @@ def test_should_respond_200_native(self): f"/api/v1/dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}?stringify=false", environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code + assert response.status_code == 200 current_data = response.json current_data["timestamp"] = "TIMESTAMP" @@ -170,7 +170,7 @@ def test_should_raise_404_for_non_existent_xcom(self): f"/api/v1/dags/nonexistentdagid/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}", environ_overrides={"REMOTE_USER": "test"}, ) - assert 404 == response.status_code + assert response.status_code == 404 assert response.json["title"] == "XCom entry not found" def test_should_raises_401_unauthenticated(self): @@ -297,7 +297,7 @@ def test_should_respond_200(self): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code + assert response.status_code == 200 response_data = response.json for xcom_entry in response_data["xcom_entries"]: xcom_entry["timestamp"] = "TIMESTAMP" @@ -344,7 +344,7 @@ def test_should_respond_200_with_tilde_and_access_to_all_dags(self): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code + assert response.status_code == 200 response_data = response.json for xcom_entry in response_data["xcom_entries"]: xcom_entry["timestamp"] = "TIMESTAMP" @@ -404,7 +404,7 @@ def assert_expected_result(expected_entries, map_index=None): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code + assert response.status_code == 200 response_data = response.json for xcom_entry in response_data["xcom_entries"]: xcom_entry["timestamp"] = "TIMESTAMP" @@ -447,7 +447,7 @@ def assert_expected_result(expected_entries, key=None): environ_overrides={"REMOTE_USER": "test"}, ) - assert 200 == response.status_code + assert response.status_code == 200 response_data = response.json for xcom_entry in response_data["xcom_entries"]: xcom_entry["timestamp"] = "TIMESTAMP" diff --git a/tests/api_connexion/schemas/test_common_schema.py b/tests/api_connexion/schemas/test_common_schema.py index 0090603e4f48c..7b5b51d06c04e 100644 --- a/tests/api_connexion/schemas/test_common_schema.py +++ b/tests/api_connexion/schemas/test_common_schema.py @@ -33,7 +33,7 @@ def test_should_serialize(self): instance = 
datetime.timedelta(days=12) schema_instance = TimeDeltaSchema() result = schema_instance.dump(instance) - assert {"__type": "TimeDelta", "days": 12, "seconds": 0, "microseconds": 0} == result + assert result == {"__type": "TimeDelta", "days": 12, "seconds": 0, "microseconds": 0} def test_should_deserialize(self): instance = {"__type": "TimeDelta", "days": 12, "seconds": 0, "microseconds": 0} @@ -48,7 +48,7 @@ def test_should_serialize(self): instance = relativedelta.relativedelta(days=+12) schema_instance = RelativeDeltaSchema() result = schema_instance.dump(instance) - assert { + assert result == { "__type": "RelativeDelta", "day": None, "days": 12, @@ -65,7 +65,7 @@ def test_should_serialize(self): "seconds": 0, "year": None, "years": 0, - } == result + } def test_should_deserialize(self): instance = {"__type": "RelativeDelta", "days": 12, "seconds": 0} diff --git a/tests/api_connexion/schemas/test_dag_schema.py b/tests/api_connexion/schemas/test_dag_schema.py index ecdfe4eb5ae94..4f1b07fb6e70f 100644 --- a/tests/api_connexion/schemas/test_dag_schema.py +++ b/tests/api_connexion/schemas/test_dag_schema.py @@ -47,7 +47,7 @@ def test_serialize_test_dag_schema(url_safe_serializer): ) serialized_dag = DAGSchema().dump(dag_model) - assert { + assert serialized_dag == { "dag_id": "test_dag_id", "dag_display_name": "test_dag_id", "description": "The description", @@ -71,7 +71,7 @@ def test_serialize_test_dag_schema(url_safe_serializer): "last_parsed_time": None, "timetable_description": None, "has_import_errors": None, - } == serialized_dag + } def test_serialize_test_dag_collection_schema(url_safe_serializer): @@ -79,7 +79,7 @@ def test_serialize_test_dag_collection_schema(url_safe_serializer): dag_model_b = DagModel(dag_id="test_dag_id_b", fileloc="/tmp/a.py") schema = DAGCollectionSchema() instance = DAGCollection(dags=[dag_model_a, dag_model_b], total_entries=2) - assert { + assert schema.dump(instance) == { "dags": [ { "dag_id": "test_dag_id_a", @@ -133,7 +133,7 @@ def test_serialize_test_dag_collection_schema(url_safe_serializer): }, ], "total_entries": 2, - } == schema.dump(instance) + } @pytest.mark.db_test diff --git a/tests/api_connexion/schemas/test_error_schema.py b/tests/api_connexion/schemas/test_error_schema.py index a682435213b51..c953925c90d43 100644 --- a/tests/api_connexion/schemas/test_error_schema.py +++ b/tests/api_connexion/schemas/test_error_schema.py @@ -53,12 +53,12 @@ def test_serialize(self, session): session.commit() serialized_data = import_error_schema.dump(import_error) serialized_data["import_error_id"] = 1 - assert { + assert serialized_data == { "filename": "lorem.py", "import_error_id": 1, "stack_trace": "Lorem Ipsum", "timestamp": "2020-06-10T12:02:44+00:00", - } == serialized_data + } class TestErrorCollectionSchema(TestErrorSchemaBase): @@ -82,7 +82,7 @@ def test_serialize(self, session): # To maintain consistency in the key sequence across the db in tests serialized_data["import_errors"][0]["import_error_id"] = 1 serialized_data["import_errors"][1]["import_error_id"] = 2 - assert { + assert serialized_data == { "import_errors": [ { "filename": "Lorem_ipsum.py", @@ -98,4 +98,4 @@ def test_serialize(self, session): }, ], "total_entries": 2, - } == serialized_data + } diff --git a/tests/api_connexion/schemas/test_pool_schemas.py b/tests/api_connexion/schemas/test_pool_schemas.py index 32b2f62ecfc31..255f2c1ee6eac 100644 --- a/tests/api_connexion/schemas/test_pool_schemas.py +++ b/tests/api_connexion/schemas/test_pool_schemas.py @@ -72,7 +72,7 @@ def 
test_serialize(self): pool_model_a = Pool(pool="test_pool_a", slots=3, include_deferred=False) pool_model_b = Pool(pool="test_pool_b", slots=3, include_deferred=True) instance = PoolCollection(pools=[pool_model_a, pool_model_b], total_entries=2) - assert { + assert pool_collection_schema.dump(instance) == { "pools": [ { "name": "test_pool_a", @@ -100,4 +100,4 @@ def test_serialize(self): }, ], "total_entries": 2, - } == pool_collection_schema.dump(instance) + } diff --git a/tests/api_connexion/test_error_handling.py b/tests/api_connexion/test_error_handling.py index d89515d05b68f..36752fac60373 100644 --- a/tests/api_connexion/test_error_handling.py +++ b/tests/api_connexion/test_error_handling.py @@ -31,9 +31,9 @@ def test_incorrect_endpoint_should_return_json(minimal_app_for_api): # Then we have parsable JSON as output - assert "Not Found" == resp.json["title"] - assert 404 == resp.json["status"] - assert 404 == resp.status_code + assert resp.json["title"] == "Not Found" + assert resp.json["status"] == 404 + assert resp.status_code == 404 def test_incorrect_endpoint_should_return_html(minimal_app_for_api): @@ -60,9 +60,9 @@ def test_incorrect_method_should_return_json(minimal_app_for_api): # Then we have parsable JSON as output - assert "Method Not Allowed" == resp.json["title"] - assert 405 == resp.json["status"] - assert 405 == resp.status_code + assert resp.json["title"] == "Method Not Allowed" + assert resp.json["status"] == 405 + assert resp.status_code == 405 def test_incorrect_method_should_return_html(minimal_app_for_api): diff --git a/tests/api_fastapi/core_api/routes/public/test_connections.py b/tests/api_fastapi/core_api/routes/public/test_connections.py index 5e58687f3aca1..0410c307fb699 100644 --- a/tests/api_fastapi/core_api/routes/public/test_connections.py +++ b/tests/api_fastapi/core_api/routes/public/test_connections.py @@ -465,9 +465,9 @@ def test_patch_should_respond_400(self, test_client, body): response = test_client.patch(f"/public/connections/{TEST_CONN_ID}", json=body) assert response.status_code == 400 print(response.json()) - assert { + assert response.json() == { "detail": "The connection_id in the request body does not match the URL parameter", - } == response.json() + } @pytest.mark.parametrize( "body", @@ -505,9 +505,9 @@ def test_patch_should_respond_400(self, test_client, body): def test_patch_should_respond_404(self, test_client, body): response = test_client.patch(f"/public/connections/{body['connection_id']}", json=body) assert response.status_code == 404 - assert { + assert response.json() == { "detail": f"The Connection with connection_id: `{body['connection_id']}` was not found", - } == response.json() + } @pytest.mark.enable_redact @pytest.mark.parametrize( diff --git a/tests/api_fastapi/core_api/routes/public/test_dag_run.py b/tests/api_fastapi/core_api/routes/public/test_dag_run.py index ed0ecd8574b78..995b98a61dda8 100644 --- a/tests/api_fastapi/core_api/routes/public/test_dag_run.py +++ b/tests/api_fastapi/core_api/routes/public/test_dag_run.py @@ -1006,8 +1006,8 @@ def test_should_respond_404(self, test_client): ) assert response.status_code == 404 assert ( - "The DagRun with dag_id: `invalid-id` and run_id: `invalid-run-id` was not found" - == response.json()["detail"] + response.json()["detail"] + == "The DagRun with dag_id: `invalid-id` and run_id: `invalid-run-id` was not found" ) diff --git a/tests/api_fastapi/core_api/routes/public/test_dag_sources.py b/tests/api_fastapi/core_api/routes/public/test_dag_sources.py index 
6c040d57f9cbb..ec075637750f9 100644 --- a/tests/api_fastapi/core_api/routes/public/test_dag_sources.py +++ b/tests/api_fastapi/core_api/routes/public/test_dag_sources.py @@ -68,7 +68,7 @@ def test_should_respond_200_text(self, test_client, test_dag): response: Response = test_client.get(f"{API_PREFIX}/{TEST_DAG_ID}", headers={"Accept": "text/plain"}) assert isinstance(response, Response) - assert 200 == response.status_code + assert response.status_code == 200 assert dag_content == response.content.decode() with pytest.raises(json.JSONDecodeError): json.loads(response.content.decode()) @@ -84,7 +84,7 @@ def test_should_respond_200_json(self, test_client, test_dag, headers): headers=headers, ) assert isinstance(response, Response) - assert 200 == response.status_code + assert response.status_code == 200 assert response.json() == { "content": dag_content, "dag_id": TEST_DAG_ID, @@ -115,7 +115,7 @@ def test_should_respond_200_version(self, test_client, accept, session, test_dag url = f"{API_PREFIX}/{TEST_DAG_ID}" response = test_client.get(url, headers={"Accept": accept}) - assert 200 == response.status_code + assert response.status_code == 200 if accept == "text/plain": assert dag_content2 == response.content.decode() assert dag_content != response.content.decode() @@ -123,7 +123,7 @@ def test_should_respond_200_version(self, test_client, accept, session, test_dag else: assert dag_content2 == response.json()["content"] assert dag_content != response.json()["content"] - assert "application/json" == response.headers["Content-Type"] + assert response.headers["Content-Type"] == "application/json" assert response.json() == { "content": dag_content2, "dag_id": TEST_DAG_ID, @@ -137,10 +137,10 @@ def test_should_respond_406_unsupport_mime_type(self, test_client): f"{API_PREFIX}/{TEST_DAG_ID}", headers={"Accept": "text/html"}, ) - assert 406 == response.status_code + assert response.status_code == 406 def test_should_respond_404(self, test_client): wrong_fileloc = "abcd1234" url = f"{API_PREFIX}/{wrong_fileloc}" response = test_client.get(url, headers={"Accept": "application/json"}) - assert 404 == response.status_code + assert response.status_code == 404 diff --git a/tests/api_fastapi/core_api/routes/public/test_log.py b/tests/api_fastapi/core_api/routes/public/test_log.py index f638b2f709a78..ae1d7df79d35a 100644 --- a/tests/api_fastapi/core_api/routes/public/test_log.py +++ b/tests/api_fastapi/core_api/routes/public/test_log.py @@ -171,7 +171,7 @@ def test_should_respond_200_json(self, try_number): info = serializer.loads(response.json()["continuation_token"]) assert info == {"end_of_log": True, "log_pos": 16 if try_number == 1 else 18} - assert 200 == response.status_code + assert response.status_code == 200 @pytest.mark.parametrize( "request_url, expected_filename, extra_query_string, try_number", @@ -217,7 +217,7 @@ def test_should_respond_200_text_plain( params={"token": token, **extra_query_string}, headers={"Accept": "text/plain"}, ) - assert 200 == response.status_code + assert response.status_code == 200 log_content = "Log for testing." if try_number == 1 else "Log for testing 2." assert "localhost\n" in response.content.decode("utf-8") @@ -272,7 +272,7 @@ def test_get_logs_of_removed_task(self, request_url, expected_filename, extra_qu headers={"Accept": "text/plain"}, ) - assert 200 == response.status_code + assert response.status_code == 200 log_content = "Log for testing." if try_number == 1 else "Log for testing 2." 
assert "localhost\n" in response.content.decode("utf-8") @@ -327,7 +327,7 @@ def test_get_logs_for_handler_without_read_method(self, mock_log_reader, try_num params={"token": token}, headers={"Content-Type": "application/jso"}, ) - assert 400 == response.status_code + assert response.status_code == 400 assert "Task log handler does not support read logs." in response.content.decode("utf-8") def test_bad_signature_raises(self): diff --git a/tests/api_fastapi/core_api/routes/public/test_monitor.py b/tests/api_fastapi/core_api/routes/public/test_monitor.py index ebc004edd8897..d736291180a31 100644 --- a/tests/api_fastapi/core_api/routes/public/test_monitor.py +++ b/tests/api_fastapi/core_api/routes/public/test_monitor.py @@ -57,8 +57,8 @@ def test_healthy_scheduler_status(self, test_client, session): assert response.status_code == 200 body = response.json() - assert "healthy" == body["metadatabase"]["status"] - assert "healthy" == body["scheduler"]["status"] + assert body["metadatabase"]["status"] == "healthy" + assert body["scheduler"]["status"] == "healthy" assert ( last_scheduler_heartbeat_for_testing_1.isoformat() == body["scheduler"]["latest_scheduler_heartbeat"] @@ -76,8 +76,8 @@ def test_unhealthy_scheduler_is_slow(self, test_client, session): assert response.status_code == 200 body = response.json() - assert "healthy" == body["metadatabase"]["status"] - assert "unhealthy" == body["scheduler"]["status"] + assert body["metadatabase"]["status"] == "healthy" + assert body["scheduler"]["status"] == "unhealthy" assert ( last_scheduler_heartbeat_for_testing_2.isoformat() == body["scheduler"]["latest_scheduler_heartbeat"] @@ -89,8 +89,8 @@ def test_unhealthy_scheduler_no_job(self, test_client): assert response.status_code == 200 body = response.json() - assert "healthy" == body["metadatabase"]["status"] - assert "unhealthy" == body["scheduler"]["status"] + assert body["metadatabase"]["status"] == "healthy" + assert body["scheduler"]["status"] == "unhealthy" assert body["scheduler"]["latest_scheduler_heartbeat"] is None @mock.patch.object(SchedulerJobRunner, "most_recent_job") @@ -101,5 +101,5 @@ def test_unhealthy_metadatabase_status(self, most_recent_job_mock, test_client): assert response.status_code == 200 body = response.json() - assert "unhealthy" == body["metadatabase"]["status"] + assert body["metadatabase"]["status"] == "unhealthy" assert body["scheduler"]["latest_scheduler_heartbeat"] is None diff --git a/tests/api_fastapi/core_api/routes/public/test_pools.py b/tests/api_fastapi/core_api/routes/public/test_pools.py index 72069aaa6cdb2..5b16052c3c86f 100644 --- a/tests/api_fastapi/core_api/routes/public/test_pools.py +++ b/tests/api_fastapi/core_api/routes/public/test_pools.py @@ -69,7 +69,7 @@ def test_delete_should_respond_400(self, test_client): response = test_client.delete("/public/pools/default_pool") assert response.status_code == 400 body = response.json() - assert "Default Pool can't be deleted" == body["detail"] + assert body["detail"] == "Default Pool can't be deleted" def test_delete_should_respond_404(self, test_client): response = test_client.delete(f"/public/pools/{POOL1_NAME}") diff --git a/tests/api_fastapi/core_api/routes/public/test_task_instances.py b/tests/api_fastapi/core_api/routes/public/test_task_instances.py index 3f008c44911cc..d97fc77b2d80a 100644 --- a/tests/api_fastapi/core_api/routes/public/test_task_instances.py +++ b/tests/api_fastapi/core_api/routes/public/test_task_instances.py @@ -1920,7 +1920,7 @@ def 
test_should_respond_200_with_reset_dag_run(self, test_client, session): ) failed_dag_runs = session.query(DagRun).filter(DagRun.state == "failed").count() - assert 200 == response.status_code + assert response.status_code == 200 expected_response = [ { "dag_id": "example_python_operator", @@ -2003,7 +2003,7 @@ def test_should_respond_200_with_dag_run_id(self, test_client, session): f"/public/dags/{dag_id}/clearTaskInstances", json=payload, ) - assert 200 == response.status_code + assert response.status_code == 200 expected_response = [ { "dag_id": "example_python_operator", @@ -2059,7 +2059,7 @@ def test_should_respond_200_with_include_past(self, test_client, session): f"/public/dags/{dag_id}/clearTaskInstances", json=payload, ) - assert 200 == response.status_code + assert response.status_code == 200 expected_response = [ { "dag_id": "example_python_operator", @@ -2094,7 +2094,7 @@ def test_should_respond_200_with_include_past(self, test_client, session): ] for task_instance in expected_response: assert task_instance in response.json()["task_instances"] - assert 6 == response.json()["total_entries"] + assert response.json()["total_entries"] == 6 def test_should_respond_200_with_include_future(self, test_client, session): dag_id = "example_python_operator" @@ -2142,7 +2142,7 @@ def test_should_respond_200_with_include_future(self, test_client, session): json=payload, ) - assert 200 == response.status_code + assert response.status_code == 200 expected_response = [ { "dag_id": "example_python_operator", @@ -2208,7 +2208,7 @@ def test_should_respond_404_for_nonexistent_dagrun_id(self, test_client, session json=payload, ) - assert 404 == response.status_code + assert response.status_code == 404 assert f"Dag Run id TEST_DAG_RUN_ID_100 not found in dag {dag_id}" in response.text @pytest.mark.parametrize( diff --git a/tests/api_fastapi/core_api/routes/public/test_variables.py b/tests/api_fastapi/core_api/routes/public/test_variables.py index a8a0e24f1df4a..348ca4c1af6c6 100644 --- a/tests/api_fastapi/core_api/routes/public/test_variables.py +++ b/tests/api_fastapi/core_api/routes/public/test_variables.py @@ -241,7 +241,7 @@ def test_patch_should_respond_400(self, test_client): ) assert response.status_code == 400 body = response.json() - assert "Invalid body, key from request body doesn't match uri parameter" == body["detail"] + assert body["detail"] == "Invalid body, key from request body doesn't match uri parameter" def test_patch_should_respond_404(self, test_client): response = test_client.patch( diff --git a/tests/api_fastapi/core_api/routes/public/test_version.py b/tests/api_fastapi/core_api/routes/public/test_version.py index aa7ee29408811..92d12ce6a88e9 100644 --- a/tests/api_fastapi/core_api/routes/public/test_version.py +++ b/tests/api_fastapi/core_api/routes/public/test_version.py @@ -46,6 +46,6 @@ class TestGetVersion(TestVersionEndpoint): def test_airflow_version_info(self, mock_get_airflow_get_commit, client): response = client().get("/public/version") - assert 200 == response.status_code - assert {"git_version": "GIT_COMMIT", "version": "MOCK_VERSION"} == response.json() + assert response.status_code == 200 + assert response.json() == {"git_version": "GIT_COMMIT", "version": "MOCK_VERSION"} mock_get_airflow_get_commit.assert_called_once_with() diff --git a/tests/charts/log_groomer.py b/tests/charts/log_groomer.py index dcbf63d537cae..f6cde85a4e63e 100644 --- a/tests/charts/log_groomer.py +++ b/tests/charts/log_groomer.py @@ -36,7 +36,7 @@ def 
test_log_groomer_collector_default_enabled(self): values=values, show_only=[f"templates/{self.folder}/{self.obj_name}-deployment.yaml"] ) - assert 2 == len(jmespath.search("spec.template.spec.containers", docs[0])) + assert len(jmespath.search("spec.template.spec.containers", docs[0])) == 2 assert f"{self.obj_name}-log-groomer" in [ c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0]) ] @@ -72,7 +72,7 @@ def test_log_groomer_collector_default_command_and_args(self): ) assert jmespath.search("spec.template.spec.containers[1].command", docs[0]) is None - assert ["bash", "/clean-logs"] == jmespath.search("spec.template.spec.containers[1].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].args", docs[0]) == ["bash", "/clean-logs"] def test_log_groomer_collector_default_retention_days(self): if self.obj_name == "dag-processor": @@ -84,10 +84,11 @@ def test_log_groomer_collector_default_retention_days(self): values=values, show_only=[f"templates/{self.folder}/{self.obj_name}-deployment.yaml"] ) - assert "AIRFLOW__LOG_RETENTION_DAYS" == jmespath.search( - "spec.template.spec.containers[1].env[0].name", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[1].env[0].name", docs[0]) + == "AIRFLOW__LOG_RETENTION_DAYS" ) - assert "15" == jmespath.search("spec.template.spec.containers[1].env[0].value", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].env[0].value", docs[0]) == "15" @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) @@ -136,8 +137,8 @@ def test_log_groomer_command_and_args_overrides_are_templated(self): show_only=[f"templates/{self.folder}/{self.obj_name}-deployment.yaml"], ) - assert ["release-name"] == jmespath.search("spec.template.spec.containers[1].command", docs[0]) - assert ["Helm"] == jmespath.search("spec.template.spec.containers[1].args", docs[0]) + assert jmespath.search("spec.template.spec.containers[1].command", docs[0]) == ["release-name"] + assert jmespath.search("spec.template.spec.containers[1].args", docs[0]) == ["Helm"] @pytest.mark.parametrize("retention_days, retention_result", [(None, None), (30, "30")]) def test_log_groomer_retention_days_overrides(self, retention_days, retention_result): @@ -154,8 +155,9 @@ def test_log_groomer_retention_days_overrides(self, retention_days, retention_re ) if retention_result: - assert "AIRFLOW__LOG_RETENTION_DAYS" == jmespath.search( - "spec.template.spec.containers[1].env[0].name", docs[0] + assert ( + jmespath.search("spec.template.spec.containers[1].env[0].name", docs[0]) + == "AIRFLOW__LOG_RETENTION_DAYS" ) assert retention_result == jmespath.search( "spec.template.spec.containers[1].env[0].value", docs[0] @@ -193,7 +195,7 @@ def test_log_groomer_resources(self): show_only=[f"templates/{self.folder}/{self.obj_name}-deployment.yaml"], ) - assert { + assert jmespath.search("spec.template.spec.containers[1].resources", docs[0]) == { "limits": { "cpu": "2", "memory": "3Gi", @@ -202,7 +204,7 @@ def test_log_groomer_resources(self): "cpu": "1", "memory": "2Gi", }, - } == jmespath.search("spec.template.spec.containers[1].resources", docs[0]) + } def test_log_groomer_has_airflow_home(self): if self.obj_name == "dag-processor": @@ -214,4 +216,4 @@ def test_log_groomer_has_airflow_home(self): values=values, show_only=[f"templates/{self.folder}/{self.obj_name}-deployment.yaml"] ) - assert "AIRFLOW_HOME" == jmespath.search("spec.template.spec.containers[1].env[1].name", docs[0]) + 
assert jmespath.search("spec.template.spec.containers[1].env[1].name", docs[0]) == "AIRFLOW_HOME" diff --git a/tests/cli/commands/test_config_command.py b/tests/cli/commands/test_config_command.py index e7db7dac3274e..6207beede9208 100644 --- a/tests/cli/commands/test_config_command.py +++ b/tests/cli/commands/test_config_command.py @@ -214,7 +214,7 @@ def test_should_display_value(self): with contextlib.redirect_stdout(StringIO()) as temp_stdout: config_command.get_value(self.parser.parse_args(["config", "get-value", "core", "test_key"])) - assert "test_value" == temp_stdout.getvalue().strip() + assert temp_stdout.getvalue().strip() == "test_value" @mock.patch("airflow.cli.commands.config_command.conf") def test_should_not_raise_exception_when_section_for_config_with_value_defined_elsewhere_is_missing( diff --git a/tests/cli/commands/test_dag_command.py b/tests/cli/commands/test_dag_command.py index 998716d51f645..bfbeb57a12795 100644 --- a/tests/cli/commands/test_dag_command.py +++ b/tests/cli/commands/test_dag_command.py @@ -551,10 +551,10 @@ def test_trigger_dag_output_as_json(self): out = temp_stdout.getvalue().strip().splitlines()[-1] parsed_out = json.loads(out) - assert 1 == len(parsed_out) - assert "example_bash_operator" == parsed_out[0]["dag_id"] - assert "trigger_dag_xxx" == parsed_out[0]["dag_run_id"] - assert {"conf1": "val1", "conf2": "val2"} == parsed_out[0]["conf"] + assert len(parsed_out) == 1 + assert parsed_out[0]["dag_id"] == "example_bash_operator" + assert parsed_out[0]["dag_run_id"] == "trigger_dag_xxx" + assert parsed_out[0]["conf"] == {"conf1": "val1", "conf2": "val2"} def test_delete_dag(self): DM = DagModel diff --git a/tests/cli/commands/test_db_command.py b/tests/cli/commands/test_db_command.py index c80e513a8c91b..3331bb4410923 100644 --- a/tests/cli/commands/test_db_command.py +++ b/tests/cli/commands/test_db_command.py @@ -239,13 +239,13 @@ def test_cli_shell_postgres(self, mock_execute_interactive): _, kwargs = mock_execute_interactive.call_args env = kwargs["env"] postgres_env = {k: v for k, v in env.items() if k.startswith("PG")} - assert { + assert postgres_env == { "PGDATABASE": "airflow", "PGHOST": "postgres", "PGPASSWORD": "airflow", "PGPORT": "5432", "PGUSER": "postgres", - } == postgres_env + } @mock.patch("airflow.cli.commands.db_command.execute_interactive") @mock.patch( @@ -258,13 +258,13 @@ def test_cli_shell_postgres_without_port(self, mock_execute_interactive): _, kwargs = mock_execute_interactive.call_args env = kwargs["env"] postgres_env = {k: v for k, v in env.items() if k.startswith("PG")} - assert { + assert postgres_env == { "PGDATABASE": "airflow", "PGHOST": "postgres", "PGPASSWORD": "airflow", "PGPORT": "5432", "PGUSER": "postgres", - } == postgres_env + } @mock.patch( "airflow.cli.commands.db_command.settings.engine.url", diff --git a/tests/cli/commands/test_info_command.py b/tests/cli/commands/test_info_command.py index e7573631f10b9..1db61aa6346e2 100644 --- a/tests/cli/commands/test_info_command.py +++ b/tests/cli/commands/test_info_command.py @@ -49,7 +49,7 @@ def setup_method(self) -> None: def test_should_remove_pii_from_path(self): home_path = os.path.expanduser("~/airflow/config") - assert "${HOME}/airflow/config" == self.instance.process_path(home_path) + assert self.instance.process_path(home_path) == "${HOME}/airflow/config" @pytest.mark.parametrize( "before, after", diff --git a/tests/cli/commands/test_legacy_commands.py b/tests/cli/commands/test_legacy_commands.py index 6a8405bb34fb8..80d2ae314791e 100644 --- 
a/tests/cli/commands/test_legacy_commands.py +++ b/tests/cli/commands/test_legacy_commands.py @@ -67,7 +67,7 @@ def test_should_display_value(self): with pytest.raises(SystemExit) as ctx, contextlib.redirect_stderr(StringIO()) as temp_stderr: config_command.get_value(self.parser.parse_args(["worker"])) - assert 2 == ctx.value.code + assert ctx.value.code == 2 assert ( "Command `airflow worker` has been removed. " "Please use `airflow celery worker`" in temp_stderr.getvalue().strip() diff --git a/tests/cli/commands/test_variable_command.py b/tests/cli/commands/test_variable_command.py index fb9c469f4dc2a..0955af4ab7e81 100644 --- a/tests/cli/commands/test_variable_command.py +++ b/tests/cli/commands/test_variable_command.py @@ -75,14 +75,14 @@ def test_variables_get(self): with redirect_stdout(StringIO()) as stdout: variable_command.variables_get(self.parser.parse_args(["variables", "get", "foo"])) - assert '{\n "foo": "bar"\n}\n' == stdout.getvalue() + assert stdout.getvalue() == '{\n "foo": "bar"\n}\n' def test_get_variable_default_value(self): with redirect_stdout(StringIO()) as stdout: variable_command.variables_get( self.parser.parse_args(["variables", "get", "baz", "--default", "bar"]) ) - assert "bar\n" == stdout.getvalue() + assert stdout.getvalue() == "bar\n" def test_get_variable_missing_variable(self): with pytest.raises(SystemExit): @@ -118,11 +118,11 @@ def test_variables_set_different_types(self): ) # Assert value - assert {"foo": "oops"} == Variable.get("dict", deserialize_json=True) - assert ["oops"] == Variable.get("list", deserialize_json=True) - assert "hello string" == Variable.get("str") # cannot json.loads(str) - assert 42 == Variable.get("int", deserialize_json=True) - assert 42.0 == Variable.get("float", deserialize_json=True) + assert Variable.get("dict", deserialize_json=True) == {"foo": "oops"} + assert Variable.get("list", deserialize_json=True) == ["oops"] + assert Variable.get("str") == "hello string" # cannot json.loads(str) + assert Variable.get("int", deserialize_json=True) == 42 + assert Variable.get("float", deserialize_json=True) == 42.0 assert Variable.get("true", deserialize_json=True) is True assert Variable.get("false", deserialize_json=True) is False assert Variable.get("null", deserialize_json=True) is None @@ -135,7 +135,7 @@ def test_variables_set_different_types(self): ["variables", "import", "variables_types.json", "--action-on-existing-key", "skip"] ) ) - assert ["airflow"] == Variable.get("list", deserialize_json=True) # should not be overwritten + assert Variable.get("list", deserialize_json=True) == ["airflow"] # should not be overwritten # test variable import fails on existing when action is set to fail with pytest.raises(SystemExit): @@ -183,8 +183,8 @@ def test_variables_isolation(self, tmp_path): variable_command.variables_delete(self.parser.parse_args(["variables", "delete", "foo"])) variable_command.variables_import(self.parser.parse_args(["variables", "import", os.fspath(path1)])) - assert "original" == Variable.get("bar") - assert '{\n "foo": "bar"\n}' == Variable.get("foo") + assert Variable.get("bar") == "original" + assert Variable.get("foo") == '{\n "foo": "bar"\n}' # Second export variable_command.variables_export(self.parser.parse_args(["variables", "export", os.fspath(path2)])) diff --git a/tests/cli/commands/test_webserver_command.py b/tests/cli/commands/test_webserver_command.py index eb20b04885f13..58ee7c9cb6c1d 100644 --- a/tests/cli/commands/test_webserver_command.py +++ b/tests/cli/commands/test_webserver_command.py @@ 
-154,7 +154,7 @@ def test_should_detect_changes_in_directory(self, tmp_path): state_b = monitor._generate_plugin_state() assert state_a == state_b - assert 3 == len(state_a) + assert len(state_a) == 3 # Should detect new file (tmp_path / "file4.txt").write_text("A" * 400) @@ -162,7 +162,7 @@ def test_should_detect_changes_in_directory(self, tmp_path): state_c = monitor._generate_plugin_state() assert state_b != state_c - assert 4 == len(state_c) + assert len(state_c) == 4 # Should detect changes in files (tmp_path / "file4.txt").write_text("A" * 450) @@ -170,7 +170,7 @@ def test_should_detect_changes_in_directory(self, tmp_path): state_d = monitor._generate_plugin_state() assert state_c != state_d - assert 4 == len(state_d) + assert len(state_d) == 4 # Should support large files (tmp_path / "file4.txt").write_text("A" * 4_000_000) @@ -178,7 +178,7 @@ def test_should_detect_changes_in_directory(self, tmp_path): state_d = monitor._generate_plugin_state() assert state_c != state_d - assert 4 == len(state_d) + assert len(state_d) == 4 class TestCLIGetNumReadyWorkersRunning: diff --git a/tests/cli/test_cli_parser.py b/tests/cli/test_cli_parser.py index 1f14ff7b077cd..669209c8ac27a 100644 --- a/tests/cli/test_cli_parser.py +++ b/tests/cli/test_cli_parser.py @@ -104,7 +104,7 @@ def test_subcommand_arg_name_conflict(self): for com in command: conflict_arg = [arg for arg, count in Counter(com.args).items() if count > 1] assert ( - [] == conflict_arg + conflict_arg == [] ), f"Command group {group} function {com.name} have conflict args name {conflict_arg}" def test_subcommand_arg_flag_conflict(self): @@ -122,7 +122,7 @@ def test_subcommand_arg_flag_conflict(self): a.flags[0] for a in com.args if (len(a.flags) == 1 and not a.flags[0].startswith("-")) ] conflict_position = [arg for arg, count in Counter(position).items() if count > 1] - assert [] == conflict_position, ( + assert conflict_position == [], ( f"Command group {group} function {com.name} have conflict " f"position flags {conflict_position}" ) @@ -131,14 +131,14 @@ def test_subcommand_arg_flag_conflict(self): a.flags[0] for a in com.args if (len(a.flags) == 1 and a.flags[0].startswith("-")) ] + [a.flags[1] for a in com.args if len(a.flags) == 2] conflict_long_option = [arg for arg, count in Counter(long_option).items() if count > 1] - assert [] == conflict_long_option, ( + assert conflict_long_option == [], ( f"Command group {group} function {com.name} have conflict " f"long option flags {conflict_long_option}" ) short_option = [a.flags[0] for a in com.args if len(a.flags) == 2] conflict_short_option = [arg for arg, count in Counter(short_option).items() if count > 1] - assert [] == conflict_short_option, ( + assert conflict_short_option == [], ( f"Command group {group} function {com.name} have conflict " f"short option flags {conflict_short_option}" ) @@ -333,8 +333,8 @@ def test_dag_cli_should_display_help(self): parser.parse_args([*cmd_args, "--help"]) def test_positive_int(self): - assert 1 == cli_config.positive_int(allow_zero=True)("1") - assert 0 == cli_config.positive_int(allow_zero=True)("0") + assert cli_config.positive_int(allow_zero=True)("1") == 1 + assert cli_config.positive_int(allow_zero=True)("0") == 0 with pytest.raises(argparse.ArgumentTypeError): cli_config.positive_int(allow_zero=False)("0") diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py index ceea18e4563d9..f43fdece8d40b 100644 --- a/tests/core/test_configuration.py +++ b/tests/core/test_configuration.py @@ -224,18 +224,18 @@ def 
test_command_precedence(self): ("test", "key2"), ("test", "key4"), } - assert "hello" == test_conf.get("test", "key1") - assert "cmd_result" == test_conf.get("test", "key2") - assert "airflow" == test_conf.get("test", "key3") - assert "key4_result" == test_conf.get("test", "key4") - assert "value6" == test_conf.get("another", "key6") - - assert "hello" == test_conf.get("test", "key1", fallback="fb") - assert "value6" == test_conf.get("another", "key6", fallback="fb") - assert "fb" == test_conf.get("another", "key7", fallback="fb") + assert test_conf.get("test", "key1") == "hello" + assert test_conf.get("test", "key2") == "cmd_result" + assert test_conf.get("test", "key3") == "airflow" + assert test_conf.get("test", "key4") == "key4_result" + assert test_conf.get("another", "key6") == "value6" + + assert test_conf.get("test", "key1", fallback="fb") == "hello" + assert test_conf.get("another", "key6", fallback="fb") == "value6" + assert test_conf.get("another", "key7", fallback="fb") == "fb" assert test_conf.getboolean("another", "key8_boolean", fallback="True") is True - assert 10 == test_conf.getint("another", "key8_int", fallback="10") - assert 1.0 == test_conf.getfloat("another", "key8_float", fallback="1") + assert test_conf.getint("another", "key8_int", fallback="10") == 10 + assert test_conf.getfloat("another", "key8_float", fallback="1") == 1.0 assert test_conf.has_option("test", "key1") assert test_conf.has_option("test", "key2") @@ -245,14 +245,14 @@ def test_command_precedence(self): assert test_conf.has_option("another", "key6") cfg_dict = test_conf.as_dict(display_sensitive=True) - assert "cmd_result" == cfg_dict["test"]["key2"] + assert cfg_dict["test"]["key2"] == "cmd_result" assert "key2_cmd" not in cfg_dict["test"] # If we exclude _cmds then we should still see the commands to run, not # their values cfg_dict = test_conf.as_dict(include_cmds=False, display_sensitive=True) assert "key4" not in cfg_dict["test"] - assert "printf key4_result" == cfg_dict["test"]["key4_cmd"] + assert cfg_dict["test"]["key4_cmd"] == "printf key4_result" def test_can_read_dot_section(self): test_config = """[test.abc] @@ -315,7 +315,7 @@ def test_config_from_secret_backend(self, mock_hvac): ("test", "sql_alchemy_conn"), } - assert "sqlite:////Users/airflow/airflow/airflow.db" == test_conf.get("test", "sql_alchemy_conn") + assert test_conf.get("test", "sql_alchemy_conn") == "sqlite:////Users/airflow/airflow/airflow.db" def test_hidding_of_sensitive_config_values(self): test_config = """[test] @@ -330,10 +330,10 @@ def test_hidding_of_sensitive_config_values(self): ("test", "sql_alchemy_conn"), } - assert "airflow" == test_conf.get("test", "sql_alchemy_conn") + assert test_conf.get("test", "sql_alchemy_conn") == "airflow" # Hide sensitive fields asdict = test_conf.as_dict(display_sensitive=False) - assert "< hidden >" == asdict["test"]["sql_alchemy_conn"] + assert asdict["test"]["sql_alchemy_conn"] == "< hidden >" # If display_sensitive is false, then include_cmd, include_env,include_secrets must all be True # This ensures that cmd and secrets env are hidden at the appropriate method and no surprises with pytest.raises(ValueError): @@ -435,7 +435,7 @@ def test_getint(self): ): test_conf.getint("invalid", "key1") assert isinstance(test_conf.getint("valid", "key2"), int) - assert 1 == test_conf.getint("valid", "key2") + assert test_conf.getint("valid", "key2") == 1 def test_getfloat(self): """Test AirflowConfigParser.getfloat""" @@ -456,7 +456,7 @@ def test_getfloat(self): ): 
test_conf.getfloat("invalid", "key1") assert isinstance(test_conf.getfloat("valid", "key2"), float) - assert 1.23 == test_conf.getfloat("valid", "key2") + assert test_conf.getfloat("valid", "key2") == 1.23 def test_getlist(self): """Test AirflowConfigParser.getlist""" @@ -566,9 +566,9 @@ def test_remove_option(self): test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default)) test_conf.read_string(test_config) - assert "hello" == test_conf.get("test", "key1") + assert test_conf.get("test", "key1") == "hello" test_conf.remove_option("test", "key1", remove_default=False) - assert "awesome" == test_conf.get("test", "key1") + assert test_conf.get("test", "key1") == "awesome" test_conf.remove_option("test", "key2") assert not test_conf.has_option("test", "key2") @@ -591,14 +591,14 @@ def test_getsection(self): test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default)) test_conf.read_string(test_config) - assert {"key1": "hello", "key2": "airflow"} == test_conf.getsection("test") - assert { + assert test_conf.getsection("test") == {"key1": "hello", "key2": "airflow"} + assert test_conf.getsection("testsection") == { "key3": "value3", "testkey": "testvalue", "testpercent": "with%percent", - } == test_conf.getsection("testsection") + } - assert {"key": "value"} == test_conf.getsection("new_section") + assert test_conf.getsection("new_section") == {"key": "value"} assert test_conf.getsection("non_existent_section") is None @@ -623,9 +623,10 @@ def test_kubernetes_environment_variables_section(self): test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default)) test_conf.read_string(test_config) - assert {"key1": "hello", "AIRFLOW_HOME": "/root/airflow"} == test_conf.getsection( - "kubernetes_environment_variables" - ) + assert test_conf.getsection("kubernetes_environment_variables") == { + "key1": "hello", + "AIRFLOW_HOME": "/root/airflow", + } @pytest.mark.parametrize( "key, type", @@ -1089,7 +1090,7 @@ def make_config(): ): test_conf = make_config() assert test_conf.get("core", "hostname_callable") == "CarrierPigeon" - assert [] == warning + assert warning == [] @pytest.mark.parametrize( ("conf_dict", "environ", "expected"), @@ -1558,7 +1559,7 @@ def test_get_options_including_defaults(self): airflow_cfg.remove_all_read_configurations() default_options = airflow_cfg.get_options_including_defaults("core") assert "hostname_callable" in default_options - assert "airflow.utils.net.getfqdn" == airflow_cfg.get("core", "hostname_callable") + assert airflow_cfg.get("core", "hostname_callable") == "airflow.utils.net.getfqdn" assert "test-key" not in default_options no_options = airflow_cfg.get_options_including_defaults("test-section") assert no_options == [] @@ -1566,15 +1567,15 @@ def test_get_options_including_defaults(self): airflow_cfg.set("test-section", "test-key", "test-value") test_section_options = airflow_cfg.get_options_including_defaults("test-section") assert "test-key" in test_section_options - assert "airflow.utils.net.getfqdn" == airflow_cfg.get("core", "hostname_callable") + assert airflow_cfg.get("core", "hostname_callable") == "airflow.utils.net.getfqdn" airflow_cfg.add_section("core") airflow_cfg.set("core", "new-test-key", "test-value") airflow_cfg.set("core", "hostname_callable", "test-fn") all_core_options_including_defaults = airflow_cfg.get_options_including_defaults("core") assert "new-test-key" in all_core_options_including_defaults assert "dags_folder" in 
all_core_options_including_defaults - assert "test-value" == airflow_cfg.get("core", "new-test-key") - assert "test-fn" == airflow_cfg.get("core", "hostname_callable") + assert airflow_cfg.get("core", "new-test-key") == "test-value" + assert airflow_cfg.get("core", "hostname_callable") == "test-fn" assert sum(1 for option in all_core_options_including_defaults if option == "hostname_callable") == 1 diff --git a/tests/dags/test_mark_state.py b/tests/dags/test_mark_state.py index 3ce67b7cef208..1990c8c03c6ae 100644 --- a/tests/dags/test_mark_state.py +++ b/tests/dags/test_mark_state.py @@ -79,7 +79,7 @@ def check_failure(context): def test_mark_failure_externally(ti): - assert State.RUNNING == ti.state + assert ti.state == State.RUNNING with create_session() as session: ti.log.info("Marking TI as failed 'externally'") ti.state = State.FAILED @@ -100,7 +100,7 @@ def test_mark_failure_externally(ti): def test_mark_skipped_externally(ti): - assert State.RUNNING == ti.state + assert ti.state == State.RUNNING sleep(0.1) # for timeout with create_session() as session: ti.log.info("Marking TI as failed 'externally'") diff --git a/tests/decorators/test_python.py b/tests/decorators/test_python.py index 22f2f620ec20f..a90bccafa41fd 100644 --- a/tests/decorators/test_python.py +++ b/tests/decorators/test_python.py @@ -387,7 +387,7 @@ def do_run(): with self.dag_non_serialized: do_run() - assert ["some_name"] == self.dag_non_serialized.task_ids + assert self.dag_non_serialized.task_ids == ["some_name"] def test_multiple_calls(self): """Test calling task multiple times in a DAG""" @@ -398,10 +398,10 @@ def do_run(): with self.dag_non_serialized: do_run() - assert ["do_run"] == self.dag_non_serialized.task_ids + assert self.dag_non_serialized.task_ids == ["do_run"] do_run_1 = do_run() do_run_2 = do_run() - assert ["do_run", "do_run__1", "do_run__2"] == self.dag_non_serialized.task_ids + assert self.dag_non_serialized.task_ids == ["do_run", "do_run__1", "do_run__2"] assert do_run_1.operator.task_id == "do_run__1" assert do_run_2.operator.task_id == "do_run__2" diff --git a/tests/executors/test_executor_loader.py b/tests/executors/test_executor_loader.py index 83e99d7139db3..7532c878dc51f 100644 --- a/tests/executors/test_executor_loader.py +++ b/tests/executors/test_executor_loader.py @@ -82,7 +82,7 @@ def test_should_support_custom_path(self): with conf_vars({("core", "executor"): "tests.executors.test_executor_loader.FakeExecutor"}): executor = ExecutorLoader.get_default_executor() assert executor is not None - assert "FakeExecutor" == executor.__class__.__name__ + assert executor.__class__.__name__ == "FakeExecutor" assert executor.name is not None assert executor.name == ExecutorName("tests.executors.test_executor_loader.FakeExecutor") assert executor.name.connector_source == ConnectorSource.CUSTOM_PATH @@ -216,7 +216,7 @@ def test_should_support_import_executor_from_core(self, executor_config, expecte def test_should_support_import_custom_path(self, executor_config): with conf_vars({("core", "executor"): executor_config}): executor, import_source = ExecutorLoader.import_default_executor_cls() - assert "FakeExecutor" == executor.__name__ + assert executor.__name__ == "FakeExecutor" assert import_source == ConnectorSource.CUSTOM_PATH @pytest.mark.db_test diff --git a/tests/integration/executors/test_celery_executor.py b/tests/integration/executors/test_celery_executor.py index a29355da2e8ee..0f9f0b45ae9c1 100644 --- a/tests/integration/executors/test_celery_executor.py +++ 
b/tests/integration/executors/test_celery_executor.py @@ -227,7 +227,7 @@ def fake_execute_command(): executor.queued_tasks[key] = value_tuple executor.task_publish_retries[key] = 1 executor.heartbeat() - assert 0 == len(executor.queued_tasks), "Task should no longer be queued" + assert len(executor.queued_tasks) == 0, "Task should no longer be queued" assert executor.event_buffer[("fail", "fake_simple_ti", when, 0)][0] == State.FAILED def test_retry_on_error_sending_task(self, caplog): @@ -267,25 +267,25 @@ def test_retry_on_error_sending_task(self, caplog): # Test that when heartbeat is called again, task is published again to Celery Queue executor.heartbeat() assert dict(executor.task_publish_retries) == {key: 1} - assert 1 == len(executor.queued_tasks), "Task should remain in queue" + assert len(executor.queued_tasks) == 1, "Task should remain in queue" assert executor.event_buffer == {} assert f"[Try 1 of 3] Task Timeout Error for Task: ({key})." in caplog.text executor.heartbeat() assert dict(executor.task_publish_retries) == {key: 2} - assert 1 == len(executor.queued_tasks), "Task should remain in queue" + assert len(executor.queued_tasks) == 1, "Task should remain in queue" assert executor.event_buffer == {} assert f"[Try 2 of 3] Task Timeout Error for Task: ({key})." in caplog.text executor.heartbeat() assert dict(executor.task_publish_retries) == {key: 3} - assert 1 == len(executor.queued_tasks), "Task should remain in queue" + assert len(executor.queued_tasks) == 1, "Task should remain in queue" assert executor.event_buffer == {} assert f"[Try 3 of 3] Task Timeout Error for Task: ({key})." in caplog.text executor.heartbeat() assert dict(executor.task_publish_retries) == {} - assert 0 == len(executor.queued_tasks), "Task should no longer be in queue" + assert len(executor.queued_tasks) == 0, "Task should no longer be in queue" assert executor.event_buffer[("fail", "fake_simple_ti", when, 0)][0] == State.FAILED diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py index 521aea920e951..255779e370859 100644 --- a/tests/jobs/test_local_task_job.py +++ b/tests/jobs/test_local_task_job.py @@ -365,7 +365,7 @@ def test_mark_success_no_kill(self, caplog, get_test_dag, session): with timeout(30): run_job(job=job1, execute_callable=job_runner._execute) ti.refresh_from_db() - assert State.SUCCESS == ti.state + assert ti.state == State.SUCCESS assert ( "State of this instance has been externally set to success. Terminating instance." 
in caplog.text ) diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 64f97b60b3be7..4ba0b6febf8df 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -546,7 +546,7 @@ def test_execute_task_instances_is_paused_wont_execute(self, session, dag_maker) self.job_runner._critical_section_enqueue_task_instances(session) session.flush() ti1.refresh_from_db(session=session) - assert State.SCHEDULED == ti1.state + assert ti1.state == State.SCHEDULED session.rollback() # @pytest.mark.usefixtures("mock_executor") @@ -720,7 +720,7 @@ def test_find_executable_task_instances_pool(self, dag_maker): res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) session.flush() - assert 3 == len(res) + assert len(res) == 3 res_keys = [] for ti in res: res_keys.append(ti.key) @@ -861,7 +861,7 @@ def test_find_executable_task_instances_executor(self, dag_maker, mock_executors res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 5 == len(res) + assert len(res) == 5 res_ti_keys = [res_ti.key for res_ti in res] for ti in tis_tuple: assert ti.key in res_ti_keys @@ -900,7 +900,7 @@ def test_find_executable_task_instances_order_priority_with_pools(self, dag_make res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 2 == len(res) + assert len(res) == 2 assert ti3.key == res[0].key assert ti2.key == res[1].key @@ -958,14 +958,14 @@ def test_find_executable_task_instances_in_default_pool(self, dag_maker, mock_ex # Two tasks w/o pool up for execution and our default pool size is 1 res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 1 == len(res) + assert len(res) == 1 ti2.state = State.RUNNING session.flush() # One task w/o pool up for execution and one task running res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 0 == len(res) + assert len(res) == 0 session.rollback() session.close() @@ -995,7 +995,7 @@ def test_queued_task_instances_fails_with_missing_dag(self, dag_maker, session): session.flush() res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) session.flush() - assert 0 == len(res) + assert len(res) == 0 tis = dr.get_task_instances(session=session) assert len(tis) == 2 assert all(ti.state == State.FAILED for ti in tis) @@ -1018,7 +1018,7 @@ def test_nonexistent_pool(self, dag_maker): res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) session.flush() - assert 0 == len(res) + assert len(res) == 0 session.rollback() def test_infinite_pool(self, dag_maker): @@ -1045,7 +1045,7 @@ def test_infinite_pool(self, dag_maker): res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) session.flush() - assert 1 == len(res) + assert len(res) == 1 session.rollback() def test_not_enough_pool_slots(self, caplog, dag_maker): @@ -1102,7 +1102,7 @@ def test_find_executable_task_instances_none(self, dag_maker): self.job_runner = SchedulerJobRunner(job=scheduler_job, subdir=os.devnull) session = settings.Session() - assert 0 == len(self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session)) + assert len(self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session)) == 0 session.rollback() def test_tis_for_queued_dagruns_are_not_run(self, dag_maker): @@ -1128,7 +1128,7 @@ def test_tis_for_queued_dagruns_are_not_run(self, 
dag_maker): session.flush() res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 1 == len(res) + assert len(res) == 1 assert ti2.key == res[0].key ti1.refresh_from_db() ti2.refresh_from_db() @@ -1231,7 +1231,7 @@ def test_find_executable_task_instances_max_active_tis_per_dag(self, dag_maker): loader_mock.side_effect = executor.get_mock_loader_side_effect() res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 2 == len(res) + assert len(res) == 2 ti1_1.state = State.RUNNING ti2.state = State.RUNNING @@ -1244,7 +1244,7 @@ def test_find_executable_task_instances_max_active_tis_per_dag(self, dag_maker): res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 1 == len(res) + assert len(res) == 1 ti1_2.state = State.RUNNING ti1_3 = dr3.get_task_instance(task1.task_id) @@ -1255,7 +1255,7 @@ def test_find_executable_task_instances_max_active_tis_per_dag(self, dag_maker): res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 0 == len(res) + assert len(res) == 0 ti1_1.state = State.SCHEDULED ti1_2.state = State.SCHEDULED @@ -1267,7 +1267,7 @@ def test_find_executable_task_instances_max_active_tis_per_dag(self, dag_maker): res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 2 == len(res) + assert len(res) == 2 ti1_1.state = State.RUNNING ti1_2.state = State.SCHEDULED @@ -1279,7 +1279,7 @@ def test_find_executable_task_instances_max_active_tis_per_dag(self, dag_maker): res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 1 == len(res) + assert len(res) == 1 session.rollback() def test_change_state_for_executable_task_instances_no_tis_with_state(self, dag_maker): @@ -1309,7 +1309,7 @@ def test_change_state_for_executable_task_instances_no_tis_with_state(self, dag_ session.flush() res = self.job_runner._executable_task_instances_to_queued(max_tis=100, session=session) - assert 0 == len(res) + assert len(res) == 0 session.rollback() @@ -1336,7 +1336,7 @@ def test_find_executable_task_instances_not_enough_pool_slots_for_first(self, da # Schedule ti with lower priority, # because the one with higher priority is limited by a concurrency limit res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 1 == len(res) + assert len(res) == 1 assert res[0].key == ti2.key session.rollback() @@ -1373,7 +1373,7 @@ def test_find_executable_task_instances_not_enough_dag_concurrency_for_first(sel # Schedule ti with lower priority, # because the one with higher priority is limited by a concurrency limit res = self.job_runner._executable_task_instances_to_queued(max_tis=1, session=session) - assert 1 == len(res) + assert len(res) == 1 assert res[0].key == ti2.key session.rollback() @@ -1402,7 +1402,7 @@ def test_find_executable_task_instances_not_enough_task_concurrency_for_first(se # Schedule ti with lower priority, # because the one with higher priority is limited by a concurrency limit res = self.job_runner._executable_task_instances_to_queued(max_tis=1, session=session) - assert 1 == len(res) + assert len(res) == 1 assert res[0].key == ti1b.key session.rollback() @@ -1431,7 +1431,7 @@ def test_find_executable_task_instances_task_concurrency_per_dagrun_for_first(se # Schedule ti with higher priority, # because it's running in a different DAG run with 0 active tis res = 
self.job_runner._executable_task_instances_to_queued(max_tis=1, session=session) - assert 1 == len(res) + assert len(res) == 1 assert res[0].key == ti2a.key session.rollback() @@ -1464,7 +1464,7 @@ def test_find_executable_task_instances_not_enough_task_concurrency_per_dagrun_f # Schedule ti with lower priority, # because the one with higher priority is limited by a concurrency limit res = self.job_runner._executable_task_instances_to_queued(max_tis=1, session=session) - assert 1 == len(res) + assert len(res) == 1 assert res[0].key == ti1b.key session.rollback() @@ -1501,7 +1501,7 @@ def test_find_executable_task_instances_negative_open_pool_slots(self, dag_maker session.flush() res = self.job_runner._executable_task_instances_to_queued(max_tis=1, session=session) - assert 1 == len(res) + assert len(res) == 1 assert res[0].key == ti1.key session.rollback() @@ -1525,7 +1525,7 @@ def test_emit_pool_starving_tasks_metrics(self, mock_stats_gauge, dag_maker): session.flush() res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 0 == len(res) + assert len(res) == 0 mock_stats_gauge.assert_has_calls( [ @@ -1541,7 +1541,7 @@ def test_emit_pool_starving_tasks_metrics(self, mock_stats_gauge, dag_maker): session.flush() res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert 1 == len(res) + assert len(res) == 1 mock_stats_gauge.assert_has_calls( [ @@ -1733,7 +1733,7 @@ def _create_dagruns(): scheduler_job.max_tis_per_query = 6 # First pass we'll grab 6 of the 8 tasks (limited by max_tis_per_query) res = self.job_runner._critical_section_enqueue_task_instances(session) - assert 6 == res + assert res == 6 for ti in tis1[:3] + tis2[:3]: ti.refresh_from_db() assert ti.state == TaskInstanceState.QUEUED @@ -1743,11 +1743,11 @@ def _create_dagruns(): # The remaining TIs are queued res = self.job_runner._critical_section_enqueue_task_instances(session) - assert 2 == res + assert res == 2 for ti in tis1 + tis2: ti.refresh_from_db() - assert State.QUEUED == ti.state + assert ti.state == State.QUEUED @pytest.mark.parametrize( "task1_exec, task2_exec", @@ -1796,7 +1796,7 @@ def _create_dagruns(): scheduler_job.max_tis_per_query = 2 total_enqueued = self.job_runner._critical_section_enqueue_task_instances(session) - assert 2 == total_enqueued + assert total_enqueued == 2 def test_execute_task_instances_limit_slots(self, dag_maker, mock_executors): dag_id = "SchedulerJobTest.test_execute_task_instances_limit" @@ -2872,7 +2872,7 @@ def test_do_not_schedule_removed_task(self, dag_maker): res = self.job_runner._executable_task_instances_to_queued(max_tis=32, session=session) - assert [] == res + assert res == [] @pytest.mark.parametrize( "ti_states, run_state", @@ -2956,7 +2956,7 @@ def test_scheduler_start_date(self, configs): # zero tasks ran assert len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()) == 0 session.commit() - assert [] == self.null_exec.sorted_tasks + assert self.null_exec.sorted_tasks == [] # previously, running this backfill would kick off the Scheduler # because it would take the most recent run and start from there @@ -2983,7 +2983,7 @@ def test_scheduler_start_date(self, configs): # still one task assert len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()) == 1 session.commit() - assert [] == self.null_exec.sorted_tasks + assert self.null_exec.sorted_tasks == [] @pytest.mark.parametrize( "configs", @@ -3587,7 +3587,7 @@ def 
test_adopt_or_reset_orphaned_tasks_nothing(self): scheduler_job = Job() self.job_runner = SchedulerJobRunner(job=scheduler_job) session = settings.Session() - assert 0 == self.job_runner.adopt_or_reset_orphaned_tasks(session=session) + assert self.job_runner.adopt_or_reset_orphaned_tasks(session=session) == 0 @pytest.mark.parametrize( "adoptable_state", @@ -3613,7 +3613,7 @@ def test_adopt_or_reset_resettable_tasks(self, dag_maker, adoptable_state, sessi session.commit() num_reset_tis = self.job_runner.adopt_or_reset_orphaned_tasks(session=session) - assert 1 == num_reset_tis + assert num_reset_tis == 1 def test_adopt_or_reset_orphaned_tasks_external_triggered_dag(self, dag_maker, session): dag_id = "test_reset_orphaned_tasks_external_triggered_dag" @@ -3660,7 +3660,7 @@ def test_adopt_or_reset_orphaned_tasks_backfill_dag(self, dag_maker): session.flush() assert dr1.run_type == DagRunType.BACKFILL_JOB - assert 0 == self.job_runner.adopt_or_reset_orphaned_tasks(session=session) + assert self.job_runner.adopt_or_reset_orphaned_tasks(session=session) == 0 session.rollback() def test_reset_orphaned_tasks_no_orphans(self, dag_maker): @@ -3704,13 +3704,13 @@ def test_reset_orphaned_tasks_non_running_dagruns(self, dag_maker): dr1 = dag_maker.create_dagrun() dr1.state = State.QUEUED tis = dr1.get_task_instances(session=session) - assert 1 == len(tis) + assert len(tis) == 1 tis[0].state = State.SCHEDULED session.merge(dr1) session.merge(tis[0]) session.flush() - assert 0 == self.job_runner.adopt_or_reset_orphaned_tasks(session=session) + assert self.job_runner.adopt_or_reset_orphaned_tasks(session=session) == 0 session.rollback() def test_adopt_or_reset_orphaned_tasks_stale_scheduler_jobs(self, dag_maker): @@ -3753,7 +3753,7 @@ def test_adopt_or_reset_orphaned_tasks_stale_scheduler_jobs(self, dag_maker): num_reset_tis = self.job_runner.adopt_or_reset_orphaned_tasks(session=session) - assert 1 == num_reset_tis + assert num_reset_tis == 1 session.refresh(ti1) assert ti1.state is None @@ -6001,8 +6001,8 @@ def test_should_mark_empty_task_as_success(self): tis = session.query(TaskInstance).all() dags = self.job_runner.dagbag.dags.values() - assert ["test_only_empty_tasks"] == [dag.dag_id for dag in dags] - assert 6 == len(tis) + assert [dag.dag_id for dag in dags] == ["test_only_empty_tasks"] + assert len(tis) == 6 assert { ("test_task_a", "success"), ("test_task_b", None), @@ -6017,7 +6017,7 @@ def test_should_mark_empty_task_as_success(self): if state == "success": assert start_date is not None assert end_date is not None - assert 0.0 == duration + assert duration == 0.0 else: assert start_date is None assert end_date is None @@ -6027,7 +6027,7 @@ def test_should_mark_empty_task_as_success(self): with create_session() as session: tis = session.query(TaskInstance).all() - assert 6 == len(tis) + assert len(tis) == 6 assert { ("test_task_a", "success"), ("test_task_b", "success"), @@ -6042,7 +6042,7 @@ def test_should_mark_empty_task_as_success(self): if state == "success": assert start_date is not None assert end_date is not None - assert 0.0 == duration + assert duration == 0.0 else: assert start_date is None assert end_date is None diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py index 3041fe7b7c67c..638f012a3a5a0 100644 --- a/tests/models/test_baseoperator.py +++ b/tests/models/test_baseoperator.py @@ -350,12 +350,12 @@ def test_chain(self): assert [op5] == op3.get_direct_relatives(upstream=False) assert {op4, op5} == set(op6.get_direct_relatives(upstream=True)) - 
assert {"label": "label1"} == dag.get_edge_info( - upstream_task_id=op1.task_id, downstream_task_id=op2.task_id - ) - assert {"label": "label2"} == dag.get_edge_info( - upstream_task_id=op1.task_id, downstream_task_id=op3.task_id - ) + assert dag.get_edge_info(upstream_task_id=op1.task_id, downstream_task_id=op2.task_id) == { + "label": "label1" + } + assert dag.get_edge_info(upstream_task_id=op1.task_id, downstream_task_id=op3.task_id) == { + "label": "label2" + } # Begin test for `XComArgs` with `EdgeModifiers` [xlabel1, xlabel2] = [Label(label=f"xcomarg_label{i}") for i in range(1, 3)] @@ -370,12 +370,12 @@ def test_chain(self): assert [xop5.operator] == xop3.operator.get_direct_relatives(upstream=False) assert {xop4.operator, xop5.operator} == set(xop6.operator.get_direct_relatives(upstream=True)) - assert {"label": "xcomarg_label1"} == dag.get_edge_info( + assert dag.get_edge_info( upstream_task_id=xop1.operator.task_id, downstream_task_id=xop2.operator.task_id - ) - assert {"label": "xcomarg_label2"} == dag.get_edge_info( + ) == {"label": "xcomarg_label1"} + assert dag.get_edge_info( upstream_task_id=xop1.operator.task_id, downstream_task_id=xop3.operator.task_id - ) + ) == {"label": "xcomarg_label2"} # Begin test for `TaskGroups` [tg1, tg2] = [TaskGroup(group_id=f"tg{i}", dag=dag) for i in range(1, 3)] diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index af9bce55376f5..a651c7114d603 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -358,22 +358,35 @@ def test_get_num_task_instances(self): session.merge(ti4) session.commit() - assert 0 == DAG.get_num_task_instances(test_dag_id, task_ids=["fakename"], session=session) - assert 4 == DAG.get_num_task_instances(test_dag_id, task_ids=[test_task_id], session=session) - assert 4 == DAG.get_num_task_instances( - test_dag_id, task_ids=["fakename", test_task_id], session=session + assert DAG.get_num_task_instances(test_dag_id, task_ids=["fakename"], session=session) == 0 + assert DAG.get_num_task_instances(test_dag_id, task_ids=[test_task_id], session=session) == 4 + assert ( + DAG.get_num_task_instances(test_dag_id, task_ids=["fakename", test_task_id], session=session) == 4 ) - assert 1 == DAG.get_num_task_instances( - test_dag_id, task_ids=[test_task_id], states=[None], session=session + assert ( + DAG.get_num_task_instances(test_dag_id, task_ids=[test_task_id], states=[None], session=session) + == 1 ) - assert 2 == DAG.get_num_task_instances( - test_dag_id, task_ids=[test_task_id], states=[State.RUNNING], session=session + assert ( + DAG.get_num_task_instances( + test_dag_id, task_ids=[test_task_id], states=[State.RUNNING], session=session + ) + == 2 ) - assert 3 == DAG.get_num_task_instances( - test_dag_id, task_ids=[test_task_id], states=[None, State.RUNNING], session=session + assert ( + DAG.get_num_task_instances( + test_dag_id, task_ids=[test_task_id], states=[None, State.RUNNING], session=session + ) + == 3 ) - assert 4 == DAG.get_num_task_instances( - test_dag_id, task_ids=[test_task_id], states=[None, State.QUEUED, State.RUNNING], session=session + assert ( + DAG.get_num_task_instances( + test_dag_id, + task_ids=[test_task_id], + states=[None, State.QUEUED, State.RUNNING], + session=session, + ) + == 4 ) session.close() @@ -1135,11 +1148,11 @@ def test_schedule_dag_no_previous_runs(self): assert dag_run is not None assert dag.dag_id == dag_run.dag_id assert dag_run.run_id is not None - assert "" != dag_run.run_id + assert dag_run.run_id != "" assert ( - TEST_DATE == dag_run.logical_date + 
dag_run.logical_date == TEST_DATE ), f"dag_run.logical_date did not match expectation: {dag_run.logical_date}" - assert State.RUNNING == dag_run.state + assert dag_run.state == State.RUNNING assert not dag_run.external_trigger dag.clear() self._clean_up(dag_id) @@ -2432,7 +2445,7 @@ def test_dags_needing_dagruns_asset_triggered_dag_info_queued_times(self, sessio session.flush() query, asset_triggered_dag_info = DagModel.dags_needing_dagruns(session) - assert 1 == len(asset_triggered_dag_info) + assert len(asset_triggered_dag_info) == 1 assert dag.dag_id in asset_triggered_dag_info first_queued_time, last_queued_time = asset_triggered_dag_info[dag.dag_id] assert first_queued_time == DEFAULT_DATE diff --git a/tests/models/test_dag_version.py b/tests/models/test_dag_version.py index 32f0efbcb3e69..f3a14108dc873 100644 --- a/tests/models/test_dag_version.py +++ b/tests/models/test_dag_version.py @@ -59,7 +59,7 @@ def test_writing_dag_version_with_changes(self, dag_maker, session): latest_version = DagVersion.get_latest_version(dag.dag_id) assert latest_version.version_number == 2 - assert 2 == session.scalar(select(func.count()).where(DagVersion.dag_id == dag.dag_id)) + assert session.scalar(select(func.count()).where(DagVersion.dag_id == dag.dag_id)) == 2 @pytest.mark.need_serialized_dag def test_get_version(self, dag_maker, session): diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py index a40b06c9ba988..81f94e09b2ccb 100644 --- a/tests/models/test_dagbag.py +++ b/tests/models/test_dagbag.py @@ -152,7 +152,7 @@ def test_process_file_that_contains_multi_bytes_char(self, tmp_path): path.write_text("\u3042") # write multi-byte char (hiragana) dagbag = DagBag(dag_folder=os.fspath(path.parent), include_examples=False) - assert [] == dagbag.process_file(os.fspath(path)) + assert dagbag.process_file(os.fspath(path)) == [] def test_process_file_duplicated_dag_id(self, tmp_path): """Loading a DAG with ID that already existed in a DAG bag should result in an import error.""" @@ -344,9 +344,9 @@ def process_file(self, filepath, only_if_updated=True, safe_mode=True): dagbag.process_file_calls # Should not call process_file again, since it's already loaded during init. 
- assert 1 == dagbag.process_file_calls + assert dagbag.process_file_calls == 1 assert dagbag.get_dag(dag_id) is not None - assert 1 == dagbag.process_file_calls + assert dagbag.process_file_calls == 1 @pytest.mark.parametrize( ("file_to_load", "expected"), @@ -377,7 +377,7 @@ def test_get_dag_registration(self, file_to_load, expected): def test_dag_registration_with_failure(self): dagbag = DagBag(dag_folder=os.devnull, include_examples=False) found = dagbag.process_file(str(TEST_DAGS_FOLDER / "test_invalid_dup_task.py")) - assert [] == found + assert found == [] @pytest.fixture def zip_with_valid_dag_and_dup_tasks(self, tmp_path: pathlib.Path) -> str: @@ -392,8 +392,8 @@ def zip_with_valid_dag_and_dup_tasks(self, tmp_path: pathlib.Path) -> str: def test_dag_registration_with_failure_zipped(self, zip_with_valid_dag_and_dup_tasks): dagbag = DagBag(dag_folder=os.devnull, include_examples=False) found = dagbag.process_file(zip_with_valid_dag_and_dup_tasks) - assert 1 == len(found) - assert ["test_example_bash_operator"] == [dag.dag_id for dag in found] + assert len(found) == 1 + assert [dag.dag_id for dag in found] == ["test_example_bash_operator"] @patch.object(DagModel, "get_current") def test_refresh_py_dag(self, mock_dagmodel, tmp_path): @@ -418,11 +418,11 @@ def process_file(self, filepath, only_if_updated=True, safe_mode=True): dagbag = _TestDagBag(dag_folder=os.fspath(tmp_path), include_examples=True) - assert 1 == dagbag.process_file_calls + assert dagbag.process_file_calls == 1 dag = dagbag.get_dag(dag_id) assert dag is not None assert dag_id == dag.dag_id - assert 2 == dagbag.process_file_calls + assert dagbag.process_file_calls == 2 @patch.object(DagModel, "get_current") def test_refresh_packaged_dag(self, mock_dagmodel): @@ -446,11 +446,11 @@ def process_file(self, filepath, only_if_updated=True, safe_mode=True): dagbag = _TestDagBag(dag_folder=os.path.realpath(TEST_DAGS_FOLDER), include_examples=False) - assert 1 == dagbag.process_file_calls + assert dagbag.process_file_calls == 1 dag = dagbag.get_dag(dag_id) assert dag is not None assert dag_id == dag.dag_id - assert 2 == dagbag.process_file_calls + assert dagbag.process_file_calls == 2 def test_dag_removed_if_serialized_dag_is_removed(self, dag_maker, tmp_path): """ @@ -544,7 +544,7 @@ def test_process_file_with_none(self, tmp_path): """ dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False) - assert [] == dagbag.process_file(None) + assert dagbag.process_file(None) == [] def test_deactivate_unknown_dags(self): """ @@ -969,7 +969,7 @@ def test_task_cluster_policy_obeyed(self): dagbag = DagBag(dag_folder=dag_file, include_examples=False) assert {"test_with_non_default_owner"} == set(dagbag.dag_ids) - assert {} == dagbag.import_errors + assert dagbag.import_errors == {} @patch("airflow.settings.dag_policy", cluster_policies.dag_policy) def test_dag_cluster_policy_obeyed(self): diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index abeabc2459468..b9ef0b886007e 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -195,14 +195,14 @@ def test_dagrun_find(self, session): session.commit() - assert 1 == len(DagRun.find(dag_id=dag_id1, external_trigger=True)) - assert 1 == len(DagRun.find(run_id=dag_id1)) - assert 2 == len(DagRun.find(run_id=[dag_id1, dag_id2])) - assert 2 == len(DagRun.find(logical_date=[now, now])) - assert 2 == len(DagRun.find(logical_date=now)) - assert 0 == len(DagRun.find(dag_id=dag_id1, external_trigger=False)) - assert 0 == 
len(DagRun.find(dag_id=dag_id2, external_trigger=True)) - assert 1 == len(DagRun.find(dag_id=dag_id2, external_trigger=False)) + assert len(DagRun.find(dag_id=dag_id1, external_trigger=True)) == 1 + assert len(DagRun.find(run_id=dag_id1)) == 1 + assert len(DagRun.find(run_id=[dag_id1, dag_id2])) == 2 + assert len(DagRun.find(logical_date=[now, now])) == 2 + assert len(DagRun.find(logical_date=now)) == 2 + assert len(DagRun.find(dag_id=dag_id1, external_trigger=False)) == 0 + assert len(DagRun.find(dag_id=dag_id2, external_trigger=True)) == 0 + assert len(DagRun.find(dag_id=dag_id2, external_trigger=False)) == 1 def test_dagrun_find_duplicate(self, session): now = timezone.utcnow() @@ -250,7 +250,7 @@ def test_dagrun_success_when_all_skipped(self, session): dag_run = self.create_dag_run(dag=dag, task_states=initial_task_states, session=session) dag_run.update_state() - assert DagRunState.SUCCESS == dag_run.state + assert dag_run.state == DagRunState.SUCCESS def test_dagrun_not_stuck_in_running_when_all_tasks_instances_are_removed(self, session): """ @@ -277,7 +277,7 @@ def test_dagrun_not_stuck_in_running_when_all_tasks_instances_are_removed(self, dag_run = self.create_dag_run(dag=dag, task_states=initial_task_states, session=session) dag_run.update_state() - assert DagRunState.SUCCESS == dag_run.state + assert dag_run.state == DagRunState.SUCCESS def test_dagrun_success_conditions(self, session): dag = DAG( @@ -321,14 +321,14 @@ def test_dagrun_success_conditions(self, session): # root is successful, but unfinished tasks dr.update_state() - assert DagRunState.RUNNING == dr.state + assert dr.state == DagRunState.RUNNING # one has failed, but root is successful ti_op2.set_state(state=TaskInstanceState.FAILED, session=session) ti_op3.set_state(state=TaskInstanceState.SUCCESS, session=session) ti_op4.set_state(state=TaskInstanceState.SUCCESS, session=session) dr.update_state() - assert DagRunState.SUCCESS == dr.state + assert dr.state == DagRunState.SUCCESS def test_dagrun_deadlock(self, session): dag = DAG( @@ -461,7 +461,7 @@ def on_success_callable(context): dag_run = self.create_dag_run(dag=dag, task_states=initial_task_states, session=session) _, callback = dag_run.update_state() - assert DagRunState.SUCCESS == dag_run.state + assert dag_run.state == DagRunState.SUCCESS # Callbacks are not added until handle_callback = False is passed to dag_run.update_state() assert callback is None @@ -489,7 +489,7 @@ def on_failure_callable(context): dag_run = self.create_dag_run(dag=dag, task_states=initial_task_states, session=session) _, callback = dag_run.update_state() - assert DagRunState.FAILED == dag_run.state + assert dag_run.state == DagRunState.FAILED # Callbacks are not added until handle_callback = False is passed to dag_run.update_state() assert callback is None @@ -515,7 +515,7 @@ def test_on_success_callback_when_task_skipped(self, session): task = dag_run.get_task_instances()[0] assert task.state == TaskInstanceState.SKIPPED - assert DagRunState.SUCCESS == dag_run.state + assert dag_run.state == DagRunState.SUCCESS mock_on_success.assert_called_once() def test_dagrun_update_state_with_handle_callback_success(self, session): @@ -545,7 +545,7 @@ def on_success_callable(context): dag_run = self.create_dag_run(dag=dag, task_states=initial_task_states, session=session) _, callback = dag_run.update_state(execute_callbacks=False) - assert DagRunState.SUCCESS == dag_run.state + assert dag_run.state == DagRunState.SUCCESS # Callbacks are not added until handle_callback = False is passed to 
dag_run.update_state() assert callback == DagCallbackRequest( @@ -584,7 +584,7 @@ def on_failure_callable(context): dag_run = self.create_dag_run(dag=dag, task_states=initial_task_states, session=session) _, callback = dag_run.update_state(execute_callbacks=False) - assert DagRunState.FAILED == dag_run.state + assert dag_run.state == DagRunState.FAILED # Callbacks are not added until handle_callback = False is passed to dag_run.update_state() assert callback == DagCallbackRequest( @@ -761,7 +761,7 @@ def with_all_tasks_removed(dag): dagrun = self.create_dag_run(dag, session=session) flaky_ti = dagrun.get_task_instances()[0] - assert "flaky_task" == flaky_ti.task_id + assert flaky_ti.task_id == "flaky_task" assert flaky_ti.state is None dagrun.dag = with_all_tasks_removed(dag) @@ -782,7 +782,7 @@ def test_already_added_task_instances_can_be_ignored(self, session): dagrun = self.create_dag_run(dag, session=session) first_ti = dagrun.get_task_instances()[0] - assert "first_task" == first_ti.task_id + assert first_ti.task_id == "first_task" assert first_ti.state is None # Lets assume that the above TI was added into DB by webserver, but if scheduler diff --git a/tests/models/test_pool.py b/tests/models/test_pool.py index 8bbb5eaa8f00d..b02e0015f623d 100644 --- a/tests/models/test_pool.py +++ b/tests/models/test_pool.py @@ -98,12 +98,12 @@ def test_open_slots(self, dag_maker): session.commit() session.close() - assert 3 == pool.open_slots() - assert 1 == pool.running_slots() - assert 1 == pool.queued_slots() - assert 2 == pool.occupied_slots() - assert 1 == pool.deferred_slots() - assert { + assert pool.open_slots() == 3 + assert pool.running_slots() == 1 + assert pool.queued_slots() == 1 + assert pool.occupied_slots() == 2 + assert pool.deferred_slots() == 1 + assert pool.slots_stats() == { "default_pool": { "open": 128, "queued": 0, @@ -120,7 +120,7 @@ def test_open_slots(self, dag_maker): "scheduled": 0, "total": 5, }, - } == pool.slots_stats() + } def test_open_slots_including_deferred(self, dag_maker): pool = Pool(pool="test_pool", slots=5, include_deferred=True) @@ -145,12 +145,12 @@ def test_open_slots_including_deferred(self, dag_maker): session.commit() session.close() - assert 3 == pool.open_slots() - assert 1 == pool.running_slots() - assert 0 == pool.queued_slots() - assert 1 == pool.deferred_slots() - assert 2 == pool.occupied_slots() - assert { + assert pool.open_slots() == 3 + assert pool.running_slots() == 1 + assert pool.queued_slots() == 0 + assert pool.deferred_slots() == 1 + assert pool.occupied_slots() == 2 + assert pool.slots_stats() == { "default_pool": { "open": 128, "queued": 0, @@ -167,7 +167,7 @@ def test_open_slots_including_deferred(self, dag_maker): "scheduled": 0, "total": 5, }, - } == pool.slots_stats() + } def test_infinite_slots(self, dag_maker): pool = Pool(pool="test_pool", slots=-1, include_deferred=False) @@ -194,10 +194,10 @@ def test_infinite_slots(self, dag_maker): session.close() assert float("inf") == pool.open_slots() - assert 1 == pool.running_slots() - assert 1 == pool.queued_slots() - assert 2 == pool.occupied_slots() - assert { + assert pool.running_slots() == 1 + assert pool.queued_slots() == 1 + assert pool.occupied_slots() == 2 + assert pool.slots_stats() == { "default_pool": { "open": 128, "queued": 0, @@ -214,11 +214,11 @@ def test_infinite_slots(self, dag_maker): "scheduled": 0, "deferred": 0, }, - } == pool.slots_stats() + } def test_default_pool_open_slots(self, dag_maker): set_default_pool_slots(5) - assert 5 == 
Pool.get_default_pool().open_slots() + assert Pool.get_default_pool().open_slots() == 5 with dag_maker( dag_id="test_default_pool_open_slots", @@ -246,8 +246,8 @@ def test_default_pool_open_slots(self, dag_maker): session.commit() session.close() - assert 2 == Pool.get_default_pool().open_slots() - assert { + assert Pool.get_default_pool().open_slots() == 2 + assert Pool.slots_stats() == { "default_pool": { "open": 2, "queued": 2, @@ -256,7 +256,7 @@ def test_default_pool_open_slots(self, dag_maker): "scheduled": 1, "deferred": 0, } - } == Pool.slots_stats() + } def test_get_pool(self): self.add_pools() diff --git a/tests/models/test_renderedtifields.py b/tests/models/test_renderedtifields.py index 85120b9cee000..3f1b13cd1a35d 100644 --- a/tests/models/test_renderedtifields.py +++ b/tests/models/test_renderedtifields.py @@ -163,11 +163,11 @@ def test_get_templated_fields(self, templated_field, expected_rendered_field, da session.add(rtif) session.flush() - assert { + assert RTIF.get_templated_fields(ti=ti, session=session) == { "bash_command": expected_rendered_field, "env": None, "cwd": None, - } == RTIF.get_templated_fields(ti=ti, session=session) + } # Test the else part of get_templated_fields # i.e. for the TIs that are not stored in RTIF table # Fetching them will return None @@ -309,7 +309,7 @@ def test_write(self, dag_maker): session = settings.Session() result = session.query(RTIF).all() - assert [] == result + assert result == [] with dag_maker("test_write"): task = BashOperator(task_id="test", bash_command="echo {{ var.value.test_key }}") @@ -329,7 +329,7 @@ def test_write(self, dag_maker): ) .first() ) - assert ("test_write", "test", {"bash_command": "echo test_val", "env": None, "cwd": None}) == result + assert result == ("test_write", "test", {"bash_command": "echo test_val", "env": None, "cwd": None}) # Test that overwrite saves new values to the DB Variable.delete("test_key") @@ -352,11 +352,11 @@ def test_write(self, dag_maker): ) .first() ) - assert ( + assert result_updated == ( "test_write", "test", {"bash_command": "echo test_val_updated", "env": None, "cwd": None}, - ) == result_updated + ) @mock.patch.dict(os.environ, {"AIRFLOW_VAR_API_KEY": "secret"}) @mock.patch("airflow.utils.log.secrets_masker.redact", autospec=True) diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index 179fe8f46f4ff..51803e788cbbc 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -439,7 +439,7 @@ def test_ti_updates_with_task(self, create_task_instance, session=None): ti.run(session=session) tis = dag.get_task_instances() - assert {"foo": "bar"} == tis[0].executor_config + assert tis[0].executor_config == {"foo": "bar"} task2 = EmptyOperator( task_id="test_run_pooling_task_op2", executor_config={"bar": "baz"}, @@ -455,7 +455,7 @@ def test_ti_updates_with_task(self, create_task_instance, session=None): # Ensure it's reloaded ti2.executor_config = None ti2.refresh_from_db(session) - assert {"bar": "baz"} == ti2.executor_config + assert ti2.executor_config == {"bar": "baz"} session.rollback() def test_run_pooling_task_with_mark_success(self, create_task_instance): @@ -492,7 +492,7 @@ def raise_skip_exception(): ti = dr.task_instances[0] ti.task = task ti.run() - assert State.SKIPPED == ti.state + assert ti.state == State.SKIPPED def test_task_sigterm_calls_on_failure_callback(self, dag_maker, caplog): """ @@ -2020,9 +2020,9 @@ def test_get_num_running_task_instances(self, create_task_instance): assert ti3 in session 
session.commit() - assert 1 == ti1.get_num_running_task_instances(session=session) - assert 1 == ti2.get_num_running_task_instances(session=session) - assert 1 == ti3.get_num_running_task_instances(session=session) + assert ti1.get_num_running_task_instances(session=session) == 1 + assert ti2.get_num_running_task_instances(session=session) == 1 + assert ti3.get_num_running_task_instances(session=session) == 1 def test_get_num_running_task_instances_per_dagrun(self, create_task_instance, dag_maker): session = settings.Session() @@ -2063,15 +2063,15 @@ def test_get_num_running_task_instances_per_dagrun(self, create_task_instance, d session.commit() - assert 1 == tis1[("task_1", 0)].get_num_running_task_instances(session=session, same_dagrun=True) - assert 1 == tis1[("task_1", 1)].get_num_running_task_instances(session=session, same_dagrun=True) - assert 2 == tis1[("task_2", 0)].get_num_running_task_instances(session=session) - assert 1 == tis1[("task_3", 0)].get_num_running_task_instances(session=session, same_dagrun=True) + assert tis1[("task_1", 0)].get_num_running_task_instances(session=session, same_dagrun=True) == 1 + assert tis1[("task_1", 1)].get_num_running_task_instances(session=session, same_dagrun=True) == 1 + assert tis1[("task_2", 0)].get_num_running_task_instances(session=session) == 2 + assert tis1[("task_3", 0)].get_num_running_task_instances(session=session, same_dagrun=True) == 1 - assert 1 == tis2[("task_1", 0)].get_num_running_task_instances(session=session, same_dagrun=True) - assert 1 == tis2[("task_1", 1)].get_num_running_task_instances(session=session, same_dagrun=True) - assert 2 == tis2[("task_2", 0)].get_num_running_task_instances(session=session) - assert 1 == tis2[("task_3", 0)].get_num_running_task_instances(session=session, same_dagrun=True) + assert tis2[("task_1", 0)].get_num_running_task_instances(session=session, same_dagrun=True) == 1 + assert tis2[("task_1", 1)].get_num_running_task_instances(session=session, same_dagrun=True) == 1 + assert tis2[("task_2", 0)].get_num_running_task_instances(session=session) == 2 + assert tis2[("task_3", 0)].get_num_running_task_instances(session=session, same_dagrun=True) == 1 def test_log_url(self, create_task_instance): ti = create_task_instance(dag_id="my_dag", task_id="op", logical_date=timezone.datetime(2018, 1, 1)) @@ -2162,8 +2162,8 @@ def test_email_alert_with_config(self, mock_send_email, dag_maker): (email, title, body), _ = mock_send_email.call_args assert email == "to" - assert "template: test_email_alert_with_config" == title - assert "template: test_email_alert_with_config" == body + assert title == "template: test_email_alert_with_config" + assert body == "template: test_email_alert_with_config" @patch("airflow.models.taskinstance.send_email") def test_email_alert_with_filenotfound_config(self, mock_send_email, dag_maker): @@ -3623,7 +3623,7 @@ def fail(): ti.task = task with contextlib.suppress(AirflowException): ti.run() - assert State.FAILED == ti.state + assert ti.state == State.FAILED def test_retries_on_other_exceptions(self, dag_maker): def fail(): @@ -3639,7 +3639,7 @@ def fail(): ti.task = task with contextlib.suppress(AirflowException): ti.run() - assert State.UP_FOR_RETRY == ti.state + assert ti.state == State.UP_FOR_RETRY @patch.object(TaskInstance, "logger") def test_stacktrace_on_failure_starts_with_task_execute_method(self, mock_get_log, dag_maker): @@ -3666,8 +3666,8 @@ def fail(): assert sys.modules[TaskInstance.__module__].__file__ == filename, "".join(formatted_exc) def 
_env_var_check_callback(self): - assert "test_echo_env_variables" == os.environ["AIRFLOW_CTX_DAG_ID"] - assert "hive_in_python_op" == os.environ["AIRFLOW_CTX_TASK_ID"] + assert os.environ["AIRFLOW_CTX_DAG_ID"] == "test_echo_env_variables" + assert os.environ["AIRFLOW_CTX_TASK_ID"] == "hive_in_python_op" assert DEFAULT_DATE.isoformat() == os.environ["AIRFLOW_CTX_LOGICAL_DATE"] assert DagRun.generate_run_id(DagRunType.MANUAL, DEFAULT_DATE) == os.environ["AIRFLOW_CTX_DAG_RUN_ID"] @@ -3856,7 +3856,7 @@ def test_get_rendered_template_fields(self, dag_maker, session=None): new_ti = TI(task=new_task, run_id=ti.run_id) new_ti.get_rendered_template_fields(session=session) - assert "op1" == ti.task.bash_command + assert ti.task.bash_command == "op1" # CleanUp with create_session() as session: @@ -4007,7 +4007,7 @@ def raise_skip_exception(): ti = dr.task_instances[0] ti.task = task ti.run() - assert State.SKIPPED == ti.state + assert ti.state == State.SKIPPED on_skipped_callback_function.assert_called_once() on_success_callback_function.assert_not_called() diff --git a/tests/models/test_taskmixin.py b/tests/models/test_taskmixin.py index 903f215ce79cd..7cfd58732889f 100644 --- a/tests/models/test_taskmixin.py +++ b/tests/models/test_taskmixin.py @@ -266,7 +266,7 @@ def test_set_downstream_list(self, dag_maker): op_a >> [op_b >> op_c, op_d] - assert [] == op_b.upstream_list + assert op_b.upstream_list == [] assert [op_a] == op_d.upstream_list assert {op_a, op_b} == set(op_c.upstream_list) @@ -281,8 +281,8 @@ def test_set_upstream_inner_list(self, dag_maker): assert str(e_info.value) == "'list' object has no attribute 'update_relative'" - assert [] == op_b.upstream_list - assert [] == op_c.upstream_list + assert op_b.upstream_list == [] + assert op_c.upstream_list == [] assert {op_b, op_c} == set(op_d.upstream_list) def test_set_downstream_inner_list(self, dag_maker): @@ -294,8 +294,8 @@ def test_set_downstream_inner_list(self, dag_maker): op_a >> [[op_b, op_c] >> op_d] - assert [] == op_b.upstream_list - assert [] == op_c.upstream_list + assert op_b.upstream_list == [] + assert op_c.upstream_list == [] assert {op_b, op_c, op_a} == set(op_d.upstream_list) def test_set_upstream_list_subarray(self, dag_maker): @@ -311,9 +311,9 @@ def test_set_upstream_list_subarray(self, dag_maker): assert str(e_info.value) == "'list' object has no attribute 'update_relative'" - assert [] == op_b_1.upstream_list - assert [] == op_b_2.upstream_list - assert [] == op_d.upstream_list + assert op_b_1.upstream_list == [] + assert op_b_2.upstream_list == [] + assert op_d.upstream_list == [] assert {op_b_1, op_b_2} == set(op_c.upstream_list) def test_set_downstream_list_subarray(self, dag_maker): @@ -326,7 +326,7 @@ def test_set_downstream_list_subarray(self, dag_maker): op_a >> [[op_b_1, op_b_2] >> op_c, op_d] - assert [] == op_b_1.upstream_list - assert [] == op_b_2.upstream_list + assert op_b_1.upstream_list == [] + assert op_b_2.upstream_list == [] assert [op_a] == op_d.upstream_list assert {op_a, op_b_1, op_b_2} == set(op_c.upstream_list) diff --git a/tests/models/test_variable.py b/tests/models/test_variable.py index 207eb94e05003..67a1079d0198f 100644 --- a/tests/models/test_variable.py +++ b/tests/models/test_variable.py @@ -99,7 +99,7 @@ def test_var_with_encryption_rotate_fernet_key(self, test_value, session): def test_variable_set_get_round_trip(self): Variable.set("tested_var_set_id", "Monday morning breakfast") - assert "Monday morning breakfast" == Variable.get("tested_var_set_id") + assert 
Variable.get("tested_var_set_id") == "Monday morning breakfast" def test_variable_set_with_env_variable(self, caplog, session): caplog.set_level(logging.WARNING, logger=variable.log.name) @@ -108,11 +108,11 @@ def test_variable_set_with_env_variable(self, caplog, session): # setting value while shadowed by an env variable will generate a warning Variable.set(key="key", value="new-db-value", session=session) # value set above is not returned because the env variable value takes priority - assert "env-value" == Variable.get("key") + assert Variable.get("key") == "env-value" # invalidate the cache to re-evaluate value SecretCache.invalidate_variable("key") # now that env var is not here anymore, we see the value we set before. - assert "new-db-value" == Variable.get("key") + assert Variable.get("key") == "new-db-value" assert caplog.messages[0] == ( "The variable key is defined in the EnvironmentVariablesBackend secrets backend, " @@ -147,9 +147,9 @@ def test_variable_set_get_round_trip_json(self): def test_variable_update(self, session): Variable.set(key="test_key", value="value1", session=session) - assert "value1" == Variable.get(key="test_key") + assert Variable.get(key="test_key") == "value1" Variable.update(key="test_key", value="value2", session=session) - assert "value2" == Variable.get("test_key") + assert Variable.get("test_key") == "value2" def test_variable_update_fails_on_non_metastore_variable(self, session): with mock.patch.dict("os.environ", AIRFLOW_VAR_KEY="env-value"): @@ -175,7 +175,7 @@ def test_variable_set_existing_value_to_blank(self, session): test_key = "test_key" Variable.set(key=test_key, value=test_value, session=session) Variable.set(key=test_key, value="", session=session) - assert "" == Variable.get("test_key") + assert Variable.get("test_key") == "" def test_get_non_existing_var_should_return_default(self): default_value = "some default val" diff --git a/tests/operators/test_latest_only_operator.py b/tests/operators/test_latest_only_operator.py index 0182be26f6bd8..7c6b086994d7c 100644 --- a/tests/operators/test_latest_only_operator.py +++ b/tests/operators/test_latest_only_operator.py @@ -131,35 +131,35 @@ def test_skipping_non_latest(self, dag_maker): latest_instances = get_task_instances("latest") exec_date_to_latest_state = {ti.logical_date: ti.state for ti in latest_instances} - assert { + assert exec_date_to_latest_state == { timezone.datetime(2016, 1, 1): "success", timezone.datetime(2016, 1, 1, 12): "success", timezone.datetime(2016, 1, 2): "success", - } == exec_date_to_latest_state + } downstream_instances = get_task_instances("downstream") exec_date_to_downstream_state = {ti.logical_date: ti.state for ti in downstream_instances} - assert { + assert exec_date_to_downstream_state == { timezone.datetime(2016, 1, 1): "skipped", timezone.datetime(2016, 1, 1, 12): "skipped", timezone.datetime(2016, 1, 2): "success", - } == exec_date_to_downstream_state + } downstream_instances = get_task_instances("downstream_2") exec_date_to_downstream_state = {ti.logical_date: ti.state for ti in downstream_instances} - assert { + assert exec_date_to_downstream_state == { timezone.datetime(2016, 1, 1): None, timezone.datetime(2016, 1, 1, 12): None, timezone.datetime(2016, 1, 2): "success", - } == exec_date_to_downstream_state + } downstream_instances = get_task_instances("downstream_3") exec_date_to_downstream_state = {ti.logical_date: ti.state for ti in downstream_instances} - assert { + assert exec_date_to_downstream_state == { timezone.datetime(2016, 1, 1): "success", 
timezone.datetime(2016, 1, 1, 12): "success", timezone.datetime(2016, 1, 2): "success", - } == exec_date_to_downstream_state + } def test_not_skipping_external(self, dag_maker): with dag_maker( @@ -211,24 +211,24 @@ def test_not_skipping_external(self, dag_maker): latest_instances = get_task_instances("latest") exec_date_to_latest_state = {ti.logical_date: ti.state for ti in latest_instances} - assert { + assert exec_date_to_latest_state == { timezone.datetime(2016, 1, 1): "success", timezone.datetime(2016, 1, 1, 12): "success", timezone.datetime(2016, 1, 2): "success", - } == exec_date_to_latest_state + } downstream_instances = get_task_instances("downstream") exec_date_to_downstream_state = {ti.logical_date: ti.state for ti in downstream_instances} - assert { + assert exec_date_to_downstream_state == { timezone.datetime(2016, 1, 1): "success", timezone.datetime(2016, 1, 1, 12): "success", timezone.datetime(2016, 1, 2): "success", - } == exec_date_to_downstream_state + } downstream_instances = get_task_instances("downstream_2") exec_date_to_downstream_state = {ti.logical_date: ti.state for ti in downstream_instances} - assert { + assert exec_date_to_downstream_state == { timezone.datetime(2016, 1, 1): "success", timezone.datetime(2016, 1, 1, 12): "success", timezone.datetime(2016, 1, 2): "success", - } == exec_date_to_downstream_state + } diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py index e3369a7c4f718..0f2636670b367 100644 --- a/tests/plugins/test_plugins_manager.py +++ b/tests/plugins/test_plugins_manager.py @@ -142,7 +142,7 @@ def test_app_blueprints(self): def test_app_static_folder(self): # Blueprint static folder should be properly set - assert AIRFLOW_SOURCES_ROOT / "airflow" / "www" / "static" == Path(self.app.static_folder).resolve() + assert Path(self.app.static_folder).resolve() == AIRFLOW_SOURCES_ROOT / "airflow" / "www" / "static" @pytest.mark.db_test @@ -187,7 +187,7 @@ def test_loads_filesystem_plugins(self, caplog): assert len(plugins_manager.plugins) == 9 for plugin in plugins_manager.plugins: if "AirflowTestOnLoadPlugin" in str(plugin): - assert "postload" == plugin.name + assert plugin.name == "postload" break else: pytest.fail("Wasn't able to find a registered `AirflowTestOnLoadPlugin`") @@ -366,11 +366,11 @@ def test_registering_plugin_listeners(self): listeners = get_listener_manager().pm.get_plugins() listener_names = [el.__name__ if inspect.ismodule(el) else qualname(el) for el in listeners] # sort names as order of listeners is not guaranteed - assert [ + assert sorted(listener_names) == [ "airflow.example_dags.plugins.event_listener", "tests.listeners.class_listener.ClassBasedListener", "tests.listeners.empty_listener", - ] == sorted(listener_names) + ] def test_should_import_plugin_from_providers(self): from airflow import plugins_manager @@ -404,9 +404,9 @@ def test_should_return_correct_path_name(self): from airflow import plugins_manager source = plugins_manager.PluginsDirectorySource(__file__) - assert "test_plugins_manager.py" == source.path - assert "$PLUGINS_FOLDER/test_plugins_manager.py" == str(source) - assert "$PLUGINS_FOLDER/test_plugins_manager.py" == source.__html__() + assert source.path == "test_plugins_manager.py" + assert str(source) == "$PLUGINS_FOLDER/test_plugins_manager.py" + assert source.__html__() == "$PLUGINS_FOLDER/test_plugins_manager.py" class TestEntryPointSource: diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py index 
d5dfc1a5fb01f..28ccfe4284587 100644 --- a/tests/sensors/test_external_task_sensor.py +++ b/tests/sensors/test_external_task_sensor.py @@ -1507,14 +1507,14 @@ def test_external_task_marker_cyclic_shallow(dag_bag_cyclic): tis = clear_tasks(dag_bag, dag_0, task_a_0, dry_run=True) - assert [ + assert sorted((ti.dag_id, ti.task_id) for ti in tis) == [ ("dag_0", "task_a_0"), ("dag_0", "task_b_0"), ("dag_1", "task_a_1"), ("dag_1", "task_b_1"), ("dag_2", "task_a_2"), ("dag_2", "task_b_2"), - ] == sorted((ti.dag_id, ti.task_id) for ti in tis) + ] @pytest.fixture diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index 42d07b39220e5..d7dbf54c1869e 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -1064,7 +1064,7 @@ def test_extra_serialized_field_and_operator_links( # Test Deserialized link registered via Airflow Plugin link = simple_task.get_extra_links(ti, GoogleLink.name) - assert "https://www.google.com" == link + assert link == "https://www.google.com" @pytest.mark.usefixtures("clear_all_logger_handlers") def test_extra_operator_links_logs_error_for_non_registered_extra_links(self): diff --git a/tests/task/test_standard_task_runner.py b/tests/task/test_standard_task_runner.py index 6aebbd1a9be6e..e573a4961f50e 100644 --- a/tests/task/test_standard_task_runner.py +++ b/tests/task/test_standard_task_runner.py @@ -444,7 +444,7 @@ def test_on_kill(self): logger.info("The file appeared") with path_on_kill_killed.open() as f: - assert "ON_KILL_TEST" == f.readline() + assert f.readline() == "ON_KILL_TEST" for process in processes: assert not psutil.pid_exists(process.pid), f"{process} is still alive" diff --git a/tests/utils/log/test_log_reader.py b/tests/utils/log/test_log_reader.py index 283933e850f70..fe0f13297c86f 100644 --- a/tests/utils/log/test_log_reader.py +++ b/tests/utils/log/test_log_reader.py @@ -194,7 +194,7 @@ def test_read_log_stream_should_support_multiple_chunks(self, mock_read): task_log_reader = TaskLogReader() self.ti.state = TaskInstanceState.SUCCESS log_stream = task_log_reader.read_log_stream(ti=self.ti, try_number=1, metadata={}) - assert ["\n1st line\n", "\n2nd line\n", "\n3rd line\n"] == list(log_stream) + assert list(log_stream) == ["\n1st line\n", "\n2nd line\n", "\n3rd line\n"] mock_read.assert_has_calls( [ @@ -215,7 +215,7 @@ def test_read_log_stream_should_read_each_try_in_turn(self, mock_read): task_log_reader = TaskLogReader() log_stream = task_log_reader.read_log_stream(ti=self.ti, try_number=None, metadata={}) - assert ["\ntry_number=1.\n", "\ntry_number=2.\n", "\ntry_number=3.\n"] == list(log_stream) + assert list(log_stream) == ["\ntry_number=1.\n", "\ntry_number=2.\n", "\ntry_number=3.\n"] mock_read.assert_has_calls( [ diff --git a/tests/utils/test_email.py b/tests/utils/test_email.py index 55f18edf75296..dfcae90215c56 100644 --- a/tests/utils/test_email.py +++ b/tests/utils/test_email.py @@ -164,11 +164,11 @@ def test_send_smtp(self, mock_send_mime, tmp_path): assert mock_send_mime.called _, call_args = mock_send_mime.call_args assert conf.get("smtp", "SMTP_MAIL_FROM") == call_args["e_from"] - assert ["to"] == call_args["e_to"] + assert call_args["e_to"] == ["to"] msg = call_args["mime_msg"] - assert "subject" == msg["Subject"] + assert msg["Subject"] == "subject" assert conf.get("smtp", "SMTP_MAIL_FROM") == msg["From"] - assert 2 == len(msg.get_payload()) + assert len(msg.get_payload()) == 2 filename = f'attachment; filename="{path.name}"' 
assert filename == msg.get_payload()[-1].get("Content-Disposition") mimeapp = MIMEApplication("attachment") @@ -199,11 +199,11 @@ def test_send_bcc_smtp(self, mock_send_mime, tmp_path): assert mock_send_mime.called _, call_args = mock_send_mime.call_args assert conf.get("smtp", "SMTP_MAIL_FROM") == call_args["e_from"] - assert ["to", "cc", "bcc"] == call_args["e_to"] + assert call_args["e_to"] == ["to", "cc", "bcc"] msg = call_args["mime_msg"] - assert "subject" == msg["Subject"] + assert msg["Subject"] == "subject" assert conf.get("smtp", "SMTP_MAIL_FROM") == msg["From"] - assert 2 == len(msg.get_payload()) + assert len(msg.get_payload()) == 2 assert f'attachment; filename="{path.name}"' == msg.get_payload()[-1].get("Content-Disposition") mimeapp = MIMEApplication("attachment") assert mimeapp.get_payload() == msg.get_payload()[-1].get_payload() diff --git a/tests/utils/test_file.py b/tests/utils/test_file.py index dd521ec54bb34..da1c6fa2fc87f 100644 --- a/tests/utils/test_file.py +++ b/tests/utils/test_file.py @@ -63,7 +63,7 @@ def test_correct_maybe_zipped_archive(self, mocked_is_zipfile): assert mocked_is_zipfile.call_count == 1 (args, kwargs) = mocked_is_zipfile.call_args_list[0] - assert "/path/to/archive.zip" == args[0] + assert args[0] == "/path/to/archive.zip" assert dag_folder == "/path/to/archive.zip" diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 46fefd4202632..4fbe774f86948 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -36,11 +36,11 @@ class TestGetHostname: @mock.patch("airflow.utils.net.getfqdn", return_value="first") @conf_vars({("core", "hostname_callable"): None}) def test_get_hostname_unset(self, mock_getfqdn): - assert "first" == net.get_hostname() + assert net.get_hostname() == "first" @conf_vars({("core", "hostname_callable"): "tests.utils.test_net.get_hostname"}) def test_get_hostname_set(self): - assert "awesomehostname" == net.get_hostname() + assert net.get_hostname() == "awesomehostname" @conf_vars({("core", "hostname_callable"): "tests.utils.test_net"}) def test_get_hostname_set_incorrect(self): diff --git a/tests/utils/test_operator_helpers.py b/tests/utils/test_operator_helpers.py index ac04050d338b2..1f6b6df4c6198 100644 --- a/tests/utils/test_operator_helpers.py +++ b/tests/utils/test_operator_helpers.py @@ -89,13 +89,13 @@ def test_context_to_airflow_vars_with_default_context_vars(self): mock_method.return_value = {"airflow_cluster": [1, 2]} with pytest.raises(TypeError) as error: operator_helpers.context_to_airflow_vars(self.context) - assert "value of key <airflow_cluster> must be string, not <class 'list'>" == str(error.value) + assert str(error.value) == "value of key <airflow_cluster> must be string, not <class 'list'>" with mock.patch("airflow.settings.get_airflow_context_vars") as mock_method: mock_method.return_value = {1: "value"} with pytest.raises(TypeError) as error: operator_helpers.context_to_airflow_vars(self.context) - assert "key <1> must be string" == str(error.value) + assert str(error.value) == "key <1> must be string" def callable1(ds_nodash): diff --git a/tests/utils/test_process_utils.py b/tests/utils/test_process_utils.py index b32ecb3d41089..7edc5f28b73b2 100644 --- a/tests/utils/test_process_utils.py +++ b/tests/utils/test_process_utils.py @@ -104,7 +104,7 @@ class TestExecuteInSubProcess: def test_should_print_all_messages1(self, caplog): execute_in_subprocess(["bash", "-c", "echo CAT; echo KITTY;"]) msgs = [record.getMessage() for record in caplog.records] - assert ["Executing cmd: bash -c 'echo CAT; echo KITTY;'", "Output:", "CAT", "KITTY"] ==
msgs + assert msgs == ["Executing cmd: bash -c 'echo CAT; echo KITTY;'", "Output:", "CAT", "KITTY"] def test_should_print_all_messages_from_cwd(self, caplog, tmp_path): execute_in_subprocess(["bash", "-c", "echo CAT; pwd; echo KITTY;"], cwd=str(tmp_path)) @@ -179,30 +179,30 @@ def test_should_update_variable_and_restore_state_when_exit(self): with mock.patch.dict("os.environ", {"TEST_NOT_EXISTS": "BEFORE", "TEST_EXISTS": "BEFORE"}): del os.environ["TEST_NOT_EXISTS"] - assert "BEFORE" == os.environ["TEST_EXISTS"] + assert os.environ["TEST_EXISTS"] == "BEFORE" assert "TEST_NOT_EXISTS" not in os.environ with process_utils.patch_environ({"TEST_NOT_EXISTS": "AFTER", "TEST_EXISTS": "AFTER"}): - assert "AFTER" == os.environ["TEST_NOT_EXISTS"] - assert "AFTER" == os.environ["TEST_EXISTS"] + assert os.environ["TEST_NOT_EXISTS"] == "AFTER" + assert os.environ["TEST_EXISTS"] == "AFTER" - assert "BEFORE" == os.environ["TEST_EXISTS"] + assert os.environ["TEST_EXISTS"] == "BEFORE" assert "TEST_NOT_EXISTS" not in os.environ def test_should_restore_state_when_exception(self): with mock.patch.dict("os.environ", {"TEST_NOT_EXISTS": "BEFORE", "TEST_EXISTS": "BEFORE"}): del os.environ["TEST_NOT_EXISTS"] - assert "BEFORE" == os.environ["TEST_EXISTS"] + assert os.environ["TEST_EXISTS"] == "BEFORE" assert "TEST_NOT_EXISTS" not in os.environ with suppress(AirflowException): with process_utils.patch_environ({"TEST_NOT_EXISTS": "AFTER", "TEST_EXISTS": "AFTER"}): - assert "AFTER" == os.environ["TEST_NOT_EXISTS"] - assert "AFTER" == os.environ["TEST_EXISTS"] + assert os.environ["TEST_NOT_EXISTS"] == "AFTER" + assert os.environ["TEST_EXISTS"] == "AFTER" raise AirflowException("Unknown exception") - assert "BEFORE" == os.environ["TEST_EXISTS"] + assert os.environ["TEST_EXISTS"] == "BEFORE" assert "TEST_NOT_EXISTS" not in os.environ diff --git a/tests/utils/test_serve_logs.py b/tests/utils/test_serve_logs.py index 1e88e24ac39a1..667ac95db1154 100644 --- a/tests/utils/test_serve_logs.py +++ b/tests/utils/test_serve_logs.py @@ -102,7 +102,7 @@ def different_audience(secret_key): @pytest.mark.usefixtures("sample_log") class TestServeLogs: def test_forbidden_no_auth(self, client: FlaskClient): - assert 403 == client.get("/log/sample.log").status_code + assert client.get("/log/sample.log").status_code == 403 def test_should_serve_file(self, client: FlaskClient, signer): response = client.get( diff --git a/tests/www/test_app.py b/tests/www/test_app.py index 25bdcd2744e1d..c258b89ac3caa 100644 --- a/tests/www/test_app.py +++ b/tests/www/test_app.py @@ -85,7 +85,7 @@ def debug_view(): response = Response.from_app(app, environ) - assert b"success" == response.get_data() + assert response.get_data() == b"success" assert response.status_code == 200 @dont_initialize_flask_app_submodules @@ -121,7 +121,7 @@ def debug_view(): response = Response.from_app(app, environ) - assert b"success" == response.get_data() + assert response.get_data() == b"success" assert response.status_code == 200 @dont_initialize_flask_app_submodules @@ -169,7 +169,7 @@ def debug_view(): response = Response.from_app(app, environ) - assert b"success" == response.get_data() + assert response.get_data() == b"success" assert response.status_code == 200 @conf_vars( @@ -215,7 +215,7 @@ def debug_view(): response = Response.from_app(app, environ) - assert b"success" == response.get_data() + assert response.get_data() == b"success" assert response.status_code == 200 @conf_vars( diff --git a/tests/www/test_utils.py b/tests/www/test_utils.py index 
672f1998a7f06..eba5b37758dab 100644 --- a/tests/www/test_utils.py +++ b/tests/www/test_utils.py @@ -112,7 +112,7 @@ def check_generate_pages_html( if node_text == str(current_page + 1): if check_middle: assert mid == i - assert "javascript:void(0)" == href_link + assert href_link == "javascript:void(0)" assert "active" in item["class"] else: assert re.search(r"^\?", href_link), "Link is page-relative" @@ -142,29 +142,30 @@ def test_generate_pager_current_start_with_sorting(self): def test_params_no_values(self): """Should return an empty string if no params are passed""" - assert "" == utils.get_params() + assert utils.get_params() == "" def test_params_search(self): - assert "search=bash_" == utils.get_params(search="bash_") + assert utils.get_params(search="bash_") == "search=bash_" def test_params_none_and_zero(self): query_str = utils.get_params(a=0, b=None, c="true") # The order won't be consistent, but that doesn't affect behaviour of a browser pairs = sorted(query_str.split("&")) - assert ["a=0", "c=true"] == pairs + assert pairs == ["a=0", "c=true"] def test_params_all(self): query = utils.get_params(tags=["tag1", "tag2"], status="active", page=3, search="bash_") - assert { + assert parse_qs(query) == { "tags": ["tag1", "tag2"], "page": ["3"], "search": ["bash_"], "status": ["active"], - } == parse_qs(query) + } def test_params_escape(self): - assert "search=%27%3E%22%2F%3E%3Cimg+src%3Dx+onerror%3Dalert%281%29%3E" == utils.get_params( - search="'>\"/><img src=x onerror=alert(1)>" + assert ( + utils.get_params(search="'>\"/><img src=x onerror=alert(1)>") + == "search=%27%3E%22%2F%3E%3Cimg+src%3Dx+onerror%3Dalert%281%29%3E" ) def test_state_token(self): @@ -304,7 +305,7 @@ def example_callable(unused_self): def test_python_callable_none(self): rendered = self.attr_renderer["python_callable"](None) - assert "" == rendered + assert rendered == "" def test_markdown(self): markdown = "* foo\n* bar" @@ -368,9 +369,9 @@ class TestWrappedMarkdown: def test_wrapped_markdown_with_docstring_curly_braces(self): rendered = wrapped_markdown("{braces}", css_class="a_class") assert (
- """[...]{braces}[...] + rendered + == """[...]{braces}[...]
""" - == rendered ) def test_wrapped_markdown_with_some_markdown(self): @@ -382,10 +383,10 @@ def test_wrapped_markdown_with_some_markdown(self): ) assert ( - """[...]italic[...] + rendered + == """[...]italic[...] [...]bold[...]
""" - == rendered ) def test_wrapped_markdown_with_table(self): @@ -398,7 +399,8 @@ def test_wrapped_markdown_with_table(self): ) assert ( - """[...] + rendered + == """[...]
@@ -413,7 +415,6 @@ def test_wrapped_markdown_with_table(self): [...]Job[...] """ - == rendered ) def test_wrapped_markdown_with_indented_lines(self): @@ -426,9 +427,9 @@ def test_wrapped_markdown_with_indented_lines(self): ) assert ( - """[...]header[...] [...]\n[...] [...]1st line\n2nd line[...] + rendered + == """[...]header[...] [...]\n[...] [...]1st line\n2nd line[...]
""" - == rendered ) def test_wrapped_markdown_with_raw_code_block(self): @@ -447,11 +448,11 @@ def test_wrapped_markdown_with_raw_code_block(self): ) assert ( - """[...]Markdown code block[...] + rendered + == """[...]Markdown code block[...] [...]Inline code works well.[...] [...]Code block\ndoes not\nrespect\nnewlines\n[...]
""" - == rendered ) def test_wrapped_markdown_with_nested_list(self): @@ -465,7 +466,8 @@ def test_wrapped_markdown_with_nested_list(self): ) assert ( - """[...]Docstring with a code block[...] + rendered + == """[...]Docstring with a code block[...] [...]• And[...]
@@ -474,7 +476,6 @@ def test_wrapped_markdown_with_nested_list(self):
""" - == rendered ) @pytest.mark.parametrize( diff --git a/tests/www/views/test_anonymous_as_admin_role.py b/tests/www/views/test_anonymous_as_admin_role.py index 5a90f8becc267..fbff4cdd37409 100644 --- a/tests/www/views/test_anonymous_as_admin_role.py +++ b/tests/www/views/test_anonymous_as_admin_role.py @@ -56,7 +56,7 @@ def factory(**values): def test_delete_pool_anonymous_user_no_role(anonymous_client, pool_factory): pool = pool_factory() resp = anonymous_client.post(f"pool/delete/{pool.id}") - assert 302 == resp.status_code + assert resp.status_code == 302 assert f"/login/?next={quote_plus(f'http://localhost/pool/delete/{pool.id}')}" == resp.headers["Location"] diff --git a/tests/www/views/test_views.py b/tests/www/views/test_views.py index 99bc7e268ce80..338337d258bff 100644 --- a/tests/www/views/test_views.py +++ b/tests/www/views/test_views.py @@ -84,7 +84,7 @@ def test_webserver_configuration_config_file(mock_webserver_config_global, admin conf = write_default_airflow_configuration_if_needed() write_webserver_configuration_if_needed(conf) initialize_config() - assert airflow.configuration.WEBSERVER_CONFIG == config_file + assert config_file == airflow.configuration.WEBSERVER_CONFIG assert os.path.isfile(config_file) diff --git a/tests/www/views/test_views_base.py b/tests/www/views/test_views_base.py index 3c1f00fcc5e98..0c6316bf6058c 100644 --- a/tests/www/views/test_views_base.py +++ b/tests/www/views/test_views_base.py @@ -124,7 +124,7 @@ def test_health(request, admin_client, heartbeat): scheduler_status, last_scheduler_heartbeat = request.getfixturevalue(heartbeat) resp = admin_client.get("health", follow_redirects=True) resp_json = json.loads(resp.data.decode("utf-8")) - assert "healthy" == resp_json["metadatabase"]["status"] + assert resp_json["metadatabase"]["status"] == "healthy" assert scheduler_status == resp_json["scheduler"]["status"] assert last_scheduler_heartbeat == resp_json["scheduler"]["latest_scheduler_heartbeat"] diff --git a/tests/www/views/test_views_connection.py b/tests/www/views/test_views_connection.py index 28ea56a337a3b..f9a4efd11c15b 100644 --- a/tests/www/views/test_views_connection.py +++ b/tests/www/views/test_views_connection.py @@ -71,7 +71,7 @@ def test_connection_id_trailing_blanks(admin_client, session): check_content_in_response("Added Row", resp) conn = session.query(Connection).one() - assert "conn_id_with_trailing_blanks" == conn.conn_id + assert conn.conn_id == "conn_id_with_trailing_blanks" def test_connection_id_leading_blanks(admin_client, session): @@ -81,7 +81,7 @@ def test_connection_id_leading_blanks(admin_client, session): check_content_in_response("Added Row", resp) conn = session.query(Connection).one() - assert "conn_id_with_leading_blanks" == conn.conn_id + assert conn.conn_id == "conn_id_with_leading_blanks" def test_all_fields_with_blanks(admin_client, session): @@ -99,10 +99,10 @@ def test_all_fields_with_blanks(admin_client, session): # validate all the fields conn = session.query(Connection).one() - assert "connection_id_with_space" == conn.conn_id - assert "a sample http connection with leading and trailing blanks" == conn.description - assert "localhost" == conn.host - assert "airflow" == conn.schema + assert conn.conn_id == "connection_id_with_space" + assert conn.description == "a sample http connection with leading and trailing blanks" + assert conn.host == "localhost" + assert conn.schema == "airflow" @pytest.mark.enable_redact @@ -452,7 +452,7 @@ def test_connection_form_widgets_testable_types(mock_pm_hooks, 
admin_client): "third": None, } - assert ["first"] == ConnectionFormWidget().testable_connection_types + assert ConnectionFormWidget().testable_connection_types == ["first"] def test_process_form_invalid_extra_removed(admin_client): diff --git a/tests/www/views/test_views_extra_links.py b/tests/www/views/test_views_extra_links.py index e3775e2a5498d..26d378750baca 100644 --- a/tests/www/views/test_views_extra_links.py +++ b/tests/www/views/test_views_extra_links.py @@ -191,7 +191,7 @@ def test_extra_links_error_raised(dag_run, task_1, viewer_client): follow_redirects=True, ) - assert 404 == response.status_code + assert response.status_code == 404 response_str = response.data if isinstance(response.data, bytes): response_str = response_str.decode() diff --git a/tests/www/views/test_views_home.py b/tests/www/views/test_views_home.py index 59a2a288241b7..af6bf0582bab4 100644 --- a/tests/www/views/test_views_home.py +++ b/tests/www/views/test_views_home.py @@ -98,25 +98,25 @@ def call_kwargs(): def test_home_status_filter_cookie(admin_client): with admin_client: admin_client.get("home", follow_redirects=True) - assert "all" == flask.session[FILTER_STATUS_COOKIE] + assert flask.session[FILTER_STATUS_COOKIE] == "all" admin_client.get("home?status=active", follow_redirects=True) - assert "active" == flask.session[FILTER_STATUS_COOKIE] + assert flask.session[FILTER_STATUS_COOKIE] == "active" admin_client.get("home?status=paused", follow_redirects=True) - assert "paused" == flask.session[FILTER_STATUS_COOKIE] + assert flask.session[FILTER_STATUS_COOKIE] == "paused" admin_client.get("home?status=all", follow_redirects=True) - assert "all" == flask.session[FILTER_STATUS_COOKIE] + assert flask.session[FILTER_STATUS_COOKIE] == "all" admin_client.get("home?lastrun=running", follow_redirects=True) - assert "running" == flask.session[FILTER_LASTRUN_COOKIE] + assert flask.session[FILTER_LASTRUN_COOKIE] == "running" admin_client.get("home?lastrun=failed", follow_redirects=True) - assert "failed" == flask.session[FILTER_LASTRUN_COOKIE] + assert flask.session[FILTER_LASTRUN_COOKIE] == "failed" admin_client.get("home?lastrun=all_states", follow_redirects=True) - assert "all_states" == flask.session[FILTER_LASTRUN_COOKIE] + assert flask.session[FILTER_LASTRUN_COOKIE] == "all_states" @pytest.fixture(scope="module") @@ -283,7 +283,7 @@ def _broken_dags_after_working(tmp_path): def test_home_filter_tags(_working_dags, admin_client): with admin_client: admin_client.get("home?tags=example&tags=data", follow_redirects=True) - assert "example,data" == flask.session[FILTER_TAGS_COOKIE] + assert flask.session[FILTER_TAGS_COOKIE] == "example,data" admin_client.get("home?reset_tags", follow_redirects=True) assert flask.session[FILTER_TAGS_COOKIE] is None diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py index a757239c553ba..15cee40796bb2 100644 --- a/tests/www/views/test_views_log.py +++ b/tests/www/views/test_views_log.py @@ -278,7 +278,7 @@ def test_get_logs_with_metadata_as_download_file(log_admin_client, create_expect f"dag_id={DAG_ID}/run_id=scheduled__{date}/task_id={TASK_ID}/attempt={try_number}.log" in content_disposition ) - assert 200 == response.status_code + assert response.status_code == 200 content = response.data.decode("utf-8") assert "Log for testing." in content assert "localhost\n" in content @@ -321,7 +321,7 @@ def test_get_logs_for_changed_filename_format_db( response = log_admin_client.get(url) # Should find the log under corresponding db entry. 
- assert 200 == response.status_code + assert response.status_code == 200 assert "Log for testing." in response.data.decode("utf-8") content_disposition = response.headers["Content-Disposition"] expected_filename = ( @@ -379,7 +379,7 @@ def test_get_logs_with_metadata(log_admin_client, metadata, create_expected_log_ data={"username": "test", "password": "test"}, follow_redirects=True, ) - assert 200 == response.status_code + assert response.status_code == 200 data = response.data.decode() assert '"message":' in data @@ -424,7 +424,7 @@ def test_get_logs_with_metadata_for_removed_dag(_, log_admin_client): data={"username": "test", "password": "test"}, follow_redirects=True, ) - assert 200 == response.status_code + assert response.status_code == 200 data = response.data.decode() assert '"message":' in data @@ -451,7 +451,7 @@ def test_get_logs_response_with_ti_equal_to_none(log_admin_client): data = response.json assert "message" in data assert "error" in data - assert "*** Task instance did not exist in the DB\n" == data["message"] + assert data["message"] == "*** Task instance did not exist in the DB\n" def test_get_logs_with_json_response_format(log_admin_client, create_expected_log_file): @@ -470,7 +470,7 @@ def test_get_logs_with_json_response_format(log_admin_client, create_expected_lo "{}", ) response = log_admin_client.get(url) - assert 200 == response.status_code + assert response.status_code == 200 assert "message" in response.json assert "metadata" in response.json @@ -518,7 +518,7 @@ def test_get_logs_for_handler_without_read_method(mock_reader, log_admin_client) "{}", ) response = log_admin_client.get(url) - assert 200 == response.status_code + assert response.status_code == 200 data = response.json assert "message" in data @@ -538,8 +538,8 @@ def test_redirect_to_external_log_with_local_log_handler(log_admin_client, task_ try_number, ) response = log_admin_client.get(url) - assert 302 == response.status_code - assert "/home" == response.headers["Location"] + assert response.status_code == 302 + assert response.headers["Location"] == "/home" class _ExternalHandler(ExternalLoggingMixin): @@ -572,5 +572,5 @@ def test_redirect_to_external_log_with_external_log_handler(_, log_admin_client) try_number, ) response = log_admin_client.get(url) - assert 302 == response.status_code - assert _ExternalHandler.EXTERNAL_URL == response.headers["Location"] + assert response.status_code == 302 + assert response.headers["Location"] == _ExternalHandler.EXTERNAL_URL diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index 05b55a9f4120b..2f902395e6c9f 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -392,7 +392,7 @@ def test_rendered_k8s(admin_client): def test_rendered_k8s_without_k8s(admin_client): url = f"rendered-k8s?task_id=runme_0&dag_id=example_bash_operator&logical_date={DEFAULT_VAL}" resp = admin_client.get(url, follow_redirects=True) - assert 404 == resp.status_code + assert resp.status_code == 404 def test_tree_trigger_origin_tree_view(app, admin_client):
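The change applied throughout these hunks is purely mechanical: every comparison assertion is reordered so the value under test sits on the left-hand side and the expected literal on the right, which is the ordering that "Yoda condition" lint rules (such as Ruff's SIM300) expect. Below is a minimal sketch of the before/after pattern; the function and test names are invented for illustration and are not taken from the Airflow test suite.

def get_status_code() -> int:
    # Stand-in for whatever piece of code a test exercises.
    return 200


def test_status_code():
    # Style removed by this patch ("Yoda" order, expected literal first):
    #     assert 200 == get_status_code()
    # Style introduced by this patch (actual value first, expected second).
    # pytest's assertion rewriting prints the left-hand side first, so this
    # ordering keeps failure output in the conventional actual == expected shape:
    assert get_status_code() == 200

Both forms are equivalent at runtime; the reordering only affects readability and the orientation of failure messages.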