diff --git a/.github/workflows/deploy-docs.yaml b/.github/workflows/deploy-docs.yaml index 2d55aabed3..30a0df5e09 100644 --- a/.github/workflows/deploy-docs.yaml +++ b/.github/workflows/deploy-docs.yaml @@ -6,6 +6,7 @@ on: paths: - docs/** - .github/workflows/deploy-docs.yaml + - faststream/__about__.py permissions: contents: write @@ -26,10 +27,17 @@ jobs: - run: pip install -e ".[dev]" - run: ./scripts/build-docs.sh - run: echo "VERSION=$(python3 -c 'from importlib.metadata import version; print(".".join(version("faststream").split(".")[:2]))')" >> $GITHUB_ENV + - run: echo "IS_RC=$(python3 -c 'from importlib.metadata import version; print("rc" in version("faststream"))')" >> $GITHUB_ENV - name: Configure Git user run: | git config --local user.email "github-actions[bot]@users.noreply.github.com" git config --local user.name "github-actions[bot]" - run: echo $VERSION - - run: cd docs && mike deploy -F mkdocs.yml --update-aliases $VERSION latest - - run: cd docs && mike set-default --push --allow-empty -F mkdocs.yml latest + - run: echo $IS_RC + - run: | + if [ "$IS_RC" == "False" ]; then + cd docs && mike deploy -F mkdocs.yml --update-aliases $VERSION latest + mike set-default --push --allow-empty -F mkdocs.yml latest + else + cd docs && mike deploy --push -F mkdocs.yml --update-aliases $VERSION + fi diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index bf217c9110..b3f0209d55 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -22,7 +22,7 @@ jobs: run: | set -ux python -m pip install --upgrade pip - pip install -e ".[docs,rabbit,kafka,nats,lint]" + pip install -e ".[docs,rabbit,kafka,redis,nats,lint]" - name: Run mypy shell: bash run: mypy faststream tests/mypy @@ -59,7 +59,7 @@ jobs: key: ${{ runner.os }}-python-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-test-v03 - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[rabbit,kafka,nats,docs,testing] + run: 
pip install .[rabbit,kafka,nats,redis,docs,testing] - name: Install Pydantic v1 if: matrix.pydantic-version == 'pydantic-v1' run: pip install "pydantic>=1.10.0,<2.0.0" @@ -68,7 +68,7 @@ jobs: run: pip install --pre "pydantic>=2.0.0b2,<3.0.0" - run: mkdir coverage - name: Test - run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not rabbit)) or (not nats and not kafka and not rabbit)" + run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not rabbit and not redis)) or (not nats and not kafka and not rabbit and not redis)" env: COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }} CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }} @@ -91,9 +91,9 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[rabbit,kafka,nats,docs,testing] + run: pip install .[rabbit,kafka,nats,redis,docs,testing] - name: Test - run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not rabbit)) or (not nats and not kafka and not rabbit)" + run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not rabbit and not redis)) or (not nats and not kafka and not rabbit and not redis)" test-windows-latest: if: github.event.pull_request.draft == false @@ -108,9 +108,9 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[rabbit,kafka,nats,docs,testing] + run: pip install .[rabbit,kafka,nats,redis,docs,testing] - name: Test - run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not rabbit)) or (not nats and not kafka and not rabbit)" + run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not rabbit and not redis)) or (not nats and not kafka and not rabbit and not redis)" test-kafka-real: if: github.event.pull_request.draft == false @@ -141,7 +141,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install 
Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,rabbit,docs,testing] + run: pip install .[nats,kafka,rabbit,redis,docs,testing] - run: mkdir coverage - name: Test run: bash scripts/test.sh -m "(slow and kafka) or kafka" @@ -189,7 +189,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,rabbit,docs,testing] + run: pip install .[nats,kafka,rabbit,redis,docs,testing] - run: mkdir coverage - name: Test run: bash scripts/test.sh -m "(slow and rabbit) or rabbit" @@ -237,7 +237,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,rabbit,docs,testing] + run: pip install .[nats,kafka,rabbit,redis,docs,testing] - run: mkdir coverage - name: Test run: bash scripts/test.sh -m "(slow and nats) or nats" @@ -267,6 +267,54 @@ jobs: - name: Test run: bash scripts/test.sh -m "not nats" tests/brokers/nats/test_test_client.py + test-redis-real: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + services: + redis: + image: redis:alpine + ports: + - 6379:6379 + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.11" + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: pip install .[nats,kafka,rabbit,redis,docs,testing] + - run: mkdir coverage + - name: Test + run: bash scripts/test.sh -m "(slow and redis) or redis" + env: + COVERAGE_FILE: coverage/.coverage.redis-py + CONTEXT: redis-py + - name: Store coverage files + uses: actions/upload-artifact@v3 + with: + name: coverage + path: coverage + + test-redis-smoke: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: 
actions/setup-python@v4 + with: + python-version: "3.11" + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: pip install .[redis,test-core] + - name: Test + run: bash scripts/test.sh -m "not redis" tests/brokers/redis/test_test_client.py + coverage-combine: if: github.event.pull_request.draft == false needs: @@ -274,6 +322,7 @@ jobs: - test-kafka-real - test-rabbit-real - test-nats-real + - test-redis-real runs-on: ubuntu-latest steps: @@ -314,11 +363,13 @@ jobs: - test-macos-latest - test-windows-latest - test-kafka-real - - test-rabbit-real - - test-nats-real - test-kafka-smoke + - test-rabbit-real - test-rabbit-smoke + - test-nats-real - test-nats-smoke + - test-redis-real + - test-redis-smoke runs-on: ubuntu-latest diff --git a/.secrets.baseline b/.secrets.baseline index ffa425f599..fa7123a678 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -128,7 +128,7 @@ "filename": "docs/docs/en/release.md", "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450", "is_verified": false, - "line_number": 134, + "line_number": 136, "is_secret": false } ], @@ -158,10 +158,10 @@ "filename": "faststream/rabbit/broker.py", "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450", "is_verified": false, - "line_number": 57, + "line_number": 58, "is_secret": false } ] }, - "generated_at": "2023-11-24T08:08:28Z" + "generated_at": "2023-11-30T09:12:40Z" } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 06b7395908..30c6a128f4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,15 +1,5 @@ > **_NOTE:_** This is an auto-generated file. Please edit docs/docs/en/getting-started/contributing/CONTRIBUTING.md instead. ---- -# 0.5 - API -# 2 - Release -# 3 - Contributing -# 5 - Template Page -# 10 - Default -search: - boost: 3 ---- - # Development After cloning the project, you'll need to set up the development environment. Here are the guidelines on how to do this. 
@@ -46,17 +36,17 @@ After activating the virtual environment as described above, run: pip install -e ".[dev]" ``` -This will install all the dependencies and your local FastStream in your virtual environment. +This will install all the dependencies and your local **FastStream** in your virtual environment. -### Using Your local FastStream +### Using Your local **FastStream** -If you create a Python file that imports and uses FastStream, and run it with the Python from your local environment, it will use your local FastStream source code. +If you create a Python file that imports and uses **FastStream**, and run it with the Python from your local environment, it will use your local **FastStream** source code. -Whenever you update your local FastStream source code, it will automatically use the latest version when you run your Python file again. This is because it is installed with `-e`. +Whenever you update your local **FastStream** source code, it will automatically use the latest version when you run your Python file again. This is because it is installed with `-e`. This way, you don't have to "install" your local version to be able to test every change. -To use your local FastStream CLI, type: +To use your local **FastStream CLI**, type: ```bash python -m faststream ... @@ -66,7 +56,7 @@ python -m faststream ... ### Pytest -To run tests with your current FastStream application and Python environment, use: +To run tests with your current **FastStream** application and Python environment, use: ```bash pytest tests @@ -82,6 +72,7 @@ In your project, you'll find some *pytest marks*: * **rabbit** * **kafka** * **nats** +* **redis** * **all** By default, running *pytest* will execute "not slow" tests. 
@@ -95,7 +86,7 @@ pytest -m 'all' If you don't have a local broker instance running, you can run tests without those dependencies: ```bash -pytest -m 'not rabbit and not kafka and not nats' +pytest -m 'not rabbit and not kafka and not nats and not redis' ``` To run tests based on RabbitMQ, Kafka, or other dependencies, the following dependencies are needed to be started as docker containers: @@ -140,6 +131,14 @@ services: # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges security_opt: - no-new-privileges:true + # nosemgrep: yaml.docker-compose.security.writable-filesystem-service.writable-filesystem-service + redis: + image: redis:alpine + ports: + - 6379:6379 + # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges + security_opt: + - no-new-privileges:true ``` You can start the dependencies easily using provided script by running: diff --git a/README.md b/README.md index 274cfe7c33..f1321205e1 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ parsing, networking and documentation generation automatically. Making streaming microservices has never been easier. Designed with junior developers in mind, **FastStream** simplifies your work while keeping the door open for more advanced use cases. Here's a look at the core features that make **FastStream** a go-to framework for modern, data-centric microservices. 
-- **Multiple Brokers**: **FastStream** provides a unified API to work across multiple message brokers (**Kafka**, **RabbitMQ**, **NATS**, support) +- **Multiple Brokers**: **FastStream** provides a unified API to work across multiple message brokers (**Kafka**, **RabbitMQ**, **NATS**, **Redis** support) - [**Pydantic Validation**](#writing-app-code): Leverage [**Pydantic's**](https://docs.pydantic.dev/) validation capabilities to serialize and validates incoming messages @@ -102,6 +102,8 @@ pip install faststream[kafka] pip install faststream[rabbit] # or pip install faststream[nats] +# or +pip install faststream[redis] ``` By default **FastStream** uses **PydanticV2** written in **Rust**, but you can downgrade it manually, if your platform has no **Rust** support - **FastStream** will work correctly with **PydanticV1** as well. @@ -129,10 +131,12 @@ from faststream import FastStream from faststream.kafka import KafkaBroker # from faststream.rabbit import RabbitBroker # from faststream.nats import NatsBroker +# from faststream.redis import RedisBroker broker = KafkaBroker("localhost:9092") # broker = RabbitBroker("amqp://guest:guest@localhost:5672/") # broker = NatsBroker("nats://localhost:4222/") +# broker = RedisBroker("redis://localhost:6379/") app = FastStream(broker) diff --git a/docs/create_api_docs.py b/docs/create_api_docs.py index 1678f88f87..2d892fa83c 100644 --- a/docs/create_api_docs.py +++ b/docs/create_api_docs.py @@ -252,6 +252,7 @@ def _generate_api_docs_for_module(root_path: Path, module_name: str) -> str: api_summary = _get_api_summary(members_with_submodules) api_root = root_path / "docs" / "en" / "api" + api_root.mkdir(parents=True, exist_ok=True) (api_root / ".meta.yml").write_text(API_META) @@ -274,9 +275,9 @@ def create_api_docs( docs_dir = root_path / "docs" # read summary template from file - summary_template = (docs_dir / "summary_template.txt").read_text() + navigation_template = (docs_dir / "navigation_template.txt").read_text() - 
summary = summary_template.format(api=api) + summary = navigation_template.format(api=api) summary = "\n".join(filter( bool, diff --git a/docs/docs.py b/docs/docs.py index 0d9bf597da..93350b9e0a 100644 --- a/docs/docs.py +++ b/docs/docs.py @@ -9,10 +9,10 @@ import mkdocs.commands.serve import typer from create_api_docs import create_api_docs -from expand_markdown import expand_markdown, remove_lines_between_dashes -from update_releases import update_release_notes +from expand_markdown import expand_markdown from mkdocs.config import load_config from typing_extensions import Annotated +from update_releases import find_metablock, update_release_notes IGNORE_DIRS = ("assets", "stylesheets") @@ -31,7 +31,6 @@ EN_DOCS_DIR / "getting-started" / "contributing" / "CONTRIBUTING.md" ) CONTRIBUTING_PATH = BASE_DIR.parent / "CONTRIBUTING.md" -FASTSTREAM_GEN_DOCS_PATH = BASE_DIR.parent / ".faststream_gen" config = load_config(str(CONFIG)) @@ -39,12 +38,12 @@ DEV_SERVER = str(config.get("dev_addr", "0.0.0.0:8008")) -def get_missing_translation(lng: str) -> str: - return str(Path(DOCS_DIR.name) / lng / "helpful" / "missing-translation.md") +def get_missing_translation(lng: str) -> Path: + return DOCS_DIR / lng / "helpful" / "missing-translation.md" -def get_in_progress(lng: str) -> str: - return str(Path(DOCS_DIR.name) / lng / "helpful" / "in-progress.md") +def get_in_progress(lng: str) -> Path: + return DOCS_DIR / lng / "helpful" / "in-progress.md" app = typer.Typer() @@ -82,7 +81,7 @@ def preview(): typer.echo("Warning: this is a very simple server.") typer.echo("For development, use the command live instead.") typer.echo("This is here only to preview a builded site.") - os.chdir(str(BUILD_DIR)) + os.chdir(BUILD_DIR) addr, port = DEV_SERVER.split(":") server = HTTPServer((addr, int(port)), SimpleHTTPRequestHandler) typer.echo(f"Serving at: http://{DEV_SERVER}") @@ -174,7 +173,7 @@ def mv(path: str = typer.Argument(...), new_path: str = typer.Argument(...)): def update_readme() 
-> None: """Update README.md by expanding embeddings in docs/docs/en/index.md""" # todo: fix this function - typer.echo(f"Skipping updating README.md for now") + typer.echo("Skipping updating README.md for now") return None # typer.echo(f"Updating README.md") @@ -192,16 +191,22 @@ def update_readme() -> None: @app.command() def update_contributing(): """Update CONTRIBUTING.md by expanding embeddings in docs/docs/en/CONTRIBUTING.md""" - typer.echo(f"Updating CONTRIBUTING.md") + typer.echo("Updating CONTRIBUTING.md") expand_markdown( - input_markdown_path=EN_CONTRIBUTING_PATH, output_markdown_path=CONTRIBUTING_PATH + input_markdown_path=EN_CONTRIBUTING_PATH, + output_markdown_path=CONTRIBUTING_PATH, ) - relative_path = os.path.relpath(EN_CONTRIBUTING_PATH, BASE_DIR.parent) - auto_generated = f"> **_NOTE:_** This is an auto-generated file. Please edit {relative_path} instead.\n\n" + existing_content = CONTRIBUTING_PATH.read_text() - existing_content = open(CONTRIBUTING_PATH).read() - open(CONTRIBUTING_PATH, "w").write(auto_generated + existing_content) + _, content = find_metablock(existing_content.splitlines()) + + relative_path = EN_CONTRIBUTING_PATH.relative_to(BASE_DIR.parent) + + CONTRIBUTING_PATH.write_text("\n".join(( + f"> **_NOTE:_** This is an auto-generated file. 
Please edit {relative_path} instead.", + *content + ))+"\n") @app.command() @@ -213,9 +218,12 @@ def build_api_docs(): def _build(): subprocess.run(["mkdocs", "build", "--site-dir", BUILD_DIR], check=True) + build_api_docs() update_readme() update_contributing() + + typer.echo("Updating Release Notes") update_release_notes(realease_notes_path=EN_DOCS_DIR / "release.md") diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index eb62df049c..c477a2f18d 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -1,7 +1,3 @@ ---- -search: - exclude: true ---- - [Features](faststream.md) - Tutorial - [Getting Started](getting-started/index.md) @@ -28,8 +24,9 @@ search: - [Decoder](getting-started/serialization/decoder.md) - [Examples](getting-started/serialization/examples.md) - [Lifespan](getting-started/lifespan/index.md) - - [Lifespan Hooks](getting-started/lifespan/hooks.md) - - [Lifespan Testing](getting-started/lifespan/test.md) + - [Hooks](getting-started/lifespan/hooks.md) + - [Context](getting-started/lifespan/context.md) + - [Testing](getting-started/lifespan/test.md) - [Middlewares](getting-started/middlewares/index.md) - AsyncAPI - [Schema Export](getting-started/asyncapi/export.md) @@ -77,6 +74,22 @@ search: - [Publishing](nats/publishing/index.md) - [RPC](nats/rpc.md) - [Message Information](nats/message.md) +- [Redis](redis/index.md) + - [Pub/Sub](redis/pubsub/index.md) + - [Subscription](redis/pubsub/subscription.md) + - [Publishing](redis/pubsub/publishing.md) + - [List](redis/list/index.md) + - [Subscription](redis/list/subscription.md) + - [Publishing](redis/list/publishing.md) + - [Batching](redis/list/batch.md) + - [Streams](redis/streams/index.md) + - [Subscription](redis/streams/subscription.md) + - [Publishing](redis/streams/publishing.md) + - [Groups](redis/streams/groups.md) + - [Batching](redis/streams/batch.md) + - [Acknowledgement](redis/streams/ack.md) + - [RPC](redis/rpc.md) + - [Message Information](redis/message.md) - [Reference 
- Code API](api/faststream/index.md) - faststream - [BaseMiddleware](api/faststream/BaseMiddleware.md) @@ -159,7 +172,6 @@ search: - info - [Contact](api/faststream/asyncapi/schema/info/Contact.md) - [ContactDict](api/faststream/asyncapi/schema/info/ContactDict.md) - - [EmailStr](api/faststream/asyncapi/schema/info/EmailStr.md) - [Info](api/faststream/asyncapi/schema/info/Info.md) - [License](api/faststream/asyncapi/schema/info/License.md) - [LicenseDict](api/faststream/asyncapi/schema/info/LicenseDict.md) @@ -469,6 +481,56 @@ search: - [PatchedMessage](api/faststream/rabbit/test/PatchedMessage.md) - [TestRabbitBroker](api/faststream/rabbit/test/TestRabbitBroker.md) - [build_message](api/faststream/rabbit/test/build_message.md) + - redis + - [ListSub](api/faststream/redis/ListSub.md) + - [PubSub](api/faststream/redis/PubSub.md) + - [RedisBroker](api/faststream/redis/RedisBroker.md) + - [RedisRoute](api/faststream/redis/RedisRoute.md) + - [RedisRouter](api/faststream/redis/RedisRouter.md) + - [StreamSub](api/faststream/redis/StreamSub.md) + - [TestApp](api/faststream/redis/TestApp.md) + - [TestRedisBroker](api/faststream/redis/TestRedisBroker.md) + - asyncapi + - [Handler](api/faststream/redis/asyncapi/Handler.md) + - [Publisher](api/faststream/redis/asyncapi/Publisher.md) + - broker + - [RedisBroker](api/faststream/redis/broker/RedisBroker.md) + - fastapi + - [RedisRouter](api/faststream/redis/fastapi/RedisRouter.md) + - handler + - [LogicRedisHandler](api/faststream/redis/handler/LogicRedisHandler.md) + - message + - [AnyRedisDict](api/faststream/redis/message/AnyRedisDict.md) + - [BatchMessage](api/faststream/redis/message/BatchMessage.md) + - [BatchRedisMessage](api/faststream/redis/message/BatchRedisMessage.md) + - [OneMessage](api/faststream/redis/message/OneMessage.md) + - [OneRedisMessage](api/faststream/redis/message/OneRedisMessage.md) + - [PubSubMessage](api/faststream/redis/message/PubSubMessage.md) + - 
[RedisAckMixin](api/faststream/redis/message/RedisAckMixin.md) + - [RedisMessage](api/faststream/redis/message/RedisMessage.md) + - parser + - [RawMessage](api/faststream/redis/parser/RawMessage.md) + - [RedisParser](api/faststream/redis/parser/RedisParser.md) + - producer + - [RedisFastProducer](api/faststream/redis/producer/RedisFastProducer.md) + - publisher + - [LogicPublisher](api/faststream/redis/publisher/LogicPublisher.md) + - router + - [RedisRouter](api/faststream/redis/router/RedisRouter.md) + - schemas + - [ListSub](api/faststream/redis/schemas/ListSub.md) + - [PubSub](api/faststream/redis/schemas/PubSub.md) + - [StreamSub](api/faststream/redis/schemas/StreamSub.md) + - shared + - logging + - [RedisLoggingMixin](api/faststream/redis/shared/logging/RedisLoggingMixin.md) + - router + - [RedisRoute](api/faststream/redis/shared/router/RedisRoute.md) + - [RedisRouter](api/faststream/redis/shared/router/RedisRouter.md) + - test + - [FakeProducer](api/faststream/redis/test/FakeProducer.md) + - [TestRedisBroker](api/faststream/redis/test/TestRedisBroker.md) + - [build_message](api/faststream/redis/test/build_message.md) - security - [BaseSecurity](api/faststream/security/BaseSecurity.md) - [SASLPlaintext](api/faststream/security/SASLPlaintext.md) @@ -482,6 +544,11 @@ search: - [NoCast](api/faststream/utils/NoCast.md) - [Path](api/faststream/utils/Path.md) - [apply_types](api/faststream/utils/apply_types.md) + - ast + - [find_ast_node](api/faststream/utils/ast/find_ast_node.md) + - [find_withitems](api/faststream/utils/ast/find_withitems.md) + - [get_withitem_calls](api/faststream/utils/ast/get_withitem_calls.md) + - [is_contains_context_name](api/faststream/utils/ast/is_contains_context_name.md) - classes - [Singleton](api/faststream/utils/classes/Singleton.md) - context @@ -504,6 +571,8 @@ search: - [filter_by_dict](api/faststream/utils/data/filter_by_dict.md) - functions - [call_or_await](api/faststream/utils/functions/call_or_await.md) + - 
[drop_response_type](api/faststream/utils/functions/drop_response_type.md) + - [fake_context](api/faststream/utils/functions/fake_context.md) - [get_function_positional_arguments](api/faststream/utils/functions/get_function_positional_arguments.md) - [timeout_scope](api/faststream/utils/functions/timeout_scope.md) - [to_async](api/faststream/utils/functions/to_async.md) diff --git a/docs/docs/en/api/faststream/asyncapi/schema/info/EmailStr.md b/docs/docs/en/api/faststream/redis/ListSub.md similarity index 70% rename from docs/docs/en/api/faststream/asyncapi/schema/info/EmailStr.md rename to docs/docs/en/api/faststream/redis/ListSub.md index 553df26149..9c97a0afcd 100644 --- a/docs/docs/en/api/faststream/asyncapi/schema/info/EmailStr.md +++ b/docs/docs/en/api/faststream/redis/ListSub.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.asyncapi.schema.info.EmailStr +::: faststream.redis.ListSub diff --git a/docs/docs/en/api/faststream/redis/PubSub.md b/docs/docs/en/api/faststream/redis/PubSub.md new file mode 100644 index 0000000000..d2fba00014 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/PubSub.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.PubSub diff --git a/docs/docs/en/api/faststream/redis/RedisBroker.md b/docs/docs/en/api/faststream/redis/RedisBroker.md new file mode 100644 index 0000000000..7275bfb60a --- /dev/null +++ b/docs/docs/en/api/faststream/redis/RedisBroker.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.RedisBroker diff --git a/docs/docs/en/api/faststream/redis/RedisRoute.md b/docs/docs/en/api/faststream/redis/RedisRoute.md new file mode 100644 index 0000000000..9a4db2729b --- /dev/null +++ b/docs/docs/en/api/faststream/redis/RedisRoute.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 
- Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.broker.router.BrokerRoute diff --git a/docs/docs/en/api/faststream/redis/RedisRouter.md b/docs/docs/en/api/faststream/redis/RedisRouter.md new file mode 100644 index 0000000000..9b7292e703 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/RedisRouter.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.RedisRouter diff --git a/docs/docs/en/api/faststream/redis/StreamSub.md b/docs/docs/en/api/faststream/redis/StreamSub.md new file mode 100644 index 0000000000..d1244238b6 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/StreamSub.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.StreamSub diff --git a/docs/docs/en/api/faststream/redis/TestApp.md b/docs/docs/en/api/faststream/redis/TestApp.md new file mode 100644 index 0000000000..52842547f0 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/TestApp.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.broker.test.TestApp diff --git a/docs/docs/en/api/faststream/redis/TestRedisBroker.md b/docs/docs/en/api/faststream/redis/TestRedisBroker.md new file mode 100644 index 0000000000..703490c302 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/TestRedisBroker.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.TestRedisBroker diff --git a/docs/docs/en/api/faststream/redis/asyncapi/Handler.md b/docs/docs/en/api/faststream/redis/asyncapi/Handler.md new file mode 100644 index 0000000000..6863997668 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/asyncapi/Handler.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - 
Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.asyncapi.Handler diff --git a/docs/docs/en/api/faststream/redis/asyncapi/Publisher.md b/docs/docs/en/api/faststream/redis/asyncapi/Publisher.md new file mode 100644 index 0000000000..a4bec84fd4 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/asyncapi/Publisher.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.asyncapi.Publisher diff --git a/docs/docs/en/api/faststream/redis/broker/RedisBroker.md b/docs/docs/en/api/faststream/redis/broker/RedisBroker.md new file mode 100644 index 0000000000..ad22e9e965 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/broker/RedisBroker.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.broker.RedisBroker diff --git a/docs/docs/en/api/faststream/redis/fastapi/RedisRouter.md b/docs/docs/en/api/faststream/redis/fastapi/RedisRouter.md new file mode 100644 index 0000000000..7894f88728 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/fastapi/RedisRouter.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.fastapi.RedisRouter diff --git a/docs/docs/en/api/faststream/redis/handler/LogicRedisHandler.md b/docs/docs/en/api/faststream/redis/handler/LogicRedisHandler.md new file mode 100644 index 0000000000..ad9a6530f4 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/handler/LogicRedisHandler.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.handler.LogicRedisHandler diff --git a/docs/docs/en/api/faststream/redis/message/AnyRedisDict.md 
b/docs/docs/en/api/faststream/redis/message/AnyRedisDict.md new file mode 100644 index 0000000000..babc4b57f4 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/message/AnyRedisDict.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.message.AnyRedisDict diff --git a/docs/docs/en/api/faststream/redis/message/BatchMessage.md b/docs/docs/en/api/faststream/redis/message/BatchMessage.md new file mode 100644 index 0000000000..8c86e83a77 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/message/BatchMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.message.BatchMessage diff --git a/docs/docs/en/api/faststream/redis/message/BatchRedisMessage.md b/docs/docs/en/api/faststream/redis/message/BatchRedisMessage.md new file mode 100644 index 0000000000..ea2e75686d --- /dev/null +++ b/docs/docs/en/api/faststream/redis/message/BatchRedisMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.message.BatchRedisMessage diff --git a/docs/docs/en/api/faststream/redis/message/OneMessage.md b/docs/docs/en/api/faststream/redis/message/OneMessage.md new file mode 100644 index 0000000000..7d17df8d47 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/message/OneMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.message.OneMessage diff --git a/docs/docs/en/api/faststream/redis/message/OneRedisMessage.md b/docs/docs/en/api/faststream/redis/message/OneRedisMessage.md new file mode 100644 index 0000000000..ce450e6fe0 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/message/OneRedisMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 
2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.message.OneRedisMessage diff --git a/docs/docs/en/api/faststream/redis/message/PubSubMessage.md b/docs/docs/en/api/faststream/redis/message/PubSubMessage.md new file mode 100644 index 0000000000..795cecb12e --- /dev/null +++ b/docs/docs/en/api/faststream/redis/message/PubSubMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.message.PubSubMessage diff --git a/docs/docs/en/api/faststream/redis/message/RedisAckMixin.md b/docs/docs/en/api/faststream/redis/message/RedisAckMixin.md new file mode 100644 index 0000000000..06ad2ee1f6 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/message/RedisAckMixin.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.message.RedisAckMixin diff --git a/docs/docs/en/api/faststream/redis/message/RedisMessage.md b/docs/docs/en/api/faststream/redis/message/RedisMessage.md new file mode 100644 index 0000000000..1b0654e7ce --- /dev/null +++ b/docs/docs/en/api/faststream/redis/message/RedisMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.message.RedisMessage diff --git a/docs/docs/en/api/faststream/redis/parser/RawMessage.md b/docs/docs/en/api/faststream/redis/parser/RawMessage.md new file mode 100644 index 0000000000..4add7b37fd --- /dev/null +++ b/docs/docs/en/api/faststream/redis/parser/RawMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.parser.RawMessage diff --git a/docs/docs/en/api/faststream/redis/parser/RedisParser.md 
b/docs/docs/en/api/faststream/redis/parser/RedisParser.md new file mode 100644 index 0000000000..92be79bafc --- /dev/null +++ b/docs/docs/en/api/faststream/redis/parser/RedisParser.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.parser.RedisParser diff --git a/docs/docs/en/api/faststream/redis/producer/RedisFastProducer.md b/docs/docs/en/api/faststream/redis/producer/RedisFastProducer.md new file mode 100644 index 0000000000..c981d8d378 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/producer/RedisFastProducer.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.producer.RedisFastProducer diff --git a/docs/docs/en/api/faststream/redis/publisher/LogicPublisher.md b/docs/docs/en/api/faststream/redis/publisher/LogicPublisher.md new file mode 100644 index 0000000000..aae6da11af --- /dev/null +++ b/docs/docs/en/api/faststream/redis/publisher/LogicPublisher.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.publisher.LogicPublisher diff --git a/docs/docs/en/api/faststream/redis/router/RedisRouter.md b/docs/docs/en/api/faststream/redis/router/RedisRouter.md new file mode 100644 index 0000000000..373ceea5a8 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/router/RedisRouter.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.router.RedisRouter diff --git a/docs/docs/en/api/faststream/redis/schemas/ListSub.md b/docs/docs/en/api/faststream/redis/schemas/ListSub.md new file mode 100644 index 0000000000..3e0b448229 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/schemas/ListSub.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - 
Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.schemas.ListSub diff --git a/docs/docs/en/api/faststream/redis/schemas/PubSub.md b/docs/docs/en/api/faststream/redis/schemas/PubSub.md new file mode 100644 index 0000000000..078a8e2d8e --- /dev/null +++ b/docs/docs/en/api/faststream/redis/schemas/PubSub.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.schemas.PubSub diff --git a/docs/docs/en/api/faststream/redis/schemas/StreamSub.md b/docs/docs/en/api/faststream/redis/schemas/StreamSub.md new file mode 100644 index 0000000000..396e594c0b --- /dev/null +++ b/docs/docs/en/api/faststream/redis/schemas/StreamSub.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.schemas.StreamSub diff --git a/docs/docs/en/api/faststream/redis/shared/logging/RedisLoggingMixin.md b/docs/docs/en/api/faststream/redis/shared/logging/RedisLoggingMixin.md new file mode 100644 index 0000000000..8fcf106f58 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/shared/logging/RedisLoggingMixin.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.shared.logging.RedisLoggingMixin diff --git a/docs/docs/en/api/faststream/redis/shared/router/RedisRoute.md b/docs/docs/en/api/faststream/redis/shared/router/RedisRoute.md new file mode 100644 index 0000000000..9a4db2729b --- /dev/null +++ b/docs/docs/en/api/faststream/redis/shared/router/RedisRoute.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.broker.router.BrokerRoute diff --git a/docs/docs/en/api/faststream/redis/shared/router/RedisRouter.md 
b/docs/docs/en/api/faststream/redis/shared/router/RedisRouter.md new file mode 100644 index 0000000000..e665817d83 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/shared/router/RedisRouter.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.shared.router.RedisRouter diff --git a/docs/docs/en/api/faststream/redis/test/FakeProducer.md b/docs/docs/en/api/faststream/redis/test/FakeProducer.md new file mode 100644 index 0000000000..478caeadb9 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/test/FakeProducer.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.test.FakeProducer diff --git a/docs/docs/en/api/faststream/redis/test/TestRedisBroker.md b/docs/docs/en/api/faststream/redis/test/TestRedisBroker.md new file mode 100644 index 0000000000..6f65e8d080 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/test/TestRedisBroker.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.test.TestRedisBroker diff --git a/docs/docs/en/api/faststream/redis/test/build_message.md b/docs/docs/en/api/faststream/redis/test/build_message.md new file mode 100644 index 0000000000..45cefc0cb7 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/test/build_message.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.test.build_message diff --git a/docs/docs/en/api/faststream/utils/ast/find_ast_node.md b/docs/docs/en/api/faststream/utils/ast/find_ast_node.md new file mode 100644 index 0000000000..228e6f058c --- /dev/null +++ b/docs/docs/en/api/faststream/utils/ast/find_ast_node.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 
5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.utils.ast.find_ast_node diff --git a/docs/docs/en/api/faststream/utils/ast/find_withitems.md b/docs/docs/en/api/faststream/utils/ast/find_withitems.md new file mode 100644 index 0000000000..123acd71e4 --- /dev/null +++ b/docs/docs/en/api/faststream/utils/ast/find_withitems.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.utils.ast.find_withitems diff --git a/docs/docs/en/api/faststream/utils/ast/get_withitem_calls.md b/docs/docs/en/api/faststream/utils/ast/get_withitem_calls.md new file mode 100644 index 0000000000..c9d68c1ed2 --- /dev/null +++ b/docs/docs/en/api/faststream/utils/ast/get_withitem_calls.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.utils.ast.get_withitem_calls diff --git a/docs/docs/en/api/faststream/utils/ast/is_contains_context_name.md b/docs/docs/en/api/faststream/utils/ast/is_contains_context_name.md new file mode 100644 index 0000000000..61cf140ea6 --- /dev/null +++ b/docs/docs/en/api/faststream/utils/ast/is_contains_context_name.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.utils.ast.is_contains_context_name diff --git a/docs/docs/en/api/faststream/utils/functions/drop_response_type.md b/docs/docs/en/api/faststream/utils/functions/drop_response_type.md new file mode 100644 index 0000000000..a39e8a2699 --- /dev/null +++ b/docs/docs/en/api/faststream/utils/functions/drop_response_type.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.utils.functions.drop_response_type diff --git a/docs/docs/en/api/faststream/utils/functions/fake_context.md 
b/docs/docs/en/api/faststream/utils/functions/fake_context.md new file mode 100644 index 0000000000..3943186ba4 --- /dev/null +++ b/docs/docs/en/api/faststream/utils/functions/fake_context.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.utils.functions.fake_context diff --git a/docs/docs/en/getting-started/asyncapi/custom.md b/docs/docs/en/getting-started/asyncapi/custom.md index 08287a1a38..c9160e20d2 100644 --- a/docs/docs/en/getting-started/asyncapi/custom.md +++ b/docs/docs/en/getting-started/asyncapi/custom.md @@ -1,37 +1,45 @@ # Customizing AsyncAPI Documentation for FastStream -In this guide, we will explore how to customize AsyncAPI documentation for your FastStream application. Whether you want to add custom app info, broker information, handlers, or fine-tune payload details, we'll walk you through each step. +In this guide, we will explore how to customize **AsyncAPI** documentation for your **FastStream** application. Whether you want to add custom app info, broker information, handlers, or fine-tune payload details, we'll walk you through each step. ## Prerequisites -Before we dive into customization, ensure you have a basic FastStream application up and running. If you haven't done that yet, let's setup a simple appication right now. +Before we dive into customization, ensure you have a basic **FastStream** application up and running. If you haven't done that yet, let's setup a simple appication right now. Copy the following code in your basic.py file: ```python linenums="1" - {!> docs_src/getting_started/asyncapi/asyncapi_customization/basic.py !} +{!> docs_src/getting_started/asyncapi/asyncapi_customization/basic.py !} ``` -Now, when you run ```{!> docs_src/getting_started/asyncapi/serve.py [ln:17] !}``` you should see the following documentation: +Now, when you run +```shell +{! 
docs_src/getting_started/asyncapi/serve.py [ln:17] !} +``` +you should see the following documentation: ![HTML-page](../../../assets/img/AsyncAPI-basic-html-short.png){ loading=lazy } ## Setup Custom FastStream App Info -Let's start by customizing the app information that appears in your AsyncAPI documentation. This is a great way to give your documentation a personal touch. Here's how: +Let's start by customizing the app information that appears in your **AsyncAPI** documentation. This is a great way to give your documentation a personal touch. Here's how: -1. Locate the app configuration in your FastStream application. -1. Update the `title`, `version`, and `description` fields to reflect your application's details. -1. Save the changes. -1. Serve your FastStream app documentation. +1. Locate the app configuration in your **FastStream** application. +2. Update the `title`, `version`, and `description` fields to reflect your application's details. +3. Save the changes. +4. Serve your **FastStream** app documentation. -Copy the following code in your basic.py file, we have highligted the additional info passed to FastStream app: +Copy the following code in your basic.py file, we have highligted the additional info passed to **FastStream** app: ```python linenums="1" hl_lines="6-15" {!> docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py !} ``` -Now, when you run ```{!> docs_src/getting_started/asyncapi/serve.py [ln:17] !}``` you should see the following in your general app documentation: +Now, when you run +```shell +{! docs_src/getting_started/asyncapi/serve.py [ln:17] !} +``` +you should see the following in your general app documentation: ![HTML-page](../../../assets/img/AsyncAPI-custom-info.png){ loading=lazy } @@ -44,43 +52,51 @@ Now, your documentation reflects your application's identity and purpose. The next step is to customize broker information. This helps users understand the messaging system your application uses. 
Follow these steps: -1. Locate the broker configuration in your FastStream application. +1. Locate the broker configuration in your **FastStream** application. 1. Update the `description` field. 1. Update the `asyncapi_url` field with a non-sensitive URL if you want to conceal your broker's actual bootstrap server URL. 1. Save the changes. -1. Serve your FastStream app. +1. Serve your **FastStream** app. -Copy the following code in your basic.py file, we have highligted the additional info passed to the FastStream app broker: +Copy the following code in your basic.py file, we have highligted the additional info passed to the **FastStream** app broker: ```python linenums="1" hl_lines="5-9" {!> docs_src/getting_started/asyncapi/asyncapi_customization/custom_broker.py !} ``` -Now, when you run ```{!> docs_src/getting_started/asyncapi/serve.py [ln:17] !}``` you should see the description in your broker documentation: +Now, when you run +```shell +{! docs_src/getting_started/asyncapi/serve.py [ln:17] !} +``` +you should see the description in your broker documentation: ![HTML-page](../../../assets/img/AsyncAPI-custom-broker.png){ loading=lazy } -Your AsyncAPI documentation now provides clear insights into the messaging infrastructure you're using. +Your **AsyncAPI** documentation now provides clear insights into the messaging infrastructure you're using. ## Setup Custom Handler Information Customizing handler information helps users comprehend the purpose and behavior of each message handler. Here's how to do it: -1. Navigate to your handler definitions in your FastStream application. +1. Navigate to your handler definitions in your **FastStream** application. 1. Add descriptions to each handler using `description` field. 1. For subscriber, consumer function's docstring can be used as `description`. 1. Add titles to each handler using `title` field adhering to [URI format](https://datatracker.ietf.org/doc/html/rfc3986). 1. 
Add publishing schema to publisher handler using `schema` field. 1. Save the changes. -1. Serve your FastStream app. +1. Serve your **FastStream** app. -Copy the following code in your basic.py file, we have highligted the additional info passed to the FastStream app handlers: +Copy the following code in your basic.py file, we have highligted the additional info passed to the **FastStream** app handlers: ```python linenums="1" hl_lines="17-25 27-31" {!> docs_src/getting_started/asyncapi/asyncapi_customization/custom_handler.py !} ``` -Now, when you run ```{!> docs_src/getting_started/asyncapi/serve.py [ln:17] !}``` you should see the descriptions in your handlers: +Now, when you run +```shell +{!> docs_src/getting_started/asyncapi/serve.py [ln:17] !} +``` +you should see the descriptions in your handlers: ![HTML-page](../../../assets/img/AsyncAPI-custom-handler.png){ loading=lazy } @@ -91,10 +107,10 @@ Now, your documentation is enriched with meaningful details about each message h To describe your message payload effectively, you can use Pydantic models. Here's how: 1. Define Pydantic models for your message payloads. -1. Annotate these models with descriptions and examples. -1. Use these models as argument types or return types in your handlers. -1. Save the changes. -1. Serve your FastStream app. +2. Annotate these models with descriptions and examples. +3. Use these models as argument types or return types in your handlers. +4. Save the changes. +5. Serve your **FastStream** app. 
Copy the following code in your basic.py file, we have highligted the creation of payload info and you can see it being passed to the return type and the `msg` argument type in the `on_input_data` function: @@ -102,25 +118,35 @@ Copy the following code in your basic.py file, we have highligted the creation o {!> docs_src/getting_started/asyncapi/asyncapi_customization/payload_info.py !} ``` -Now, when you run ```{!> docs_src/getting_started/asyncapi/serve.py [ln:17] !}``` you should see the payload schema described in your documentation: +Now, when you run +```shell +{! docs_src/getting_started/asyncapi/serve.py [ln:17] !} +``` +you should see the payload schema described in your documentation: ![HTML-page](../../../assets/img/AsyncAPI-payload-info.png){ loading=lazy } -Your AsyncAPI documentation now showcases well-structured payload information. +Your **AsyncAPI** documentation now showcases well-structured payload information. -## Generate Schema.json, Customize Manually, and Serve It +## Generate Schema.json, customize and serve it To take customization to the next level, you can manually modify the schema.json file. Follow these steps: -1. Generate the initial schema.json by running ```{!> docs_src/getting_started/asyncapi/serve.py [ln:9] !}```. -1. Manually edit the asyncapi.json file to add custom fields, descriptions, and details. -1. Save your changes. -1. Serve your FastStream app with the updated asyncapi.json by running ```{!> docs_src/getting_started/asyncapi/serve.py [ln:21] !}```. +1. Generate the initial schema.json by running + ```shell + {! docs_src/getting_started/asyncapi/serve.py [ln:9] !} + ``` +2. Manually edit the `asyncapi.json` file to add custom fields, descriptions, and details. +3. Save your changes. +4. Serve your **FastStream** app with the updated asyncapi.json by running + ```shell + {! docs_src/getting_started/asyncapi/serve.py [ln:21] !} + ``` -Now, you have fine-tuned control over your AsyncAPI documentation. 
+Now, you have fine-tuned control over your **AsyncAPI** documentation. ## Conclusion -Customizing AsyncAPI documentation for your FastStream application not only enhances its appearance but also provides valuable insights to users. With these steps, you can create documentation that's not only informative but also uniquely yours. +Customizing **AsyncAPI** documentation for your **FastStream** application not only enhances its appearance but also provides valuable insights to users. With these steps, you can create documentation that's not only informative but also uniquely yours. -Happy coding with your customized FastStream AsyncAPI documentation! +Happy coding with your customized **FastStream** **AsyncAPI** documentation! diff --git a/docs/docs/en/getting-started/asyncapi/export.md b/docs/docs/en/getting-started/asyncapi/export.md index 3ab72cd847..91ab0506dd 100644 --- a/docs/docs/en/getting-started/asyncapi/export.md +++ b/docs/docs/en/getting-started/asyncapi/export.md @@ -28,7 +28,7 @@ If you prefer `yaml` instead of `json`, please run the following command to gene {!> docs_src/getting_started/asyncapi/serve.py[ln:13]!} ``` -!!! note +!!! tip To generate the documentation in yaml format, please install the necessary dependency to work with **YAML** file format at first. ``` shell diff --git a/docs/docs/en/getting-started/cli/index.md b/docs/docs/en/getting-started/cli/index.md index c9585a3a7f..0a8a820218 100644 --- a/docs/docs/en/getting-started/cli/index.md +++ b/docs/docs/en/getting-started/cli/index.md @@ -60,6 +60,9 @@ work with your project easily. Edit the code as much as you like - the new versi faststream run serve:app --reload ``` +!!! tip + {!> includes/en/watchfiles.md !} + ```{ .shell .no-copy } INFO - Started reloader process [7902] using WatchFiles INFO - FastStream app starting... @@ -68,6 +71,12 @@ INFO - FastStream app started successfully! 
To exit press CTRL+C ``` { data-search-exclude } +By default **FastStream** watches for `.py` file changes, but you can specify an extra file extensions to watch by (your config files as an example) + +```shell +faststream run serve:app --reload --reload-ext .yml --realod-ext .yaml +``` + ### Environment Management You can pass any custom flags and launch options to the **FastStream CLI** even without first registering them. Just use them when launching the application - and they will be right in your environment. @@ -97,11 +106,12 @@ All passed values can be of type `#!python bool`, `#!python str` or `#!python li In this case, the flags will be interpreted as follows: ```{ .shell .no-copy } -faststream run app:app --flag # flag = True -faststream run app:app --no-flag # flag = False -faststream run app:app --my-flag # my_flag = True -faststream run app:app --key value # key = "value" -faststream run app:app --key 1 2 # key = ["1", "2"] +faststream run app:app --flag # flag = True +faststream run app:app --no-flag # flag = False +faststream run app:app --my-flag # my_flag = True +faststream run app:app --key value # key = "value" +faststream run app:app --key 1 2 # key = ["1", "2"] +faststream run app:app --key 1 --key 2 # key = ["1", "2"] ``` { data-search-exclude } diff --git a/docs/docs/en/getting-started/context/custom.md b/docs/docs/en/getting-started/context/custom.md index d9c2f1f9ce..aac3ddacdd 100644 --- a/docs/docs/en/getting-started/context/custom.md +++ b/docs/docs/en/getting-started/context/custom.md @@ -26,6 +26,6 @@ To set a local context (available only within the message processing scope), use {!> includes/getting_started/context/custom_local.md !} -You can also set the context yourself, and it will remain within the current call stack until you clear it. +You can also set the context by yourself, and it will remain within the current call stack until you clear it. 
{!> includes/getting_started/context/manual_local.md !} diff --git a/docs/docs/en/getting-started/contributing/CONTRIBUTING.md b/docs/docs/en/getting-started/contributing/CONTRIBUTING.md index b722bb3f01..e0bd0c830b 100644 --- a/docs/docs/en/getting-started/contributing/CONTRIBUTING.md +++ b/docs/docs/en/getting-started/contributing/CONTRIBUTING.md @@ -44,17 +44,17 @@ After activating the virtual environment as described above, run: pip install -e ".[dev]" ``` -This will install all the dependencies and your local FastStream in your virtual environment. +This will install all the dependencies and your local **FastStream** in your virtual environment. -### Using Your local FastStream +### Using Your local **FastStream** -If you create a Python file that imports and uses FastStream, and run it with the Python from your local environment, it will use your local FastStream source code. +If you create a Python file that imports and uses **FastStream**, and run it with the Python from your local environment, it will use your local **FastStream** source code. -Whenever you update your local FastStream source code, it will automatically use the latest version when you run your Python file again. This is because it is installed with `-e`. +Whenever you update your local **FastStream** source code, it will automatically use the latest version when you run your Python file again. This is because it is installed with `-e`. This way, you don't have to "install" your local version to be able to test every change. -To use your local FastStream CLI, type: +To use your local **FastStream CLI**, type: ```bash python -m faststream ... @@ -64,7 +64,7 @@ python -m faststream ... 
### Pytest -To run tests with your current FastStream application and Python environment, use: +To run tests with your current **FastStream** application and Python environment, use: ```bash pytest tests @@ -80,6 +80,7 @@ In your project, you'll find some *pytest marks*: * **rabbit** * **kafka** * **nats** +* **redis** * **all** By default, running *pytest* will execute "not slow" tests. @@ -93,7 +94,7 @@ pytest -m 'all' If you don't have a local broker instance running, you can run tests without those dependencies: ```bash -pytest -m 'not rabbit and not kafka and not nats' +pytest -m 'not rabbit and not kafka and not nats and not redis' ``` To run tests based on RabbitMQ, Kafka, or other dependencies, the following dependencies are needed to be started as docker containers: diff --git a/docs/docs/en/getting-started/integrations/django/index.md b/docs/docs/en/getting-started/integrations/django/index.md index 0bc5ccb2ca..04fdde094b 100644 --- a/docs/docs/en/getting-started/integrations/django/index.md +++ b/docs/docs/en/getting-started/integrations/django/index.md @@ -140,7 +140,7 @@ app = Starlette( from faststream.kafka import KafkaBroker - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings") broker = KafkaBroker() diff --git a/docs/docs/en/getting-started/lifespan/context.md b/docs/docs/en/getting-started/lifespan/context.md new file mode 100644 index 0000000000..9f54d97ebd --- /dev/null +++ b/docs/docs/en/getting-started/lifespan/context.md @@ -0,0 +1,21 @@ +# Lifespan Context Manager + +Also, you can define *startup* and *shutdown* logic using the `lifespan` parameter of the **FastSTream** app, and a "context manager" (I'll show you what that is in a second). + +Let's start with an example from [hooks page](./hooks.md#another-example){.internal-link} and refactor it using "context manager". + +We create an async function `lifespan()` with `yield` like this: + +{! 
includes/getting_started/lifespan/ml_context.md !} + +As you can see, `lifespan` parameter is much suitable for case (than `#!python @app.on_startup` and `#!python @app.after_shutdown` separated calls) if you have object needs to process at application startup and shutdown both. + +!!! tip + `lifespan` starts **BEFORE** your broken started (`#!python @app.on_startup` hook) and **AFTER** broker was shutdown (`#!python @app.after_shutdown`), so you can't publish any messages here. + + If you want to make some actions will *already/still running broker*, please use `#!python @app.after_startup` and `#!python @app.on_shutdown` hooks. + +Also, `lifespan` supports all **FastStream** hooks features: + +* Dependency Injection +* [extra **CLI**](../cli/index.md#environment-management){.internal-link} options passing diff --git a/docs/docs/en/getting-started/lifespan/test.md b/docs/docs/en/getting-started/lifespan/test.md index 58887e1d65..19c89d3bc3 100644 --- a/docs/docs/en/getting-started/lifespan/test.md +++ b/docs/docs/en/getting-started/lifespan/test.md @@ -6,7 +6,13 @@ For this reason, **FastStream** has a special **TestApp** patcher working as a r {! includes/getting_started/lifespan/testing.md !} -!!! tip - If you are using a connected broker inside withing your lifespan hooks, it's advisable to patch the broker first (before applying the application patch). +## Using with **TestBroker** - Also, because `FastStream` calls `#!python broker.start()` inside, you need to prevent `TestClient` broker starting to respect the original lifespan hooks ordering by `#!python connect_only=True` option. Without this one, all `FastStream` hooks will be called after broker was started, what can breaks some `@app.on_startup` logic. +If you want to use In-Memory patched broker in your tests, it's advisable to patch the broker first (before applying the application patch). + +Also, **TestApp** and **TestBroker** are calling `#!python broker.start()` both. 
According to the original logic, the broker should be started in the `FastStream` application, but a **TestBroker** applied first breaks this behavior. For this reason, **TestApp** prevents the **TestBroker** `#!python broker.start()` call if it is placed inside the **TestBroker** context. + +This behavior is ruled by the `connect_only` **TestBroker** argument. By default it has a `#!python None` value, but **TestApp** can set it to `True/False` by its inner logic. To prevent this "magic", just set the `connect_only` argument manually. + +!!! warning + With `#!python connect_only=False`, all `FastStream` hooks will be called after the **broker was started**, which can break some `@app.on_startup` logic.
-``` python linenums="1" hl_lines="1-2 24-27 30 37-40" +``` python linenums="1" hl_lines="1 3 25-28 31 40-42" {!> docs_src/getting_started/serialization/avro.py !} ``` diff --git a/docs/docs/en/kafka/Publisher/index.md b/docs/docs/en/kafka/Publisher/index.md index e8fa000f93..ae97cd74f5 100644 --- a/docs/docs/en/kafka/Publisher/index.md +++ b/docs/docs/en/kafka/Publisher/index.md @@ -14,15 +14,15 @@ You can specify the topic to send by its name. 1. Create your KafkaBroker instance -```python linenums="1" -{!> docs_src/kafka/raw_publish/example.py [ln:8] !} -``` + ```python linenums="1" + {!> docs_src/kafka/raw_publish/example.py [ln:8] !} + ``` -2. Publish a message using the `publish` method +1. Publish a message using the `publish` method -```python linenums="1" -{!> docs_src/kafka/raw_publish/example.py [ln:26-32] !} -``` + ```python linenums="1" + {!> docs_src/kafka/raw_publish/example.py [ln:26-32] !} + ``` This is the most basic way of using the KafkaBroker to publish a message. @@ -32,21 +32,21 @@ The simplest way to use a KafkaBroker for publishing has a significant limitatio 1. Create your KafkaBroker instance -```python linenums="1" -{!> docs_src/kafka/publisher_object/example.py [ln:8] !} -``` + ```python linenums="1" + {!> docs_src/kafka/publisher_object/example.py [ln:8] !} + ``` -2. Create a publisher instance +1. Create a publisher instance -```python linenums="1" -{!> docs_src/kafka/publisher_object/example.py [ln:17] !} -``` + ```python linenums="1" + {!> docs_src/kafka/publisher_object/example.py [ln:17] !} + ``` -2. Publish a message using the `publish` method of the prepared publisher +1. 
Publish a message using the `publish` method of the prepared publisher -```python linenums="1" -{!> docs_src/kafka/publisher_object/example.py [ln:26-31] !} -``` + ```python linenums="1" + {!> docs_src/kafka/publisher_object/example.py [ln:26-31] !} + ``` Now, when you wrap your broker into a FastStream object, the publisher will be exported to the AsyncAPI documentation. @@ -66,24 +66,24 @@ Let's start by examining the entire application that utilizes the Publisher Deco 1. **Initialize the KafkaBroker instance:** Start by initializing a KafkaBroker instance with the necessary configuration, including Kafka broker address. -```python linenums="1" -{!> docs_src/kafka/publish_example/app.py [ln:13] !} -``` + ```python linenums="1" + {!> docs_src/kafka/publish_example/app.py [ln:13] !} + ``` -2. **Prepare your publisher object to use later as a decorator:** +1. **Prepare your publisher object to use later as a decorator:** -```python linenums="1" -{!> docs_src/kafka/publish_example/app.py [ln:17] !} -``` + ```python linenums="1" + {!> docs_src/kafka/publish_example/app.py [ln:17] !} + ``` -3. **Create your processing logic:** Write a function that will consume the incoming messages in the defined format and produce a response to the defined topic +1. **Create your processing logic:** Write a function that will consume the incoming messages in the defined format and produce a response to the defined topic -```python linenums="1" -{!> docs_src/kafka/publish_example/app.py [ln:22-23] !} -``` + ```python linenums="1" + {!> docs_src/kafka/publish_example/app.py [ln:22-23] !} + ``` -4. **Decorate your processing function:** To connect your processing function to the desired Kafka topics you need to decorate it with `#!python @broker.subscriber` and `#!python @broker.publisher` decorators. 
Now, after you start your application, your processing function will be called whenever a new message in the subscribed topic is available and produce the function return value to the topic defined in the publisher decorator. +1. **Decorate your processing function:** To connect your processing function to the desired Kafka topics you need to decorate it with `#!python @broker.subscriber` and `#!python @broker.publisher` decorators. Now, after you start your application, your processing function will be called whenever a new message in the subscribed topic is available and produce the function return value to the topic defined in the publisher decorator. -```python linenums="1" -{!> docs_src/kafka/publish_example/app.py [ln:20-23] !} -``` + ```python linenums="1" + {!> docs_src/kafka/publish_example/app.py [ln:20-23] !} + ``` diff --git a/docs/docs/en/kafka/index.md b/docs/docs/en/kafka/index.md index ba1089b192..1f40856c2c 100644 --- a/docs/docs/en/kafka/index.md +++ b/docs/docs/en/kafka/index.md @@ -44,7 +44,7 @@ To connect to Kafka using the FastStream KafkaBroker module, follow these steps: Here's a simplified code example demonstrating how to establish a connection to Kafka using FastStream's KafkaBroker module: ```python linenums="1" -{!> docs_src/index/basic_kafka.py!} +{!> docs_src/index/kafka/basic.py!} ``` This minimal example illustrates how FastStream simplifies the process of connecting to Kafka and performing basic message processing from the **in_topic** to the **out-topic**. Depending on your specific use case and requirements, you can further customize your Kafka integration with FastStream to build robust and efficient streaming applications. diff --git a/docs/docs/en/redis/index.md b/docs/docs/en/redis/index.md new file mode 100644 index 0000000000..ef256fb22d --- /dev/null +++ b/docs/docs/en/redis/index.md @@ -0,0 +1,43 @@ +# Redis Broker + +## Redis Overview + +### What is Redis? 
+ +[Redis](https://redis.io/){.external-link target="_blank"} is an open-source, in-memory data structure store, used as a database, cache, and message broker. It supports various data structures such as strings, hashes, lists, sets, sorted sets, bitmaps, hyperloglogs, and geospatial indexes with radius queries. Redis has built-in replication, Lua scripting, LRU eviction, transactions, and different levels of on-disk persistence, and provides high availability via Redis Sentinel and automatic partitioning with Redis Cluster. + +### Key Redis Concepts + +#### 1. Data Structures + +Redis is not just a key-value store; it is a data structures server, supporting different kinds of values. This makes Redis flexible and suitable for a wide range of problems. + +#### 2. Pub/Sub + +Redis Pub/Sub implements the Publish/Subscribe messaging paradigm where senders (publishers) are not programmed to send their messages to specific receivers (subscribers). Instead, published messages are characterized into channels, without knowledge of what (if any) subscribers there may be. + +## Redis in FastStream + +### FastStream RedisBroker + +The FastStream RedisBroker is a key component of the FastStream framework that enables seamless integration with Redis. With the RedisBroker, developers can easily connect to Redis instances, publish messages to Redis channels, and subscribe to Redis channels within their FastStream applications. + +### Establishing a Connection + +To connect to Redis using the FastStream RedisBroker module, follow these steps: + +1. **Initialize the RedisBroker instance:** Start by initializing a RedisBroker instance with the necessary configuration, including Redis server address and port. + +2. **Create your processing logic:** Write a function that will consume the incoming messages from the subscribed channel and optionally publish a response to another channel. + +3. 
**Decorate your processing function:** To connect your processing function to the desired Redis channels, you need to decorate it with `#!python @broker.subscriber` and `#!python @broker.publisher` decorators. Now, after you start your application, your processing function will be called whenever a new message in the subscribed channel is available and produce the function return value to the channel defined in the publisher decorator. + +Here's a simplified code example demonstrating how to establish a connection to Redis using FastStream's RedisBroker module: + +```python linenums="1" +{!> docs_src/index/redis/basic.py!} +``` + +This minimal example illustrates how FastStream simplifies the process of connecting to Redis and performing basic message processing from the **in-channel** to the **out-channel**. Depending on your specific use case and requirements, you can further customize your Redis integration with FastStream to build efficient and responsive applications. + +For more advanced configuration options and detailed usage instructions, please refer to the FastStream Redis documentation and the [official Redis documentation](https://redis.io/documentation){.external-link target="_blank"}. diff --git a/docs/docs/en/redis/list/batch.md b/docs/docs/en/redis/list/batch.md new file mode 100644 index 0000000000..30c36d9374 --- /dev/null +++ b/docs/docs/en/redis/list/batch.md @@ -0,0 +1,35 @@ +# Redis List Batch Subscriber + +If you want to consume data in batches from a Redis list, the `#!python @broker.subscriber(...)` decorator makes it possible. By defining your consumed `msg` object as a list of messages and setting the `batch` parameter to `True` within the `ListSub` object, the subscriber will call your consuming function with a batch of messages. Let's walk through how to achieve this with the FastStream library. 
+ +## Using the Subscriber with Batching + +To consume messages in batches from a Redis list, follow these steps: + +### Step 1: Define Your Subscriber + +In your FastStream application, define the subscriber using the `#!python @broker.subscriber(...)` decorator. Ensure that you pass a `ListSub` object with the `batch` parameter set to `True`. This configuration tells the subscriber to handle message consumption in batches from the specified Redis list. + +```python linenums="1" +{!> docs_src/redis/list_sub_batch/app.py [ln:8] !} +``` + +### Step 2: Implement Your Consuming Function + +Create a consuming function that accepts the list of messages. The `#!python @broker.subscriber(...)` decorator will take care of collecting and grouping messages into batches. + +```python linenums="1" +{!> docs_src/redis/list_sub_batch/app.py [ln:8-10] !} +``` + +## Example of Consuming in Batches + +Let's illustrate how to consume messages in batches from the `#!python "test-list"` Redis list with a practical example: + +```python linenums="1" +{!> docs_src/redis/list_sub_batch/app.py !} +``` + +In this example, the subscriber is configured to process messages in batches from the Redis list, and the consuming function is designed to handle these batches efficiently. + +Consuming messages in batches is a valuable technique when you need to optimize the processing of high volumes of data in your Redis-based applications. It allows for more efficient resource utilization and can enhance the overall performance of your data processing tasks. diff --git a/docs/docs/en/redis/list/index.md b/docs/docs/en/redis/list/index.md new file mode 100644 index 0000000000..e2582ce91b --- /dev/null +++ b/docs/docs/en/redis/list/index.md @@ -0,0 +1,5 @@ +# Redis Lists + +Redis Lists are a simple and flexible data structure that function as ordered collections of strings. 
They are similar to lists in programming languages, and Redis provides commands to perform a variety of operations such as adding, retrieving, and removing elements from either end of the list. + +Redis Lists are particularly useful for scenarios such as implementing queues, effectively using the list as a FIFO (First-In-First-Out) structure. diff --git a/docs/docs/en/redis/list/publishing.md b/docs/docs/en/redis/list/publishing.md new file mode 100644 index 0000000000..fd439e6f6e --- /dev/null +++ b/docs/docs/en/redis/list/publishing.md @@ -0,0 +1,47 @@ +# Redis List Publishing with FastStream + +Utilizing the **FastStream** library, you can effectively publish data to Redis lists, which act as queues in Redis-based messaging systems. + +## Understanding Redis List Publishing + +Just like with Redis streams, messages can be published to Redis lists. FastStream utilizes the `@broker.publisher` decorator, along with a list's name, to push messages onto the list. + +1. Instantiate your RedisBroker + + ```python linenums="1" + {!> docs_src/redis/list_pub/app.py [ln:13] !} + ``` + +1. Create your FastStream application with the instantiated RedisBroker + + ```python linenums="1" + {!> docs_src/redis/list_pub/app.py [ln:14] !} + ``` + +1. Define a Pydantic model for your data + + ```python linenums="1" + {!> docs_src/redis/list_pub/app.py [ln:7-10] !} + ``` + +1. Implement a data processing function for publishing to Redis lists + + Use the `@broker.publisher(list="...")` decorator alongside the `@broker.subscriber(list="...")` decorator to create a function that processes incoming messages and pushes the results to an output list in Redis. + + ```python linenums="1" + {!> docs_src/redis/list_pub/app.py [ln:17-20] !} + ``` + +In this pattern, the function stands as a subscriber to the "input-list" and publishes the processed data as a new message to the "output-list." 
By using decorators, you establish a pipeline that reads messages from one Redis list, applies some logic, and then pushes outputs to another list. + +## Full Example of Redis List Publishing + +Here's an example that demonstrates Redis list publishing in action using decorators with FastStream: + +```python linenums="1" +{!> docs_src/redis/list_pub/app.py !} +``` + +The provided example illustrates the ease of setting up publishing mechanisms to interact with Redis lists. In this environment, messages are dequeued from the input list, processed, and enqueued onto the output list seamlessly, empowering developers to leverage Redis lists as messaging queues. + +By following these simple steps, you can perform list-based publish/subscribe operations in a Redis environment using the FastStream library, capitalizing on Redis' fast, in-memory data structure store capabilities. diff --git a/docs/docs/en/redis/list/subscription.md b/docs/docs/en/redis/list/subscription.md new file mode 100644 index 0000000000..32a99453e8 --- /dev/null +++ b/docs/docs/en/redis/list/subscription.md @@ -0,0 +1,41 @@ +# Redis List Basic Subscriber + +To start consuming from a **Redis** list, simply decorate your consuming function with the `#!python @broker.subscriber(...)` decorator, passing a string as the list key. + +In the following example, we will create a simple FastStream app that will consume messages from a `#!python "test-list"` Redis list. + +The full app code looks like this: + +```python linenums="1" +{!> docs_src/redis/list_sub/app.py [ln:1-10] !} +``` + +## Import FastStream and RedisBroker + +To use the `#!python @broker.subscriber(...)` decorator, first, we need to import the base FastStream app and RedisBroker to create our broker. + +```python linenums="1" +{!> docs_src/redis/list_sub/app.py [ln:1-2] !} +``` + +## Create a RedisBroker + +Next, we will create a `RedisBroker` object and wrap it into the `FastStream` object so that we can start our app using CLI later. 
+
+```python linenums="1"
+{!> docs_src/redis/list_sub/app.py [ln:4-5] !}
+```
+
+## Create a Function that will Consume Messages from a Redis list
+
+Let’s create a consumer function that will consume messages from the `#!python "test-list"` Redis list and log them.
+
+```python linenums="1"
+{!> docs_src/redis/list_sub/app.py [ln:8-10] !}
+```
+
+The function decorated with the `#!python @broker.subscriber(...)` decorator will be called when a message is pushed to the **Redis** list.
+
+The message will then be injected into the typed `msg` argument of the function, and its type will be used to parse the message.
+
+In this example, when a message is pushed to the `#!python "test-list"` list, it will be received by the `handle` function, and the `logger` will log the message content.
diff --git a/docs/docs/en/redis/message.md b/docs/docs/en/redis/message.md
new file mode 100644
index 0000000000..9824391338
--- /dev/null
+++ b/docs/docs/en/redis/message.md
@@ -0,0 +1,48 @@
+# Accessing Redis Message Information with FastStream
+
+In **FastStream**, messages passed through a Redis broker are serialized and can be interacted with just like function parameters. However, you might occasionally need to access more than just the message content, such as metadata and other attributes.
+
+## Redis Message Access
+
+When dealing with the Redis broker in FastStream, you can easily access message details by using the `RedisMessage` object which wraps the underlying message with additional context information.
This object is specifically tailored for Redis and contains relevant message attributes: + +* `#!python body: Union[bytes, Any]` +* `#!python raw_message: Msg` +* `#!python decoded_body: Optional[DecodedMessage]` +* `#!python headers: AnyDict` +* `#!python path: AnyDict` +* `#!python content_type: Optional[str]` +* `#!python reply_to: str` +* `#!python message_id: str` +* `#!python correlation_id: str` +* `#!python processed: bool` +* `#!python commited: bool` + +For instance, if you need to retrieve headers from an incoming Redis message, here’s how you might do it: + +```python +from faststream.redis import RedisMessage + +@broker.subscriber("test-stream") +async def stream_handler(msg: str, message: RedisMessage): + print(message.headers) +``` + +## Targeted Message Fields Access + +It's common to require only specific elements of the message rather than the entire data structure. For this purpose, FastStream allows you to access individual message fields by specifying the field you are interested in as an argument in your handler function. + +For example, if you want to access the headers directly, you might do it as follows: + +```python +from faststream import Context + +@broker.subscriber("test-stream") +async def stream_handler( + msg: str, + headers: AnyDict = Context("message.headers"), +): + print(headers) +``` + +The `Context` object lets you reference message attributes directly, making your handler functions neater and reducing the amount of boilerplate code needed. diff --git a/docs/docs/en/redis/pubsub/index.md b/docs/docs/en/redis/pubsub/index.md new file mode 100644 index 0000000000..4000e58821 --- /dev/null +++ b/docs/docs/en/redis/pubsub/index.md @@ -0,0 +1,5 @@ +# Redis Channels + +Redis channels are a feature of Redis that enables messaging between clients through a publish/subscribe (pub/sub) pattern. A Redis channel is essentially a medium through which messages are transmitted. 
Different clients can subscribe to these channels to listen for messages, while other clients can publish messages to these channels. + +When a message is published to a Redis channel, all subscribers to that channel receive the message instantly. This makes Redis channels suitable for a variety of real-time applications such as chat rooms, notifications, live updates, and many more use cases where messages must be broadcast promptly to multiple clients. diff --git a/docs/docs/en/redis/pubsub/publishing.md b/docs/docs/en/redis/pubsub/publishing.md new file mode 100644 index 0000000000..60a1257319 --- /dev/null +++ b/docs/docs/en/redis/pubsub/publishing.md @@ -0,0 +1,87 @@ +# Publishing + +The **FastStream** `RedisBroker` supports all standard [publishing use cases](../../getting-started/publishing/index.md){.internal-link} similar to the `KafkaBroker`, allowing you to publish messages to Redis channels with ease. + +Below you will find guidance on how to utilize the `RedisBroker` for publishing messages, including creating publisher objects and using decorators for streamlined publishing workflows. + +## Basic Redis Channel Publishing + +The `RedisBroker` allows you to publish messages directly to Redis channels. You can use Python primitives and `pydantic.BaseModel` to define the content of the message. + +To publish a message to a Redis channel, follow these steps: + +1. Create your RedisBroker instance + + ```python linenums="1" + {!> docs_src/redis/publish/raw_publish.py [ln:15] !} + ``` + +1. Publish a message using the `publish` method + + ```python linenums="1" + {!> docs_src/redis/publish/raw_publish.py [ln:27-33] !} + ``` + +This is the most straightforward way to use the RedisBroker to publish messages to Redis channels. + +## Creating a publisher object + +For a more structured approach and to include your publishers in the AsyncAPI documentation, it's recommended to create publisher objects. Here's how to do it: + +1. 
Create your RedisBroker instance + + ```python linenums="1" + {!> docs_src/redis/publish/publisher_object.py [ln:8] !} + ``` + +1. Create a publisher instance for a specific channel + + ```python linenums="1" + {!> docs_src/redis/publish/publisher_object.py [ln:17] !} + ``` + +1. Publish a message using the `publish` method of the prepared publisher + + ```python linenums="1" + {!> docs_src/redis/publish/publisher_object.py [ln:26-31] !} + ``` + +When you encapsulate your broker within a FastStream object, the publisher will be documented in your service's AsyncAPI documentation. + +## Decorating your publishing functions + +Decorators in FastStream provide a convenient way to define the data flow within your application. The `RedisBroker` allows you to use decorators to publish messages to Redis channels, similar to the `KafkaBroker`. + +By decorating a function with both `@broker.subscriber` and `@broker.publisher`, you create a DataPipeline unit that processes incoming messages and publishes the results to another channel. The order of decorators does not matter, but they must be applied to a function that has already been decorated by a `@broker.subscriber`. + +The decorated function should have a return type annotation to ensure the correct interpretation of the return value before it's published. + +Here's an example of using decorators with RedisBroker: + +```python linenums="1" +{!> docs_src/redis/publish/publisher_decorator.py !} +``` + +1. **Initialize the RedisBroker instance:** Start by creating a RedisBroker instance. + + ```python linenums="1" + {!> docs_src/redis/publish/publisher_decorator.py [ln:13] !} + ``` + +1. **Prepare your publisher object to be used as a decorator:** + + ```python linenums="1" + {!> docs_src/redis/publish/publisher_decorator.py [ln:17] !} + ``` + +1. **Create your processing logic:** Implement a function that will process incoming messages and produce a response to be published to another Redis channel. 
+ + ```python linenums="1" + {!> docs_src/redis/publish/publisher_decorator.py [ln:22-23] !} + ``` + +1. **Decorate your processing function:** Apply the `@broker.subscriber` and `@broker.publisher` decorators to your function to define the input channel and the output channel, respectively. Once your application is running, this decorated function will be triggered whenever a new message arrives on the "input_data" channel, and it will publish the result to the "output_data" channel. + + ```python linenums="1" + {!> docs_src/redis/publish/publisher_decorator.py [ln:20-23] !} + ``` diff --git a/docs/docs/en/redis/pubsub/subscription.md b/docs/docs/en/redis/pubsub/subscription.md new file mode 100644 index 0000000000..819483c833 --- /dev/null +++ b/docs/docs/en/redis/pubsub/subscription.md @@ -0,0 +1,75 @@ +# Channel Subscription + +## Basic Channel Subscription + +To start consuming messages from a **Redis** channel, decorate your consumer function with the `#!python @broker.subscriber("channel_name")` decorator, specifying the channel name as a string. + +In this example, we will build a FastStream application that listens to messages from the Redis channel named `#!python "test"`. + +The complete application code is presented below: + +```python linenums="1" +{!> docs_src/redis/subscribe/channel_sub.py!} +``` + +### Import FastStream and RedisBroker + +To utilize the `#!python @broker.subscriber(...)` decorator for Redis channel subscription, you must first import FastStream and RedisBroker. + +```python linenums="1" +{!> docs_src/redis/subscribe/channel_sub.py [ln:1-2]!} +``` + +### Create a RedisBroker Instance + +Create a `#!python RedisBroker` object and pass it to the `FastStream` object. This setup prepares the application for launch using the FastStream CLI. 
+ +```python linenums="1" +{!> docs_src/redis/subscribe/channel_sub.py [ln:4-5]!} +``` + +### Define the Message Handler Function + +Construct a function that will act as the consumer of messages from the `#!python "test"` channel and use the logger to output the message content. + +```python linenums="1" +{!> docs_src/redis/subscribe/channel_sub.py [ln:8-10]!} +``` + +When a message is published to the **Redis** channel `#!python "test"`, it will trigger the invocation of the decorated function. The message will be passed to the function's `msg` parameter, while the logger will be available for logging purposes. + +## Pattern Channel Subscription + +For subscribing to multiple Redis channels matching a pattern, use the `#!python @broker.subscriber(channel=PubSub("pattern", pattern=True))` decorator, where the channel argument receives a `PubSub` object with the pattern and pattern flag set to True. + +Here's how to create a FastStream application that subscribes to all channels matching the `#!python "test.*"` pattern: + +```python linenums="1" +{!> docs_src/redis/subscribe/channel_sub_pattern.py!} +``` + +### Use PubSub for Pattern Matching + +Import the `PubSub` class from `faststream.redis` along with other necessary modules. + +```python linenums="1" +{!> docs_src/redis/subscribe/channel_sub_pattern.py [ln:1-2] !} +``` + +### Specify the Pattern for Channel Subscription + +To define the pattern subscription, create a `PubSub` object with the desired pattern (`#!python "test.*"` in this case) and indicate that it's a pattern subscription by setting `pattern=True`. + +```python linenums="1" +{!> docs_src/redis/subscribe/channel_sub_pattern.py [ln:8] !} +``` + +### Create the Pattern Message Handler Function + +Decide on a function that will act as the subscriber of messages from channels matching the specified pattern. Logging the messages is handled similarly as with basic channel subscription. 
+ +```python linenums="1" +{!> docs_src/redis/subscribe/channel_sub_pattern.py [ln:8-10] !} +``` + +With pattern channel subscription, when a message is published to a channel that matches the specified pattern (`#!python "test.*"`), our handler function will be invoked. The message is delivered to the `msg` argument of the function, similar to how it works in basic channel subscriptions. diff --git a/docs/docs/en/redis/rpc.md b/docs/docs/en/redis/rpc.md new file mode 100644 index 0000000000..df9803eddf --- /dev/null +++ b/docs/docs/en/redis/rpc.md @@ -0,0 +1,45 @@ +# Redis RPC with FastStream + +**FastStream RedisBroker** provides the powerful capability to perform Remote Procedure Calls (RPC) using Redis. This feature enables you to send a message and await a response, effectively creating a synchronous request-response pattern over the inherently asynchronous Redis messaging system. Below is the guide to set up and utilize the Redis RPC publishing feature with FastStream. + +## RPC with Redis Overview + +In a traditional publish/subscribe setup, the publishing party sends messages without expecting any direct response from the subscribers. However, with RPC, the publisher sends a message and waits for a response from the subscriber, which can then be used for subsequent operations or processing. + +FastStream allows you to define RPC-style communication channels, lists, or streams by using the RedisBroker's publishing function with the `rpc` flag set to `True`. + +## Implementing Redis RPC in FastStream + +To implement Redis RPC with RedisBroker in FastStream, follow the steps below: + +1. Initiate your FastStream application with RedisBroker + + ```python linenums="1" + {!> docs_src/redis/rpc/app.py [ln:4-5] !} + ``` + +2. Define subscriber handlers for various Redis data types (e.g., channel, list, stream) that can process incoming messages and return responses. + + ```python linenums="1" + {!> docs_src/redis/rpc/app.py [ln:8-23] !} + ``` + +3. 
Send RPC messages through RedisBroker and await responses on the correct data type. + + After your application has started and the subscribers are ready to receive messages, you can publish messages with the `rpc` option enabled. Additionally, you can set an `rpc_timeout` to decide how long the publisher should wait for a response before timing out. + + ```python linenums="1" + {!> docs_src/redis/rpc/app.py [ln:26-49] !} + ``` + +In this example, we assert that the `msg` sent is the same as the response received from the subscriber, demonstrating an operational RPC pattern over three different Redis data types. + +## Full Example of Redis RPC with FastStream + +Combining all the code snippets above, here is the complete example of how to set up Redis RPC with FastStream RedisBroker: + +```python linenums="1" +{!> docs_src/redis/rpc/app.py !} +``` + +By embracing Redis RPC with FastStream, you can build sophisticated message-based architectures that require direct feedback from message processors. This feature is particularly suitable for cases where immediate processing is necessary or calling functions across different services is essential. diff --git a/docs/docs/en/redis/streams/ack.md b/docs/docs/en/redis/streams/ack.md new file mode 100644 index 0000000000..65646064e8 --- /dev/null +++ b/docs/docs/en/redis/streams/ack.md @@ -0,0 +1,38 @@ +# Stream Acknowledgement + +When working with *Redis* streams in the **FastStream** library, it's important to manage message acknowledgements carefully to ensure that messages are not lost and that they have been processed as intended. + +By default, when using the **FastStream** with a Redis stream, the library will automatically acknowledge (*ack*) that a message has been processed. This follows the *at most once* processing guarantee. 
+ +## Manual Acknowledgement + +In cases where you want explicit control over when a message is acknowledged, you can manually acknowledge a message by accessing the `ack` and `nack` methods provided: + +```python +from faststream.redis.annotations import RedisMessage + +# Setup broker and faststream app +... + +@broker.subscriber(StreamSub("test-stream", group="test-group", consumer="1")) +async def base_handler(body: dict, msg: RedisMessage): + # Process the message + ... + + # Manually acknowledge the message + await msg.ack() + # or, if processing fails and you want to reprocess later + await msg.nack() +``` + +Using `ack` will mark the message as processed in the stream, while `nack` is useful for situations where you might need to reprocess a message due to a handling failure. + +## Interrupt Process + +If the need arises to instantly interrupt message processing at any point in the call stack and acknowledge the message, you can achieve this by raising the `faststream.exceptions.AckMessage` exception: + +``` python linenums="1" hl_lines="2 16" +{!> docs_src/redis/ack/errors.py !} +``` + +By raising `AckMessage`, **FastStream** will halt the current message processing routine and immediately acknowledge it. Analogously, raising `NackMessage` would prevent the message from being acknowledged and could lead to its subsequent reprocessing by the same or a different consumer. diff --git a/docs/docs/en/redis/streams/batch.md b/docs/docs/en/redis/streams/batch.md new file mode 100644 index 0000000000..83f986072b --- /dev/null +++ b/docs/docs/en/redis/streams/batch.md @@ -0,0 +1,35 @@ +# Redis Stream Batch Subscriber + +If you want to consume data in batches from a Redis stream, the `#!python @broker.subscriber(...)` decorator makes it possible. By defining your consumed `msg` object as a list of messages and setting the `batch` parameter to `True` within the `StreamSub` object, the subscriber will call your consuming function with a batch of messages. 
Let's walk through how to achieve this with the FastStream library. + +## Using the Subscriber with Batching + +To consume messages in batches from a Redis stream, follow these steps: + +### Step 1: Define Your Subscriber + +In your FastStream application, define the subscriber using the `#!python @broker.subscriber(...)` decorator. Ensure that you pass a `StreamSub` object with the `batch` parameter set to `True`. This configuration tells the subscriber to handle message consumption in batches from the specified Redis stream. + +```python linenums="1" +{!> docs_src/redis/stream_sub_batch/app.py [ln:8] !} +``` + +### Step 2: Implement Your Consuming Function + +Create a consuming function that accepts the list of messages. The `#!python @broker.subscriber(...)` decorator will take care of collecting and grouping messages into batches. + +```python linenums="1" +{!> docs_src/redis/stream_sub_batch/app.py [ln:8-10] !} +``` + +## Example of Consuming in Batches + +Let's illustrate how to consume messages in batches from the `#!python "test-stream"` Redis stream with a practical example: + +```python linenums="1" +{!> docs_src/redis/stream_sub_batch/app.py !} +``` + +In this example, the subscriber is configured to process messages in batches from the Redis stream, and the consuming function is designed to handle these batches efficiently. + +Consuming messages in batches is a valuable technique when you need to optimize the processing of high volumes of data in your Redis-based applications. It allows for more efficient resource utilization and can enhance the overall performance of your data processing tasks. diff --git a/docs/docs/en/redis/streams/groups.md b/docs/docs/en/redis/streams/groups.md new file mode 100644 index 0000000000..7c000b2f2f --- /dev/null +++ b/docs/docs/en/redis/streams/groups.md @@ -0,0 +1,47 @@ +# Redis Stream Consumer Groups + +Consuming messages from a **Redis** stream can be accomplished by using a Consumer Group. 
This allows multiple consumers to divide the workload of processing messages in a stream and provides a form of message acknowledgment, ensuring that messages are not processed repeatedly. + +Consumer Groups in Redis enable a group of clients to cooperatively consume different portions of the same stream of messages. When using `group="..."` (which internally uses `XREADGROUP`), messages are distributed among different consumers in a group and are not delivered to any other consumer in that group again, unless they are not acknowledged (i.e., the client fails to process and does not call `msg.ack()` or `XACK`). This is in contrast to a normal consumer (also known as `XREAD`), where every consumer sees all the messages. `XREAD` is useful for broadcasting to multiple consumers, while `XREADGROUP` is better suited for workload distribution. + +In the following example, we will create a simple FastStream app that utilizes a Redis stream with a Consumer Group. It will consume messages sent to the `test-stream` as part of the `test-group` consumer group. + +The full app code is as follows: + +```python linenums="1" +{!> docs_src/redis/stream_group/app.py !} +``` + +## Import FastStream and RedisBroker + +First, import the `FastStream` class and the `RedisBroker` from the `faststream.redis` module to define our broker. + +```python linenums="1" +{!> docs_src/redis/stream_group/app.py [ln:1-2] !} +``` + +## Create a RedisBroker + +To establish a connection to Redis, instantiate a `RedisBroker` object and pass it to the `FastStream` app. + +```python linenums="1" +{!> docs_src/redis/stream_group/app.py [ln:4-5] !} +``` + +## Define a Consumer Group Subscription + +Define a subscription to a Redis stream with a specific Consumer Group using the `StreamSub` object and the `@broker.subscriber(...)` decorator. Then, define a function that will be triggered when new messages are sent to the `test-stream` Redis stream. 
This function is decorated with `@broker.subscriber(...)` and will process the messages as part of the `test-group` consumer group. + +```python linenums="1" +{!> docs_src/redis/stream_group/app.py [ln:8-10] !} +``` + +## Publishing a message + +Publishing a message is the same as what's defined on [Stream Publishing](./publishing.md). + +```python linenums="1" +{!> docs_src/redis/stream_group/app.py [ln:15] !} +``` + +By following the steps and code examples provided above, you can create a FastStream application that consumes messages from a Redis stream using a Consumer Group for distributed message processing. diff --git a/docs/docs/en/redis/streams/index.md b/docs/docs/en/redis/streams/index.md new file mode 100644 index 0000000000..3709b04309 --- /dev/null +++ b/docs/docs/en/redis/streams/index.md @@ -0,0 +1,11 @@ +# Redis Streams + +Redis Streams are a data structure introduced in Redis 5.0 that offer a reliable and highly scalable way to handle streams of data. They are similar to logging systems like Apache Kafka, where data is stored in a log structure and can be consumed by multiple clients. Streams provide a sequence of ordered messages, and they are designed to handle a high volume of data by allowing partitioning and multiple consumers. + +A Redis Stream is a collection of entries, each having an ID (which includes a timestamp) and a set of key-value pairs representing the message data. Clients can add to a stream by generating a new entry and can read from a stream to consume its messages. + +Streams have unique features such as: + +- Persistence: Data in the stream are persisted and can be replayed by new consumers. +- Consumer Groups: Allow concurrent consumption and acknowledgment of data entries by multiple consumers, facilitating partitioned processing. +- Range Queries: Clients can query streams for data within a specific range of IDs. 
diff --git a/docs/docs/en/redis/streams/publishing.md b/docs/docs/en/redis/streams/publishing.md new file mode 100644 index 0000000000..4b191348d2 --- /dev/null +++ b/docs/docs/en/redis/streams/publishing.md @@ -0,0 +1,39 @@ +# Redis Stream Publishing with FastStream + +## Publishing Data to Redis Stream + +To publish messages to a Redis Stream, you implement a function that processes the incoming data and apply the `@broker.publisher` decorator, along with the Redis stream name, to it. The function will then publish its return value to the specified stream. + +1. Create your RedisBroker instance + + ```python linenums="1" + {!> docs_src/redis/stream_pub/app.py [ln:13] !} + ``` + +1. Initiate your FastStream application with the RedisBroker + + ```python linenums="1" + {!> docs_src/redis/stream_pub/app.py [ln:14] !} + ``` + +1. Define your data model + + ```python linenums="1" + {!> docs_src/redis/stream_pub/app.py [ln:7-10] !} + ``` + +1. Set up the function for data processing and publishing + + Using the `@broker.publisher()` decorator in conjunction with the `@broker.subscriber()` decorator allows seamless message processing and republishing to a different stream. + + ```python linenums="1" + {!> docs_src/redis/stream_pub/app.py [ln:17-20] !} + ``` + + By decorating a function with `@broker.publisher`, we tell FastStream to publish the function's returned data to the designated output stream. The defined function also serves as a subscriber to the `input-stream`, thereby setting up a straightforward data pipeline within Redis streams. 
+ +Here's the complete example that showcases the use of decorators for both subscribing and publishing to Redis streams: + +```python linenums="1" +{!> docs_src/redis/stream_pub/app.py !} +``` diff --git a/docs/docs/en/redis/streams/subscription.md b/docs/docs/en/redis/streams/subscription.md new file mode 100644 index 0000000000..32b478d044 --- /dev/null +++ b/docs/docs/en/redis/streams/subscription.md @@ -0,0 +1,41 @@ +# Redis Stream Basic Subscriber + +To start consuming from a **Redis** stream, simply decorate your consuming function with the `#!python @broker.subscriber(...)` decorator, passing a string as the stream key. + +In the following example, we will create a simple FastStream app that will consume messages from a `#!python "test-stream"` Redis stream. + +The full app code looks like this: + +```python linenums="1" +{!> docs_src/redis/stream_sub/app.py !} +``` + +## Import FastStream and RedisBroker + +To use the `#!python @broker.subscriber(...)` decorator, first, we need to import the base FastStream app and RedisBroker to create our broker. + +```python linenums="1" +{!> docs_src/redis/stream_sub/app.py [ln:1-2] !} +``` + +## Create a RedisBroker + +Next, we will create a `RedisBroker` object and wrap it into the `FastStream` object so that we can start our app using CLI later. + +```python linenums="1" +{!> docs_src/redis/stream_sub/app.py [ln:4-5] !} +``` + +## Create a Function that will Consume Messages from a Redis stream + +Let’s create a consumer function that will consume messages from `#!python "test-stream"` Redis stream and log them. + +```python linenums="1" +{!> docs_src/redis/stream_sub/app.py [ln:8-10] !} +``` + +The function decorated with the `#!python @broker.subscriber(...)` decorator will be called when a message is produced to the **Redis** stream. + +The message will then be injected into the typed `msg` argument of the function, and its type will be used to parse the message. 
+ +In this example case, when the message is sent to a `#!python "test-stream"` stream, it will be received by the `handle` function, and the `logger` will log the message content. diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index 7dab599c36..eee79fa256 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,8 @@ hide: --- # Release Notes + + ## 0.2.15 ### What's Changed diff --git a/docs/docs/summary_template.txt b/docs/docs/navigation_template.txt similarity index 82% rename from docs/docs/summary_template.txt rename to docs/docs/navigation_template.txt index 2d3eb8c851..20259ac7ba 100644 --- a/docs/docs/summary_template.txt +++ b/docs/docs/navigation_template.txt @@ -1,7 +1,3 @@ ---- -search: - exclude: true ---- - [Features](faststream.md) - Tutorial - [Getting Started](getting-started/index.md) @@ -28,8 +24,9 @@ search: - [Decoder](getting-started/serialization/decoder.md) - [Examples](getting-started/serialization/examples.md) - [Lifespan](getting-started/lifespan/index.md) - - [Lifespan Hooks](getting-started/lifespan/hooks.md) - - [Lifespan Testing](getting-started/lifespan/test.md) + - [Hooks](getting-started/lifespan/hooks.md) + - [Context](getting-started/lifespan/context.md) + - [Testing](getting-started/lifespan/test.md) - [Middlewares](getting-started/middlewares/index.md) - AsyncAPI - [Schema Export](getting-started/asyncapi/export.md) @@ -77,6 +74,22 @@ search: - [Publishing](nats/publishing/index.md) - [RPC](nats/rpc.md) - [Message Information](nats/message.md) +- [Redis](redis/index.md) + - [Pub/Sub](redis/pubsub/index.md) + - [Subscription](redis/pubsub/subscription.md) + - [Publishing](redis/pubsub/publishing.md) + - [List](redis/list/index.md) + - [Subscription](redis/list/subscription.md) + - [Publishing](redis/list/publishing.md) + - [Batching](redis/list/batch.md) + - [Streams](redis/streams/index.md) + - [Subscription](redis/streams/subscription.md) + - [Publishing](redis/streams/publishing.md) + - 
[Groups](redis/streams/groups.md) + - [Batching](redis/streams/batch.md) + - [Acknowledgement](redis/streams/ack.md) + - [RPC](redis/rpc.md) + - [Message Information](redis/message.md) - [Reference - Code API](api/faststream/index.md) {api} - Contributing diff --git a/docs/docs_src/getting_started/cli/redis_context.py b/docs/docs_src/getting_started/cli/redis_context.py new file mode 100644 index 0000000000..79e5967247 --- /dev/null +++ b/docs/docs_src/getting_started/cli/redis_context.py @@ -0,0 +1,16 @@ +from faststream import FastStream, ContextRepo +from faststream.redis import RedisBroker +from pydantic_settings import BaseSettings + +broker = RedisBroker() + +app = FastStream(broker) + +class Settings(BaseSettings): + host: str = "redis://localhost:6379" + +@app.on_startup +async def setup(env: str, context: ContextRepo): + settings = Settings(_env_file=env) + await broker.connect(settings.host) + context.set_global("settings", settings) diff --git a/docs/docs_src/getting_started/context/extra_arguments_kafka.py b/docs/docs_src/getting_started/context/extra_arguments_kafka.py deleted file mode 100644 index 60e98051b1..0000000000 --- a/docs/docs_src/getting_started/context/extra_arguments_kafka.py +++ /dev/null @@ -1,23 +0,0 @@ -from faststream import Context, FastStream -from faststream.kafka import KafkaBroker -from faststream.kafka.annotations import ContextRepo - -broker = KafkaBroker("localhost:9092") -app = FastStream(broker) - - -@broker.subscriber("test-topic") -async def handle( - secret: int = Context("secret_int"), - casted_secret: int = Context("secret_int", cast=True), - not_existed: None = Context("not_existed", default=None), -): - assert secret == "1" - assert casted_secret == 1 - assert not_existed is None - - -@app.after_startup -async def test(context: ContextRepo): - context.set_global("secret_int", "1") - await broker.publish("", "test-topic") diff --git a/docs/docs_src/getting_started/context/kafka/__init__.py 
b/docs/docs_src/getting_started/context/kafka/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/context/annotated_kafka.py b/docs/docs_src/getting_started/context/kafka/annotated.py similarity index 100% rename from docs/docs_src/getting_started/context/annotated_kafka.py rename to docs/docs_src/getting_started/context/kafka/annotated.py diff --git a/docs/docs_src/getting_started/context/base_kafka.py b/docs/docs_src/getting_started/context/kafka/base.py similarity index 100% rename from docs/docs_src/getting_started/context/base_kafka.py rename to docs/docs_src/getting_started/context/kafka/base.py diff --git a/docs/docs_src/getting_started/context/cast_kafka.py b/docs/docs_src/getting_started/context/kafka/cast.py similarity index 100% rename from docs/docs_src/getting_started/context/cast_kafka.py rename to docs/docs_src/getting_started/context/kafka/cast.py diff --git a/docs/docs_src/getting_started/context/custom_global_context_kafka.py b/docs/docs_src/getting_started/context/kafka/custom_global_context.py similarity index 100% rename from docs/docs_src/getting_started/context/custom_global_context_kafka.py rename to docs/docs_src/getting_started/context/kafka/custom_global_context.py diff --git a/docs/docs_src/getting_started/context/custom_local_context_kafka.py b/docs/docs_src/getting_started/context/kafka/custom_local_context.py similarity index 100% rename from docs/docs_src/getting_started/context/custom_local_context_kafka.py rename to docs/docs_src/getting_started/context/kafka/custom_local_context.py diff --git a/docs/docs_src/getting_started/context/default_arguments_kafka.py b/docs/docs_src/getting_started/context/kafka/default_arguments.py similarity index 100% rename from docs/docs_src/getting_started/context/default_arguments_kafka.py rename to docs/docs_src/getting_started/context/kafka/default_arguments.py diff --git a/docs/docs_src/getting_started/context/existed_context_kafka.py 
b/docs/docs_src/getting_started/context/kafka/existed_context.py similarity index 100% rename from docs/docs_src/getting_started/context/existed_context_kafka.py rename to docs/docs_src/getting_started/context/kafka/existed_context.py diff --git a/docs/docs_src/getting_started/context/fields_access_kafka.py b/docs/docs_src/getting_started/context/kafka/fields_access.py similarity index 100% rename from docs/docs_src/getting_started/context/fields_access_kafka.py rename to docs/docs_src/getting_started/context/kafka/fields_access.py diff --git a/docs/docs_src/getting_started/context/manual_local_context_kafka.py b/docs/docs_src/getting_started/context/kafka/manual_local_context.py similarity index 100% rename from docs/docs_src/getting_started/context/manual_local_context_kafka.py rename to docs/docs_src/getting_started/context/kafka/manual_local_context.py diff --git a/docs/docs_src/getting_started/context/nats/__init__.py b/docs/docs_src/getting_started/context/nats/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/context/annotated_nats.py b/docs/docs_src/getting_started/context/nats/annotated.py similarity index 100% rename from docs/docs_src/getting_started/context/annotated_nats.py rename to docs/docs_src/getting_started/context/nats/annotated.py diff --git a/docs/docs_src/getting_started/context/base_nats.py b/docs/docs_src/getting_started/context/nats/base.py similarity index 100% rename from docs/docs_src/getting_started/context/base_nats.py rename to docs/docs_src/getting_started/context/nats/base.py diff --git a/docs/docs_src/getting_started/context/cast_nats.py b/docs/docs_src/getting_started/context/nats/cast.py similarity index 100% rename from docs/docs_src/getting_started/context/cast_nats.py rename to docs/docs_src/getting_started/context/nats/cast.py diff --git a/docs/docs_src/getting_started/context/custom_global_context_nats.py 
b/docs/docs_src/getting_started/context/nats/custom_global_context.py similarity index 100% rename from docs/docs_src/getting_started/context/custom_global_context_nats.py rename to docs/docs_src/getting_started/context/nats/custom_global_context.py diff --git a/docs/docs_src/getting_started/context/custom_local_context_nats.py b/docs/docs_src/getting_started/context/nats/custom_local_context.py similarity index 100% rename from docs/docs_src/getting_started/context/custom_local_context_nats.py rename to docs/docs_src/getting_started/context/nats/custom_local_context.py diff --git a/docs/docs_src/getting_started/context/default_arguments_nats.py b/docs/docs_src/getting_started/context/nats/default_arguments.py similarity index 100% rename from docs/docs_src/getting_started/context/default_arguments_nats.py rename to docs/docs_src/getting_started/context/nats/default_arguments.py diff --git a/docs/docs_src/getting_started/context/existed_context_nats.py b/docs/docs_src/getting_started/context/nats/existed_context.py similarity index 100% rename from docs/docs_src/getting_started/context/existed_context_nats.py rename to docs/docs_src/getting_started/context/nats/existed_context.py diff --git a/docs/docs_src/getting_started/context/fields_access_nats.py b/docs/docs_src/getting_started/context/nats/fields_access.py similarity index 100% rename from docs/docs_src/getting_started/context/fields_access_nats.py rename to docs/docs_src/getting_started/context/nats/fields_access.py diff --git a/docs/docs_src/getting_started/context/manual_local_context_nats.py b/docs/docs_src/getting_started/context/nats/manual_local_context.py similarity index 100% rename from docs/docs_src/getting_started/context/manual_local_context_nats.py rename to docs/docs_src/getting_started/context/nats/manual_local_context.py diff --git a/docs/docs_src/getting_started/context/rabbit/__init__.py b/docs/docs_src/getting_started/context/rabbit/__init__.py new file mode 100644 index 
0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/context/annotated_rabbit.py b/docs/docs_src/getting_started/context/rabbit/annotated.py similarity index 100% rename from docs/docs_src/getting_started/context/annotated_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/annotated.py diff --git a/docs/docs_src/getting_started/context/base_rabbit.py b/docs/docs_src/getting_started/context/rabbit/base.py similarity index 100% rename from docs/docs_src/getting_started/context/base_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/base.py diff --git a/docs/docs_src/getting_started/context/cast_rabbit.py b/docs/docs_src/getting_started/context/rabbit/cast.py similarity index 100% rename from docs/docs_src/getting_started/context/cast_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/cast.py diff --git a/docs/docs_src/getting_started/context/custom_global_context_rabbit.py b/docs/docs_src/getting_started/context/rabbit/custom_global_context.py similarity index 100% rename from docs/docs_src/getting_started/context/custom_global_context_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/custom_global_context.py diff --git a/docs/docs_src/getting_started/context/custom_local_context_rabbit.py b/docs/docs_src/getting_started/context/rabbit/custom_local_context.py similarity index 100% rename from docs/docs_src/getting_started/context/custom_local_context_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/custom_local_context.py diff --git a/docs/docs_src/getting_started/context/default_arguments_rabbit.py b/docs/docs_src/getting_started/context/rabbit/default_arguments.py similarity index 100% rename from docs/docs_src/getting_started/context/default_arguments_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/default_arguments.py diff --git a/docs/docs_src/getting_started/context/existed_context_rabbit.py b/docs/docs_src/getting_started/context/rabbit/existed_context.py 
similarity index 100% rename from docs/docs_src/getting_started/context/existed_context_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/existed_context.py diff --git a/docs/docs_src/getting_started/context/fields_access_rabbit.py b/docs/docs_src/getting_started/context/rabbit/fields_access.py similarity index 100% rename from docs/docs_src/getting_started/context/fields_access_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/fields_access.py diff --git a/docs/docs_src/getting_started/context/manual_local_context_rabbit.py b/docs/docs_src/getting_started/context/rabbit/manual_local_context.py similarity index 100% rename from docs/docs_src/getting_started/context/manual_local_context_rabbit.py rename to docs/docs_src/getting_started/context/rabbit/manual_local_context.py diff --git a/docs/docs_src/getting_started/context/redis/__init__.py b/docs/docs_src/getting_started/context/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/context/redis/annotated.py b/docs/docs_src/getting_started/context/redis/annotated.py new file mode 100644 index 0000000000..3605c3fb75 --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/annotated.py @@ -0,0 +1,18 @@ +from typing import Annotated + +from faststream import Context, FastStream +from faststream.redis import RedisBroker +from faststream.redis.message import RedisMessage + +Message = Annotated[RedisMessage, Context()] + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test") +async def base_handler( + body: str, + message: Message, # get access to raw message +): + ... 
diff --git a/docs/docs_src/getting_started/context/redis/base.py b/docs/docs_src/getting_started/context/redis/base.py new file mode 100644 index 0000000000..3039119f74 --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/base.py @@ -0,0 +1,13 @@ +from faststream import Context, FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test") +async def base_handler( + body: str, + message=Context(), # get access to raw message +): + ... diff --git a/docs/docs_src/getting_started/context/redis/cast.py b/docs/docs_src/getting_started/context/redis/cast.py new file mode 100644 index 0000000000..fbd5eaeb3b --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/cast.py @@ -0,0 +1,18 @@ +from faststream import Context, FastStream, context +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) +context.set_global("secret", "1") + +@broker.subscriber("test-channel") +async def handle( + secret: int = Context(), +): + assert secret == "1" + +@broker.subscriber("test-channel2") +async def handle_int( + secret: int = Context(cast=True), +): + assert secret == 1 diff --git a/docs/docs_src/getting_started/context/redis/custom_global_context.py b/docs/docs_src/getting_started/context/redis/custom_global_context.py new file mode 100644 index 0000000000..3dc7f0d3ca --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/custom_global_context.py @@ -0,0 +1,18 @@ +from faststream import FastStream, ContextRepo, Context +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test-channel") +async def handle( + msg: str, + secret_str: str=Context(), +): + assert secret_str == "my-perfect-secret" # pragma: allowlist secret + + +@app.on_startup +async def set_global(context: ContextRepo): + 
context.set_global("secret_str", "my-perfect-secret") diff --git a/docs/docs_src/getting_started/context/redis/custom_local_context.py b/docs/docs_src/getting_started/context/redis/custom_local_context.py new file mode 100644 index 0000000000..4feb1eb438 --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/custom_local_context.py @@ -0,0 +1,24 @@ +from faststream import Context, FastStream, apply_types +from faststream.redis import RedisBroker +from faststream.redis.annotations import ContextRepo, RedisMessage + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test-channel") +async def handle( + msg: str, + message: RedisMessage, + context: ContextRepo, +): + with context.scope("correlation_id", message.correlation_id): + call() + + +@apply_types +def call( + message: RedisMessage, + correlation_id=Context(), +): + assert correlation_id == message.correlation_id diff --git a/docs/docs_src/getting_started/context/redis/default_arguments.py b/docs/docs_src/getting_started/context/redis/default_arguments.py new file mode 100644 index 0000000000..0011085a61 --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/default_arguments.py @@ -0,0 +1,11 @@ +from faststream import Context, FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + +@broker.subscriber("test-channel") +async def handle( + not_existed: None = Context("not_existed", default=None), +): + assert not_existed is None diff --git a/docs/docs_src/getting_started/context/redis/existed_context.py b/docs/docs_src/getting_started/context/redis/existed_context.py new file mode 100644 index 0000000000..a1af8c96c1 --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/existed_context.py @@ -0,0 +1,35 @@ +from faststream import Context, FastStream +from faststream.redis import RedisBroker +from faststream.redis.annotations import ( + ContextRepo, + RedisMessage, + 
Logger, + RedisBroker as BrokerAnnotation, +) + +broker_object = RedisBroker("redis://localhost:6379") +app = FastStream(broker_object) + + +@broker_object.subscriber("test-channel") +async def handle( + msg: str, + logger=Context(), + message=Context(), + broker=Context(), + context=Context(), +): + logger.info(message) + await broker.publish("test", "response") + + +@broker_object.subscriber("response-channel") +async def handle_response( + msg: str, + logger: Logger, + message: RedisMessage, + context: ContextRepo, + broker: BrokerAnnotation, +): + logger.info(message) + await broker.publish("test", "response") diff --git a/docs/docs_src/getting_started/context/redis/fields_access.py b/docs/docs_src/getting_started/context/redis/fields_access.py new file mode 100644 index 0000000000..94a4ada5b2 --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/fields_access.py @@ -0,0 +1,16 @@ +from faststream import Context, FastStream +from faststream.redis import RedisBroker +from faststream.redis.message import RedisMessage + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test-channel") +async def handle( + msg: RedisMessage = Context("message"), + correlation_id: str = Context("message.correlation_id"), + user_header: str = Context("message.headers.user"), +): + assert msg.correlation_id == correlation_id + assert msg.headers["user"] == user_header diff --git a/docs/docs_src/getting_started/context/redis/manual_local_context.py b/docs/docs_src/getting_started/context/redis/manual_local_context.py new file mode 100644 index 0000000000..f52af02782 --- /dev/null +++ b/docs/docs_src/getting_started/context/redis/manual_local_context.py @@ -0,0 +1,25 @@ +from faststream import Context, FastStream, apply_types, context +from faststream.redis import RedisBroker +from faststream.redis.annotations import RedisMessage + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + 
+@broker.subscriber("test-channel") +async def handle( + msg: str, + message: RedisMessage, +): + tag = context.set_local("correlation_id", message.correlation_id) + call(tag) + + +@apply_types +def call( + tag, + message: RedisMessage, + correlation_id=Context(), +): + assert correlation_id == message.correlation_id + context.reset_local("correlation_id", tag) diff --git a/docs/docs_src/getting_started/dependencies/basic/redis/__init__.py b/docs/docs_src/getting_started/dependencies/basic/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/dependencies/basic/redis/depends.py b/docs/docs_src/getting_started/dependencies/basic/redis/depends.py new file mode 100644 index 0000000000..0b67bd8494 --- /dev/null +++ b/docs/docs_src/getting_started/dependencies/basic/redis/depends.py @@ -0,0 +1,12 @@ +from faststream import FastStream, Depends +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + +def simple_dependency(): + return 1 + +@broker.subscriber("test") +async def handler(body: dict, d: int = Depends(simple_dependency)): + assert d == 1 diff --git a/docs/docs_src/getting_started/dependencies/basic/redis/nested_depends.py b/docs/docs_src/getting_started/dependencies/basic/redis/nested_depends.py new file mode 100644 index 0000000000..f25cd56936 --- /dev/null +++ b/docs/docs_src/getting_started/dependencies/basic/redis/nested_depends.py @@ -0,0 +1,18 @@ +from faststream import FastStream, Depends +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + +def another_dependency(): + return 1 + +def simple_dependency(b: int = Depends(another_dependency)): # (1) + return b * 2 + +@broker.subscriber("test") +async def handler( + body: dict, + a: int = Depends(another_dependency), + b: int = Depends(simple_dependency)): + assert (a + b) == 3 diff --git 
a/docs/docs_src/getting_started/index/base_redis.py b/docs/docs_src/getting_started/index/base_redis.py new file mode 100644 index 0000000000..1ae08f22e4 --- /dev/null +++ b/docs/docs_src/getting_started/index/base_redis.py @@ -0,0 +1,11 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") + +app = FastStream(broker) + + +@broker.subscriber("test") +async def base_handler(body): + print(body) diff --git a/docs/docs_src/getting_started/lifespan/kafka/ml_context.py b/docs/docs_src/getting_started/lifespan/kafka/ml_context.py new file mode 100644 index 0000000000..351bf18a6c --- /dev/null +++ b/docs/docs_src/getting_started/lifespan/kafka/ml_context.py @@ -0,0 +1,31 @@ +from contextlib import asynccontextmanager + +from faststream import Context, ContextRepo, FastStream +from faststream.kafka import KafkaBroker + +broker = KafkaBroker("localhost:9092") + + +def fake_ml_model_answer(x: float): + return x * 42 + + +@asynccontextmanager +async def lifespan(context: ContextRepo): + # load fake ML model + ml_models = { "answer_to_everything": fake_ml_model_answer } + context.set_global("model", ml_models) + + yield + + # Clean up the ML models and release the resources + ml_models.clear() + + +@broker.subscriber("test") +async def predict(x: float, model=Context()): + result = model["answer_to_everything"](x) + return {"result": result} + + +app = FastStream(broker, lifespan=lifespan) diff --git a/docs/docs_src/getting_started/lifespan/kafka/testing.py b/docs/docs_src/getting_started/lifespan/kafka/testing.py index e843123480..f9aa216707 100644 --- a/docs/docs_src/getting_started/lifespan/kafka/testing.py +++ b/docs/docs_src/getting_started/lifespan/kafka/testing.py @@ -13,7 +13,9 @@ async def handle(): @pytest.mark.asyncio async def test_lifespan(): - async with TestKafkaBroker(app.broker, connect_only=True): - async with TestApp(app): - # test something - pass + async with ( + 
TestKafkaBroker(app.broker, connect_only=True), + TestApp(app), + ): + # test something + pass diff --git a/docs/docs_src/getting_started/lifespan/nats/ml_context.py b/docs/docs_src/getting_started/lifespan/nats/ml_context.py new file mode 100644 index 0000000000..c634da3003 --- /dev/null +++ b/docs/docs_src/getting_started/lifespan/nats/ml_context.py @@ -0,0 +1,31 @@ +from contextlib import asynccontextmanager + +from faststream import Context, ContextRepo, FastStream +from faststream.nats import NatsBroker + +broker = NatsBroker("nats://localhost:4222") + + +def fake_ml_model_answer(x: float): + return x * 42 + + +@asynccontextmanager +async def lifespan(context: ContextRepo): + # load fake ML model + ml_models = { "answer_to_everything": fake_ml_model_answer } + context.set_global("model", ml_models) + + yield + + # Clean up the ML models and release the resources + ml_models.clear() + + +@broker.subscriber("test") +async def predict(x: float, model=Context()): + result = model["answer_to_everything"](x) + return {"result": result} + + +app = FastStream(broker, lifespan=lifespan) diff --git a/docs/docs_src/getting_started/lifespan/nats/testing.py b/docs/docs_src/getting_started/lifespan/nats/testing.py index 5e42253e09..90005f2e4c 100644 --- a/docs/docs_src/getting_started/lifespan/nats/testing.py +++ b/docs/docs_src/getting_started/lifespan/nats/testing.py @@ -13,7 +13,9 @@ async def handle(): @pytest.mark.asyncio async def test_lifespan(): - async with TestNatsBroker(app.broker, connect_only=True): - async with TestApp(app): - # test something - pass + async with ( + TestNatsBroker(app.broker, connect_only=True), + TestApp(app), + ): + # test something + pass diff --git a/docs/docs_src/getting_started/lifespan/rabbit/ml_context.py b/docs/docs_src/getting_started/lifespan/rabbit/ml_context.py new file mode 100644 index 0000000000..c679f0baea --- /dev/null +++ b/docs/docs_src/getting_started/lifespan/rabbit/ml_context.py @@ -0,0 +1,31 @@ +from contextlib import 
asynccontextmanager + +from faststream import Context, ContextRepo, FastStream +from faststream.rabbit import RabbitBroker + +broker = RabbitBroker("amqp://guest:guest@localhost:5672/") + + +def fake_ml_model_answer(x: float): + return x * 42 + + +@asynccontextmanager +async def lifespan(context: ContextRepo): + # load fake ML model + ml_models = { "answer_to_everything": fake_ml_model_answer } + context.set_global("model", ml_models) + + yield + + # Clean up the ML models and release the resources + ml_models.clear() + + +@broker.subscriber("test") +async def predict(x: float, model=Context()): + result = model["answer_to_everything"](x) + return {"result": result} + + +app = FastStream(broker, lifespan=lifespan) diff --git a/docs/docs_src/getting_started/lifespan/rabbit/testing.py b/docs/docs_src/getting_started/lifespan/rabbit/testing.py index 30581969eb..f294d1d9c8 100644 --- a/docs/docs_src/getting_started/lifespan/rabbit/testing.py +++ b/docs/docs_src/getting_started/lifespan/rabbit/testing.py @@ -13,7 +13,9 @@ async def handle(): @pytest.mark.asyncio async def test_lifespan(): - async with TestRabbitBroker(app.broker, connect_only=True): - async with TestApp(app): - # test something - pass + async with ( + TestRabbitBroker(app.broker, connect_only=True), + TestApp(app), + ): + # test something + pass diff --git a/docs/docs_src/getting_started/lifespan/redis/__init__.py b/docs/docs_src/getting_started/lifespan/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/lifespan/redis/basic.py b/docs/docs_src/getting_started/lifespan/redis/basic.py new file mode 100644 index 0000000000..3ca0d6ec80 --- /dev/null +++ b/docs/docs_src/getting_started/lifespan/redis/basic.py @@ -0,0 +1,18 @@ +from pydantic_settings import BaseSettings + +from faststream import ContextRepo, FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +class Settings(BaseSettings): + 
host: str = "redis://localhost:6379" + + +@app.on_startup +async def setup(context: ContextRepo, env: str = ".env"): + settings = Settings(_env_file=env) + context.set_global("settings", settings) + await broker.connect(settings.host) diff --git a/docs/docs_src/getting_started/lifespan/redis/ml.py b/docs/docs_src/getting_started/lifespan/redis/ml.py new file mode 100644 index 0000000000..c6ee2071e7 --- /dev/null +++ b/docs/docs_src/getting_started/lifespan/redis/ml.py @@ -0,0 +1,30 @@ +from faststream import Context, ContextRepo, FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + +ml_models = {} # fake ML model + + +def fake_answer_to_everything_ml_model(x: float): + return x * 42 + + +@app.on_startup +async def setup_model(context: ContextRepo): + # Load the ML model + ml_models["answer_to_everything"] = fake_answer_to_everything_ml_model + context.set_global("model", ml_models) + + +@app.on_shutdown +async def shutdown_model(model: dict = Context()): + # Clean up the ML models and release the resources + model.clear() + + +@broker.subscriber("test") +async def predict(x: float, model=Context()): + result = model["answer_to_everything"](x) + return {"result": result} diff --git a/docs/docs_src/getting_started/lifespan/redis/ml_context.py b/docs/docs_src/getting_started/lifespan/redis/ml_context.py new file mode 100644 index 0000000000..442bb81156 --- /dev/null +++ b/docs/docs_src/getting_started/lifespan/redis/ml_context.py @@ -0,0 +1,31 @@ +from contextlib import asynccontextmanager + +from faststream import Context, ContextRepo, FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") + + +def fake_ml_model_answer(x: float): + return x * 42 + + +@asynccontextmanager +async def lifespan(context: ContextRepo): + # load fake ML model + ml_models = { "answer_to_everything": fake_ml_model_answer } + context.set_global("model", ml_models) + + 
yield + + # Clean up the ML models and release the resources + ml_models.clear() + + +@broker.subscriber("test") +async def predict(x: float, model=Context()): + result = model["answer_to_everything"](x) + return {"result": result} + + +app = FastStream(broker, lifespan=lifespan) diff --git a/docs/docs_src/getting_started/lifespan/redis/testing.py b/docs/docs_src/getting_started/lifespan/redis/testing.py new file mode 100644 index 0000000000..f0eb893a72 --- /dev/null +++ b/docs/docs_src/getting_started/lifespan/redis/testing.py @@ -0,0 +1,21 @@ +import pytest + +from faststream import FastStream, TestApp +from faststream.redis import RedisBroker, TestRedisBroker + +app = FastStream(RedisBroker()) + + +@app.after_startup +async def handle(): + print("Calls in tests too!") + + +@pytest.mark.asyncio +async def test_lifespan(): + async with ( + TestRedisBroker(app.broker, connect_only=True), + TestApp(app), + ): + # test something + pass diff --git a/docs/docs_src/getting_started/publishing/kafka/__init__.py b/docs/docs_src/getting_started/publishing/kafka/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/publishing/broker_kafka.py b/docs/docs_src/getting_started/publishing/kafka/broker.py similarity index 100% rename from docs/docs_src/getting_started/publishing/broker_kafka.py rename to docs/docs_src/getting_started/publishing/kafka/broker.py diff --git a/docs/docs_src/getting_started/publishing/broker_context_kafka.py b/docs/docs_src/getting_started/publishing/kafka/broker_context.py similarity index 100% rename from docs/docs_src/getting_started/publishing/broker_context_kafka.py rename to docs/docs_src/getting_started/publishing/kafka/broker_context.py diff --git a/docs/docs_src/getting_started/publishing/decorator_kafka.py b/docs/docs_src/getting_started/publishing/kafka/decorator.py similarity index 100% rename from docs/docs_src/getting_started/publishing/decorator_kafka.py rename to 
docs/docs_src/getting_started/publishing/kafka/decorator.py diff --git a/docs/docs_src/getting_started/publishing/direct_kafka.py b/docs/docs_src/getting_started/publishing/kafka/direct.py similarity index 100% rename from docs/docs_src/getting_started/publishing/direct_kafka.py rename to docs/docs_src/getting_started/publishing/kafka/direct.py diff --git a/docs/docs_src/getting_started/publishing/object_kafka_testing.py b/docs/docs_src/getting_started/publishing/kafka/direct_testing.py similarity index 85% rename from docs/docs_src/getting_started/publishing/object_kafka_testing.py rename to docs/docs_src/getting_started/publishing/kafka/direct_testing.py index 6af76beb73..b8c55c0411 100644 --- a/docs/docs_src/getting_started/publishing/object_kafka_testing.py +++ b/docs/docs_src/getting_started/publishing/kafka/direct_testing.py @@ -2,7 +2,7 @@ from faststream.kafka import TestKafkaBroker -from .object_kafka import broker, publisher +from .direct import broker, publisher @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/publishing/object_kafka.py b/docs/docs_src/getting_started/publishing/kafka/object.py similarity index 100% rename from docs/docs_src/getting_started/publishing/object_kafka.py rename to docs/docs_src/getting_started/publishing/kafka/object.py diff --git a/docs/docs_src/getting_started/publishing/direct_kafka_testing.py b/docs/docs_src/getting_started/publishing/kafka/object_testing.py similarity index 85% rename from docs/docs_src/getting_started/publishing/direct_kafka_testing.py rename to docs/docs_src/getting_started/publishing/kafka/object_testing.py index c937d37d26..5808c1c0f3 100644 --- a/docs/docs_src/getting_started/publishing/direct_kafka_testing.py +++ b/docs/docs_src/getting_started/publishing/kafka/object_testing.py @@ -2,7 +2,7 @@ from faststream.kafka import TestKafkaBroker -from .direct_kafka import broker, publisher +from .object import broker, publisher @pytest.mark.asyncio diff --git 
a/docs/docs_src/getting_started/publishing/nats/__init__.py b/docs/docs_src/getting_started/publishing/nats/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/publishing/broker_nats.py b/docs/docs_src/getting_started/publishing/nats/broker.py similarity index 100% rename from docs/docs_src/getting_started/publishing/broker_nats.py rename to docs/docs_src/getting_started/publishing/nats/broker.py diff --git a/docs/docs_src/getting_started/publishing/broker_context_nats.py b/docs/docs_src/getting_started/publishing/nats/broker_context.py similarity index 100% rename from docs/docs_src/getting_started/publishing/broker_context_nats.py rename to docs/docs_src/getting_started/publishing/nats/broker_context.py diff --git a/docs/docs_src/getting_started/publishing/decorator_nats.py b/docs/docs_src/getting_started/publishing/nats/decorator.py similarity index 100% rename from docs/docs_src/getting_started/publishing/decorator_nats.py rename to docs/docs_src/getting_started/publishing/nats/decorator.py diff --git a/docs/docs_src/getting_started/publishing/direct_nats.py b/docs/docs_src/getting_started/publishing/nats/direct.py similarity index 100% rename from docs/docs_src/getting_started/publishing/direct_nats.py rename to docs/docs_src/getting_started/publishing/nats/direct.py diff --git a/docs/docs_src/getting_started/publishing/direct_nats_testing.py b/docs/docs_src/getting_started/publishing/nats/direct_testing.py similarity index 85% rename from docs/docs_src/getting_started/publishing/direct_nats_testing.py rename to docs/docs_src/getting_started/publishing/nats/direct_testing.py index ed0e1beb64..6f47b7ecb9 100644 --- a/docs/docs_src/getting_started/publishing/direct_nats_testing.py +++ b/docs/docs_src/getting_started/publishing/nats/direct_testing.py @@ -2,7 +2,7 @@ from faststream.nats import TestNatsBroker -from .direct_nats import broker, publisher +from .direct import broker, publisher @pytest.mark.asyncio 
diff --git a/docs/docs_src/getting_started/publishing/object_nats.py b/docs/docs_src/getting_started/publishing/nats/object.py similarity index 100% rename from docs/docs_src/getting_started/publishing/object_nats.py rename to docs/docs_src/getting_started/publishing/nats/object.py diff --git a/docs/docs_src/getting_started/publishing/object_nats_testing.py b/docs/docs_src/getting_started/publishing/nats/object_testing.py similarity index 85% rename from docs/docs_src/getting_started/publishing/object_nats_testing.py rename to docs/docs_src/getting_started/publishing/nats/object_testing.py index 5b7986d3c8..e3194c83e4 100644 --- a/docs/docs_src/getting_started/publishing/object_nats_testing.py +++ b/docs/docs_src/getting_started/publishing/nats/object_testing.py @@ -2,7 +2,7 @@ from faststream.nats import TestNatsBroker -from .object_nats import broker, publisher +from .object import broker, publisher @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/publishing/rabbit/__init__.py b/docs/docs_src/getting_started/publishing/rabbit/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/publishing/broker_rabbit.py b/docs/docs_src/getting_started/publishing/rabbit/broker.py similarity index 100% rename from docs/docs_src/getting_started/publishing/broker_rabbit.py rename to docs/docs_src/getting_started/publishing/rabbit/broker.py diff --git a/docs/docs_src/getting_started/publishing/broker_context_rabbit.py b/docs/docs_src/getting_started/publishing/rabbit/broker_context.py similarity index 100% rename from docs/docs_src/getting_started/publishing/broker_context_rabbit.py rename to docs/docs_src/getting_started/publishing/rabbit/broker_context.py diff --git a/docs/docs_src/getting_started/publishing/decorator_rabbit.py b/docs/docs_src/getting_started/publishing/rabbit/decorator.py similarity index 100% rename from docs/docs_src/getting_started/publishing/decorator_rabbit.py rename to 
docs/docs_src/getting_started/publishing/rabbit/decorator.py diff --git a/docs/docs_src/getting_started/publishing/direct_rabbit.py b/docs/docs_src/getting_started/publishing/rabbit/direct.py similarity index 100% rename from docs/docs_src/getting_started/publishing/direct_rabbit.py rename to docs/docs_src/getting_started/publishing/rabbit/direct.py diff --git a/docs/docs_src/getting_started/publishing/object_rabbit_testing.py b/docs/docs_src/getting_started/publishing/rabbit/direct_testing.py similarity index 85% rename from docs/docs_src/getting_started/publishing/object_rabbit_testing.py rename to docs/docs_src/getting_started/publishing/rabbit/direct_testing.py index 44493ced19..2844a53b6e 100644 --- a/docs/docs_src/getting_started/publishing/object_rabbit_testing.py +++ b/docs/docs_src/getting_started/publishing/rabbit/direct_testing.py @@ -2,7 +2,7 @@ from faststream.rabbit import TestRabbitBroker -from .object_rabbit import broker, publisher +from .direct import broker, publisher @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/publishing/object_rabbit.py b/docs/docs_src/getting_started/publishing/rabbit/object.py similarity index 100% rename from docs/docs_src/getting_started/publishing/object_rabbit.py rename to docs/docs_src/getting_started/publishing/rabbit/object.py diff --git a/docs/docs_src/getting_started/publishing/direct_rabbit_testing.py b/docs/docs_src/getting_started/publishing/rabbit/object_testing.py similarity index 85% rename from docs/docs_src/getting_started/publishing/direct_rabbit_testing.py rename to docs/docs_src/getting_started/publishing/rabbit/object_testing.py index 25fe9eda73..e693f24edb 100644 --- a/docs/docs_src/getting_started/publishing/direct_rabbit_testing.py +++ b/docs/docs_src/getting_started/publishing/rabbit/object_testing.py @@ -2,7 +2,7 @@ from faststream.rabbit import TestRabbitBroker -from .direct_rabbit import broker, publisher +from .object import broker, publisher @pytest.mark.asyncio diff --git 
a/docs/docs_src/getting_started/publishing/redis/__init__.py b/docs/docs_src/getting_started/publishing/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/publishing/redis/broker.py b/docs/docs_src/getting_started/publishing/redis/broker.py new file mode 100644 index 0000000000..17549948ec --- /dev/null +++ b/docs/docs_src/getting_started/publishing/redis/broker.py @@ -0,0 +1,20 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test-channel") +async def handle(): + await broker.publish("Hi!", channel="another-channel") + + +@broker.subscriber("another-channel") +async def handle_next(msg: str): + assert msg == "Hi!" + + +@app.after_startup +async def test(): + await broker.publish("", channel="test-channel") diff --git a/docs/docs_src/getting_started/publishing/redis/broker_context.py b/docs/docs_src/getting_started/publishing/redis/broker_context.py new file mode 100644 index 0000000000..1be3c9b4f2 --- /dev/null +++ b/docs/docs_src/getting_started/publishing/redis/broker_context.py @@ -0,0 +1,16 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test-channel") +async def handle(msg: str): + assert msg == "Hi!" 
+ + +@app.after_startup +async def test(): + async with RedisBroker("redis://localhost:6379") as br: + await br.publish("Hi!", channel="test-channel") diff --git a/docs/docs_src/getting_started/publishing/redis/decorator.py b/docs/docs_src/getting_started/publishing/redis/decorator.py new file mode 100644 index 0000000000..1a103c7a49 --- /dev/null +++ b/docs/docs_src/getting_started/publishing/redis/decorator.py @@ -0,0 +1,21 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test-channel") +@broker.publisher("another-channel") +async def handle() -> str: + return "Hi!" + + +@broker.subscriber("another-channel") +async def handle_next(msg: str): + assert msg == "Hi!" + + +@app.after_startup +async def test(): + await broker.publish("", channel="test-channel") diff --git a/docs/docs_src/getting_started/publishing/redis/direct.py b/docs/docs_src/getting_started/publishing/redis/direct.py new file mode 100644 index 0000000000..effa6c42b3 --- /dev/null +++ b/docs/docs_src/getting_started/publishing/redis/direct.py @@ -0,0 +1,16 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + +publisher = broker.publisher("another-channel") + +@broker.subscriber("test-channel") +async def handle(): + await publisher.publish("Hi!") + + +@broker.subscriber("another-channel") +async def handle_next(msg: str): + assert msg == "Hi!" 
diff --git a/docs/docs_src/getting_started/publishing/redis/direct_testing.py b/docs/docs_src/getting_started/publishing/redis/direct_testing.py new file mode 100644 index 0000000000..f09f45ff5f --- /dev/null +++ b/docs/docs_src/getting_started/publishing/redis/direct_testing.py @@ -0,0 +1,13 @@ +import pytest + +from faststream.redis import TestRedisBroker + +from .direct import broker, publisher + + +@pytest.mark.asyncio +async def test_handle(): + async with TestRedisBroker(broker) as br: + await br.publish("", channel="test-channel") + + publisher.mock.assert_called_once_with("Hi!") diff --git a/docs/docs_src/getting_started/publishing/redis/object.py b/docs/docs_src/getting_started/publishing/redis/object.py new file mode 100644 index 0000000000..ade6bef98d --- /dev/null +++ b/docs/docs_src/getting_started/publishing/redis/object.py @@ -0,0 +1,17 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + +publisher = broker.publisher("another-channel") + +@publisher +@broker.subscriber("test-channel") +async def handle() -> str: + return "Hi!" + + +@broker.subscriber("another-channel") +async def handle_next(msg: str): + assert msg == "Hi!" 
diff --git a/docs/docs_src/getting_started/publishing/redis/object_testing.py b/docs/docs_src/getting_started/publishing/redis/object_testing.py new file mode 100644 index 0000000000..a23fec9158 --- /dev/null +++ b/docs/docs_src/getting_started/publishing/redis/object_testing.py @@ -0,0 +1,13 @@ +import pytest + +from faststream.redis import TestRedisBroker + +from .object import broker, publisher + + +@pytest.mark.asyncio +async def test_handle(): + async with TestRedisBroker(broker) as br: + await br.publish("", channel="test-channel") + + publisher.mock.assert_called_once_with("Hi!") diff --git a/docs/docs_src/getting_started/routers/kafka/__init__.py b/docs/docs_src/getting_started/routers/kafka/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/routers/router_kafka.py b/docs/docs_src/getting_started/routers/kafka/router.py similarity index 100% rename from docs/docs_src/getting_started/routers/router_kafka.py rename to docs/docs_src/getting_started/routers/kafka/router.py diff --git a/docs/docs_src/getting_started/routers/router_delay_kafka.py b/docs/docs_src/getting_started/routers/kafka/router_delay.py similarity index 100% rename from docs/docs_src/getting_started/routers/router_delay_kafka.py rename to docs/docs_src/getting_started/routers/kafka/router_delay.py diff --git a/docs/docs_src/getting_started/routers/nats/__init__.py b/docs/docs_src/getting_started/routers/nats/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/routers/router_nats.py b/docs/docs_src/getting_started/routers/nats/router.py similarity index 100% rename from docs/docs_src/getting_started/routers/router_nats.py rename to docs/docs_src/getting_started/routers/nats/router.py diff --git a/docs/docs_src/getting_started/routers/router_delay_nats.py b/docs/docs_src/getting_started/routers/nats/router_delay.py similarity index 100% rename from 
docs/docs_src/getting_started/routers/router_delay_nats.py rename to docs/docs_src/getting_started/routers/nats/router_delay.py diff --git a/docs/docs_src/getting_started/routers/rabbit/__init__.py b/docs/docs_src/getting_started/routers/rabbit/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/routers/router_rabbit.py b/docs/docs_src/getting_started/routers/rabbit/router.py similarity index 100% rename from docs/docs_src/getting_started/routers/router_rabbit.py rename to docs/docs_src/getting_started/routers/rabbit/router.py diff --git a/docs/docs_src/getting_started/routers/router_delay_rabbit.py b/docs/docs_src/getting_started/routers/rabbit/router_delay.py similarity index 100% rename from docs/docs_src/getting_started/routers/router_delay_rabbit.py rename to docs/docs_src/getting_started/routers/rabbit/router_delay.py diff --git a/docs/docs_src/getting_started/routers/redis/__init__.py b/docs/docs_src/getting_started/routers/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/routers/redis/router.py b/docs/docs_src/getting_started/routers/redis/router.py new file mode 100644 index 0000000000..b88af5ea27 --- /dev/null +++ b/docs/docs_src/getting_started/routers/redis/router.py @@ -0,0 +1,30 @@ +from faststream import FastStream +from faststream.redis import RedisBroker, RedisRouter + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) +router = RedisRouter(prefix="prefix_") + + +@router.subscriber("test-channel") +@router.publisher("another-channel") +async def handle(name: str, user_id: int) -> str: + assert name == "John" + assert user_id == 1 + return "Hi!" + + +@router.subscriber("another-channel") +async def handle_response(msg: str): + assert msg == "Hi!" 
+ + +broker.include_router(router) + + +@app.after_startup +async def test(): + await broker.publish( + {"name": "John", "user_id": 1}, + channel="prefix_test-channel", + ) diff --git a/docs/docs_src/getting_started/routers/redis/router_delay.py b/docs/docs_src/getting_started/routers/redis/router_delay.py new file mode 100644 index 0000000000..33779b5630 --- /dev/null +++ b/docs/docs_src/getting_started/routers/redis/router_delay.py @@ -0,0 +1,20 @@ +from faststream import FastStream +from faststream.redis import RedisBroker, RedisRouter, RedisRoute + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +async def handle(name: str, user_id: int): + assert name == "John" + assert user_id == 1 + + +router = RedisRouter(handlers=(RedisRoute(handle, "test-channel"),)) + +broker.include_router(router) + + +@app.after_startup +async def test(): + await broker.publish({"name": "John", "user_id": 1}, channel="test-channel") diff --git a/docs/docs_src/getting_started/serialization/avro.py b/docs/docs_src/getting_started/serialization/avro.py index 4df3361195..357873d5a1 100644 --- a/docs/docs_src/getting_started/serialization/avro.py +++ b/docs/docs_src/getting_started/serialization/avro.py @@ -1,6 +1,7 @@ -import fastavro import io +import fastavro + from faststream import FastStream, Logger from faststream.kafka import KafkaBroker, KafkaMessage @@ -35,6 +36,7 @@ async def consume(name: str, age: int, logger: Logger): @app.after_startup async def publish(): msg = {"name": "John", "age": 25} + bytes_writer = io.BytesIO() fastavro.schemaless_writer(bytes_writer, schema, msg) raw_bytes = bytes_writer.getvalue() diff --git a/docs/docs_src/getting_started/serialization/parser_redis.py b/docs/docs_src/getting_started/serialization/parser_redis.py new file mode 100644 index 0000000000..b15e4ef023 --- /dev/null +++ b/docs/docs_src/getting_started/serialization/parser_redis.py @@ -0,0 +1,28 @@ +from typing import Awaitable, Callable + +from faststream 
import FastStream +from faststream.redis import RedisBroker, RedisMessage +from faststream.redis.message import PubSubMessage + + +async def custom_parser( + msg: PubSubMessage, + original_parser: Callable[[PubSubMessage], Awaitable[RedisMessage]], +) -> RedisMessage: + parsed_msg = await original_parser(msg) + parsed_msg.message_id = parsed_msg.headers["custom_message_id"] + return parsed_msg + + +broker = RedisBroker(parser=custom_parser) +app = FastStream(broker) + + +@broker.subscriber("test") +async def handle(): + ... + + +@app.after_startup +async def test(): + await broker.publish("", "test", headers={"custom_message_id": "1"}) diff --git a/docs/docs_src/getting_started/subscription/kafka/__init__.py b/docs/docs_src/getting_started/subscription/kafka/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/subscription/annotation_kafka.py b/docs/docs_src/getting_started/subscription/kafka/annotation.py similarity index 100% rename from docs/docs_src/getting_started/subscription/annotation_kafka.py rename to docs/docs_src/getting_started/subscription/kafka/annotation.py diff --git a/docs/docs_src/getting_started/subscription/filter_kafka.py b/docs/docs_src/getting_started/subscription/kafka/filter.py similarity index 100% rename from docs/docs_src/getting_started/subscription/filter_kafka.py rename to docs/docs_src/getting_started/subscription/kafka/filter.py diff --git a/docs/docs_src/getting_started/subscription/pydantic_fields_kafka.py b/docs/docs_src/getting_started/subscription/kafka/pydantic_fields.py similarity index 100% rename from docs/docs_src/getting_started/subscription/pydantic_fields_kafka.py rename to docs/docs_src/getting_started/subscription/kafka/pydantic_fields.py diff --git a/docs/docs_src/getting_started/subscription/pydantic_model_kafka.py b/docs/docs_src/getting_started/subscription/kafka/pydantic_model.py similarity index 100% rename from 
docs/docs_src/getting_started/subscription/pydantic_model_kafka.py rename to docs/docs_src/getting_started/subscription/kafka/pydantic_model.py diff --git a/docs/docs_src/getting_started/subscription/real_testing_kafka.py b/docs/docs_src/getting_started/subscription/kafka/real_testing.py similarity index 93% rename from docs/docs_src/getting_started/subscription/real_testing_kafka.py rename to docs/docs_src/getting_started/subscription/kafka/real_testing.py index eee69035b9..0cf374b233 100644 --- a/docs/docs_src/getting_started/subscription/real_testing_kafka.py +++ b/docs/docs_src/getting_started/subscription/kafka/real_testing.py @@ -3,7 +3,7 @@ from faststream.kafka import TestKafkaBroker -from .pydantic_fields_kafka import broker, handle +from .pydantic_fields import broker, handle @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/subscription/testing_kafka.py b/docs/docs_src/getting_started/subscription/kafka/testing.py similarity index 93% rename from docs/docs_src/getting_started/subscription/testing_kafka.py rename to docs/docs_src/getting_started/subscription/kafka/testing.py index 683a89fe02..e1f6241276 100644 --- a/docs/docs_src/getting_started/subscription/testing_kafka.py +++ b/docs/docs_src/getting_started/subscription/kafka/testing.py @@ -3,7 +3,7 @@ from faststream.kafka import TestKafkaBroker -from .annotation_kafka import broker, handle +from .annotation import broker, handle @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/subscription/nats/__init__.py b/docs/docs_src/getting_started/subscription/nats/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/subscription/annotation_nats.py b/docs/docs_src/getting_started/subscription/nats/annotation.py similarity index 100% rename from docs/docs_src/getting_started/subscription/annotation_nats.py rename to docs/docs_src/getting_started/subscription/nats/annotation.py diff --git 
a/docs/docs_src/getting_started/subscription/filter_nats.py b/docs/docs_src/getting_started/subscription/nats/filter.py similarity index 100% rename from docs/docs_src/getting_started/subscription/filter_nats.py rename to docs/docs_src/getting_started/subscription/nats/filter.py diff --git a/docs/docs_src/getting_started/subscription/pydantic_fields_nats.py b/docs/docs_src/getting_started/subscription/nats/pydantic_fields.py similarity index 100% rename from docs/docs_src/getting_started/subscription/pydantic_fields_nats.py rename to docs/docs_src/getting_started/subscription/nats/pydantic_fields.py diff --git a/docs/docs_src/getting_started/subscription/pydantic_model_nats.py b/docs/docs_src/getting_started/subscription/nats/pydantic_model.py similarity index 100% rename from docs/docs_src/getting_started/subscription/pydantic_model_nats.py rename to docs/docs_src/getting_started/subscription/nats/pydantic_model.py diff --git a/docs/docs_src/getting_started/subscription/real_testing_nats.py b/docs/docs_src/getting_started/subscription/nats/real_testing.py similarity index 94% rename from docs/docs_src/getting_started/subscription/real_testing_nats.py rename to docs/docs_src/getting_started/subscription/nats/real_testing.py index dedfd6979d..5e9d6e4567 100644 --- a/docs/docs_src/getting_started/subscription/real_testing_nats.py +++ b/docs/docs_src/getting_started/subscription/nats/real_testing.py @@ -3,7 +3,7 @@ from faststream.nats import TestNatsBroker -from .pydantic_fields_nats import broker, handle +from .pydantic_fields import broker, handle @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/subscription/testing_nats.py b/docs/docs_src/getting_started/subscription/nats/testing.py similarity index 93% rename from docs/docs_src/getting_started/subscription/testing_nats.py rename to docs/docs_src/getting_started/subscription/nats/testing.py index c4456d804d..0f7560e043 100644 --- a/docs/docs_src/getting_started/subscription/testing_nats.py +++ 
b/docs/docs_src/getting_started/subscription/nats/testing.py @@ -3,7 +3,7 @@ from faststream.nats import TestNatsBroker -from .annotation_nats import broker, handle +from .annotation import broker, handle @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/subscription/rabbit/__init__.py b/docs/docs_src/getting_started/subscription/rabbit/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/subscription/annotation_rabbit.py b/docs/docs_src/getting_started/subscription/rabbit/annotation.py similarity index 100% rename from docs/docs_src/getting_started/subscription/annotation_rabbit.py rename to docs/docs_src/getting_started/subscription/rabbit/annotation.py diff --git a/docs/docs_src/getting_started/subscription/filter_rabbit.py b/docs/docs_src/getting_started/subscription/rabbit/filter.py similarity index 100% rename from docs/docs_src/getting_started/subscription/filter_rabbit.py rename to docs/docs_src/getting_started/subscription/rabbit/filter.py diff --git a/docs/docs_src/getting_started/subscription/pydantic_fields_rabbit.py b/docs/docs_src/getting_started/subscription/rabbit/pydantic_fields.py similarity index 100% rename from docs/docs_src/getting_started/subscription/pydantic_fields_rabbit.py rename to docs/docs_src/getting_started/subscription/rabbit/pydantic_fields.py diff --git a/docs/docs_src/getting_started/subscription/pydantic_model_rabbit.py b/docs/docs_src/getting_started/subscription/rabbit/pydantic_model.py similarity index 100% rename from docs/docs_src/getting_started/subscription/pydantic_model_rabbit.py rename to docs/docs_src/getting_started/subscription/rabbit/pydantic_model.py diff --git a/docs/docs_src/getting_started/subscription/real_testing_rabbit.py b/docs/docs_src/getting_started/subscription/rabbit/real_testing.py similarity index 93% rename from docs/docs_src/getting_started/subscription/real_testing_rabbit.py rename to 
docs/docs_src/getting_started/subscription/rabbit/real_testing.py index 3d220e6b80..900b6046e7 100644 --- a/docs/docs_src/getting_started/subscription/real_testing_rabbit.py +++ b/docs/docs_src/getting_started/subscription/rabbit/real_testing.py @@ -3,7 +3,7 @@ from faststream.rabbit import TestRabbitBroker -from .pydantic_fields_rabbit import broker, handle +from .pydantic_fields import broker, handle @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/subscription/testing_rabbit.py b/docs/docs_src/getting_started/subscription/rabbit/testing.py similarity index 93% rename from docs/docs_src/getting_started/subscription/testing_rabbit.py rename to docs/docs_src/getting_started/subscription/rabbit/testing.py index faf40bc30d..78425924da 100644 --- a/docs/docs_src/getting_started/subscription/testing_rabbit.py +++ b/docs/docs_src/getting_started/subscription/rabbit/testing.py @@ -3,7 +3,7 @@ from faststream.rabbit import TestRabbitBroker -from .annotation_rabbit import broker, handle +from .annotation import broker, handle @pytest.mark.asyncio diff --git a/docs/docs_src/getting_started/subscription/redis/__init__.py b/docs/docs_src/getting_started/subscription/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/subscription/redis/annotation.py b/docs/docs_src/getting_started/subscription/redis/annotation.py new file mode 100644 index 0000000000..f54b186fff --- /dev/null +++ b/docs/docs_src/getting_started/subscription/redis/annotation.py @@ -0,0 +1,11 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test-channel") +async def handle(name: str, user_id: int): + assert name == "John" + assert user_id == 1 diff --git a/docs/docs_src/getting_started/subscription/redis/filter.py b/docs/docs_src/getting_started/subscription/redis/filter.py new file mode 100644 index 
0000000000..02a017c8a9 --- /dev/null +++ b/docs/docs_src/getting_started/subscription/redis/filter.py @@ -0,0 +1,32 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber( + "test-channel", + filter=lambda msg: msg.content_type == "application/json", +) +async def handle(name: str, user_id: int): + assert name == "John" + assert user_id == 1 + + +@broker.subscriber("test-channel") +async def default_handler(msg: str): + assert msg == "Hello, FastStream!" + + +@app.after_startup +async def test(): + await broker.publish( + {"name": "John", "user_id": 1}, + channel="test-channel", + ) + + await broker.publish( + "Hello, FastStream!", + channel="test-channel", + ) diff --git a/docs/docs_src/getting_started/subscription/redis/pydantic_fields.py b/docs/docs_src/getting_started/subscription/redis/pydantic_fields.py new file mode 100644 index 0000000000..18656207ef --- /dev/null +++ b/docs/docs_src/getting_started/subscription/redis/pydantic_fields.py @@ -0,0 +1,20 @@ +from pydantic import Field, NonNegativeInt + +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("test-channel") +async def handle( + name: str = Field( + ..., examples=["John"], description="Registered user name" + ), + user_id: NonNegativeInt = Field( + ..., examples=[1], description="Registered user id" + ), +): + assert name == "John" + assert user_id == 1 diff --git a/docs/docs_src/getting_started/subscription/redis/pydantic_model.py b/docs/docs_src/getting_started/subscription/redis/pydantic_model.py new file mode 100644 index 0000000000..0e5e27d7e8 --- /dev/null +++ b/docs/docs_src/getting_started/subscription/redis/pydantic_model.py @@ -0,0 +1,22 @@ +from pydantic import BaseModel, Field, NonNegativeInt + +from faststream import FastStream +from 
faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + + +class UserInfo(BaseModel): + name: str = Field( + ..., examples=["John"], description="Registered user name" + ) + user_id: NonNegativeInt = Field( + ..., examples=[1], description="Registered user id" + ) + + +@broker.subscriber("test-channel") +async def handle(user: UserInfo): + assert user.name == "John" + assert user.user_id == 1 diff --git a/docs/docs_src/getting_started/subscription/redis/real_testing.py b/docs/docs_src/getting_started/subscription/redis/real_testing.py new file mode 100644 index 0000000000..b2c05c203e --- /dev/null +++ b/docs/docs_src/getting_started/subscription/redis/real_testing.py @@ -0,0 +1,25 @@ +import pytest +from pydantic import ValidationError + +from faststream.redis import TestRedisBroker + +from .pydantic_fields import broker, handle + + +@pytest.mark.asyncio +async def test_handle(): + async with TestRedisBroker(broker, with_real=True) as br: + await br.publish({"name": "John", "user_id": 1}, channel="test-channel") + await handle.wait_call(timeout=3) + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + + assert handle.mock is None + +@pytest.mark.asyncio +async def test_validation_error(): + async with TestRedisBroker(broker, with_real=True) as br: + with pytest.raises(ValidationError): + await br.publish("wrong message", channel="test-channel") + await handle.wait_call(timeout=3) + + handle.mock.assert_called_once_with("wrong message") diff --git a/docs/docs_src/getting_started/subscription/redis/testing.py b/docs/docs_src/getting_started/subscription/redis/testing.py new file mode 100644 index 0000000000..4934366f75 --- /dev/null +++ b/docs/docs_src/getting_started/subscription/redis/testing.py @@ -0,0 +1,24 @@ +import pytest +from pydantic import ValidationError + +from faststream.redis import TestRedisBroker + +from .annotation import broker, handle + + +@pytest.mark.asyncio +async def 
test_handle(): + async with TestRedisBroker(broker) as br: + await br.publish({"name": "John", "user_id": 1}, channel="test-channel") + + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + + assert handle.mock is None + +@pytest.mark.asyncio +async def test_validation_error(): + async with TestRedisBroker(broker) as br: + with pytest.raises(ValidationError): + await br.publish("wrong message", channel="test-channel") + + handle.mock.assert_called_once_with("wrong message") diff --git a/docs/docs_src/index/kafka/__init__.py b/docs/docs_src/index/kafka/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/index/basic_kafka.py b/docs/docs_src/index/kafka/basic.py similarity index 100% rename from docs/docs_src/index/basic_kafka.py rename to docs/docs_src/index/kafka/basic.py diff --git a/docs/docs_src/index/pydantic_kafka.py b/docs/docs_src/index/kafka/pydantic.py similarity index 100% rename from docs/docs_src/index/pydantic_kafka.py rename to docs/docs_src/index/kafka/pydantic.py diff --git a/docs/docs_src/index/test_kafka.py b/docs/docs_src/index/kafka/test.py similarity index 93% rename from docs/docs_src/index/test_kafka.py rename to docs/docs_src/index/kafka/test.py index 409113c5ed..bfd740312c 100644 --- a/docs/docs_src/index/test_kafka.py +++ b/docs/docs_src/index/kafka/test.py @@ -1,4 +1,4 @@ -from .pydantic_kafka import broker +from .pydantic import broker import pytest import pydantic diff --git a/docs/docs_src/index/nats/__init__.py b/docs/docs_src/index/nats/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/index/basic_nats.py b/docs/docs_src/index/nats/basic.py similarity index 100% rename from docs/docs_src/index/basic_nats.py rename to docs/docs_src/index/nats/basic.py diff --git a/docs/docs_src/index/pydantic_nats.py b/docs/docs_src/index/nats/pydantic.py similarity index 100% rename from docs/docs_src/index/pydantic_nats.py rename to 
docs/docs_src/index/nats/pydantic.py diff --git a/docs/docs_src/index/test_nats.py b/docs/docs_src/index/nats/test.py similarity index 93% rename from docs/docs_src/index/test_nats.py rename to docs/docs_src/index/nats/test.py index 29b28536c0..85b2e6de76 100644 --- a/docs/docs_src/index/test_nats.py +++ b/docs/docs_src/index/nats/test.py @@ -1,4 +1,4 @@ -from .pydantic_nats import broker +from .pydantic import broker import pytest import pydantic diff --git a/docs/docs_src/index/rabbit/__init__.py b/docs/docs_src/index/rabbit/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/index/basic_rabbit.py b/docs/docs_src/index/rabbit/basic.py similarity index 100% rename from docs/docs_src/index/basic_rabbit.py rename to docs/docs_src/index/rabbit/basic.py diff --git a/docs/docs_src/index/pydantic_rabbit.py b/docs/docs_src/index/rabbit/pydantic.py similarity index 100% rename from docs/docs_src/index/pydantic_rabbit.py rename to docs/docs_src/index/rabbit/pydantic.py diff --git a/docs/docs_src/index/test_rabbit.py b/docs/docs_src/index/rabbit/test.py similarity index 93% rename from docs/docs_src/index/test_rabbit.py rename to docs/docs_src/index/rabbit/test.py index f730086349..a193db35b2 100644 --- a/docs/docs_src/index/test_rabbit.py +++ b/docs/docs_src/index/rabbit/test.py @@ -1,4 +1,4 @@ -from .pydantic_rabbit import broker +from .pydantic import broker import pytest import pydantic diff --git a/docs/docs_src/index/redis/__init__.py b/docs/docs_src/index/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/index/redis/basic.py b/docs/docs_src/index/redis/basic.py new file mode 100644 index 0000000000..879389ca2f --- /dev/null +++ b/docs/docs_src/index/redis/basic.py @@ -0,0 +1,10 @@ +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + +@broker.subscriber("in-channel") 
+@broker.publisher("out-channel") +async def handle_msg(user: str, user_id: int) -> str: + return f"User: {user_id} - {user} registered" diff --git a/docs/docs_src/index/redis/pydantic.py b/docs/docs_src/index/redis/pydantic.py new file mode 100644 index 0000000000..a5bc99cce6 --- /dev/null +++ b/docs/docs_src/index/redis/pydantic.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel, Field, PositiveInt +from faststream import FastStream +from faststream.redis import RedisBroker + +broker = RedisBroker("redis://localhost:6379") +app = FastStream(broker) + +class User(BaseModel): + user: str = Field(..., examples=["John"]) + user_id: PositiveInt = Field(..., examples=["1"]) + +@broker.subscriber("in-channel") +@broker.publisher("out-channel") +async def handle_msg(data: User) -> str: + return f"User: {data.user} - {data.user_id} registered" diff --git a/docs/docs_src/index/redis/test.py b/docs/docs_src/index/redis/test.py new file mode 100644 index 0000000000..9a14ba4190 --- /dev/null +++ b/docs/docs_src/index/redis/test.py @@ -0,0 +1,20 @@ +from .pydantic import broker + +import pytest +import pydantic +from faststream.redis import TestRedisBroker + + +@pytest.mark.asyncio +async def test_correct(): + async with TestRedisBroker(broker) as br: + await br.publish({ + "user": "John", + "user_id": 1, + }, "in-channel") + +@pytest.mark.asyncio +async def test_invalid(): + async with TestRedisBroker(broker) as br: + with pytest.raises(pydantic.ValidationError): + await br.publish("wrong message", "in-channel") diff --git a/docs/docs_src/integrations/fastapi/kafka/__init__.py b/docs/docs_src/integrations/fastapi/kafka/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/integrations/fastapi/base_kafka.py b/docs/docs_src/integrations/fastapi/kafka/base.py similarity index 100% rename from docs/docs_src/integrations/fastapi/base_kafka.py rename to docs/docs_src/integrations/fastapi/kafka/base.py diff --git 
a/docs/docs_src/integrations/fastapi/depends_kafka.py b/docs/docs_src/integrations/fastapi/kafka/depends.py similarity index 100% rename from docs/docs_src/integrations/fastapi/depends_kafka.py rename to docs/docs_src/integrations/fastapi/kafka/depends.py diff --git a/docs/docs_src/integrations/fastapi/send_kafka.py b/docs/docs_src/integrations/fastapi/kafka/send.py similarity index 100% rename from docs/docs_src/integrations/fastapi/send_kafka.py rename to docs/docs_src/integrations/fastapi/kafka/send.py diff --git a/docs/docs_src/integrations/fastapi/startup_kafka.py b/docs/docs_src/integrations/fastapi/kafka/startup.py similarity index 100% rename from docs/docs_src/integrations/fastapi/startup_kafka.py rename to docs/docs_src/integrations/fastapi/kafka/startup.py diff --git a/docs/docs_src/integrations/fastapi/test_kafka.py b/docs/docs_src/integrations/fastapi/kafka/test.py similarity index 100% rename from docs/docs_src/integrations/fastapi/test_kafka.py rename to docs/docs_src/integrations/fastapi/kafka/test.py diff --git a/docs/docs_src/integrations/fastapi/nats/__init__.py b/docs/docs_src/integrations/fastapi/nats/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/integrations/fastapi/base_nats.py b/docs/docs_src/integrations/fastapi/nats/base.py similarity index 100% rename from docs/docs_src/integrations/fastapi/base_nats.py rename to docs/docs_src/integrations/fastapi/nats/base.py diff --git a/docs/docs_src/integrations/fastapi/depends_nats.py b/docs/docs_src/integrations/fastapi/nats/depends.py similarity index 100% rename from docs/docs_src/integrations/fastapi/depends_nats.py rename to docs/docs_src/integrations/fastapi/nats/depends.py diff --git a/docs/docs_src/integrations/fastapi/send_nats.py b/docs/docs_src/integrations/fastapi/nats/send.py similarity index 100% rename from docs/docs_src/integrations/fastapi/send_nats.py rename to docs/docs_src/integrations/fastapi/nats/send.py diff --git 
a/docs/docs_src/integrations/fastapi/startup_nats.py b/docs/docs_src/integrations/fastapi/nats/startup.py similarity index 100% rename from docs/docs_src/integrations/fastapi/startup_nats.py rename to docs/docs_src/integrations/fastapi/nats/startup.py diff --git a/docs/docs_src/integrations/fastapi/test_nats.py b/docs/docs_src/integrations/fastapi/nats/test.py similarity index 100% rename from docs/docs_src/integrations/fastapi/test_nats.py rename to docs/docs_src/integrations/fastapi/nats/test.py diff --git a/docs/docs_src/integrations/fastapi/rabbit/__init__.py b/docs/docs_src/integrations/fastapi/rabbit/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/integrations/fastapi/base_rabbit.py b/docs/docs_src/integrations/fastapi/rabbit/base.py similarity index 100% rename from docs/docs_src/integrations/fastapi/base_rabbit.py rename to docs/docs_src/integrations/fastapi/rabbit/base.py diff --git a/docs/docs_src/integrations/fastapi/depends_rabbit.py b/docs/docs_src/integrations/fastapi/rabbit/depends.py similarity index 100% rename from docs/docs_src/integrations/fastapi/depends_rabbit.py rename to docs/docs_src/integrations/fastapi/rabbit/depends.py diff --git a/docs/docs_src/integrations/fastapi/send_rabbit.py b/docs/docs_src/integrations/fastapi/rabbit/send.py similarity index 100% rename from docs/docs_src/integrations/fastapi/send_rabbit.py rename to docs/docs_src/integrations/fastapi/rabbit/send.py diff --git a/docs/docs_src/integrations/fastapi/startup_rabbit.py b/docs/docs_src/integrations/fastapi/rabbit/startup.py similarity index 100% rename from docs/docs_src/integrations/fastapi/startup_rabbit.py rename to docs/docs_src/integrations/fastapi/rabbit/startup.py diff --git a/docs/docs_src/integrations/fastapi/test_rabbit.py b/docs/docs_src/integrations/fastapi/rabbit/test.py similarity index 100% rename from docs/docs_src/integrations/fastapi/test_rabbit.py rename to docs/docs_src/integrations/fastapi/rabbit/test.py diff 
--git a/docs/docs_src/integrations/fastapi/redis/__init__.py b/docs/docs_src/integrations/fastapi/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/integrations/fastapi/redis/base.py b/docs/docs_src/integrations/fastapi/redis/base.py new file mode 100644 index 0000000000..e1b3b6e9f4 --- /dev/null +++ b/docs/docs_src/integrations/fastapi/redis/base.py @@ -0,0 +1,29 @@ +from fastapi import Depends, FastAPI +from pydantic import BaseModel + +from faststream.redis.fastapi import RedisRouter + +router = RedisRouter("redis://localhost:6379") + + +class Incoming(BaseModel): + m: dict + + +def call(): + return True + + +@router.subscriber("test") +@router.publisher("response") +async def hello(m: Incoming, d=Depends(call)): + return {"response": "Hello, Redis!"} + + +@router.get("/") +async def hello_http(): + return "Hello, HTTP!" + + +app = FastAPI(lifespan=router.lifespan_context) +app.include_router(router) diff --git a/docs/docs_src/integrations/fastapi/redis/depends.py b/docs/docs_src/integrations/fastapi/redis/depends.py new file mode 100644 index 0000000000..11c3f82c46 --- /dev/null +++ b/docs/docs_src/integrations/fastapi/redis/depends.py @@ -0,0 +1,21 @@ +from fastapi import Depends, FastAPI +from typing_extensions import Annotated + +from faststream.redis import RedisBroker, fastapi + +router = fastapi.RedisRouter("redis://localhost:6379") + +app = FastAPI(lifespan=router.lifespan_context) + + +def broker(): + return router.broker + + +@router.get("/") +async def hello_http(broker: Annotated[RedisBroker, Depends(broker)]): + await broker.publish("Hello, Redis!", "test") + return "Hello, HTTP!" 
+ + +app.include_router(router) diff --git a/docs/docs_src/integrations/fastapi/redis/send.py b/docs/docs_src/integrations/fastapi/redis/send.py new file mode 100644 index 0000000000..9dffe82e6d --- /dev/null +++ b/docs/docs_src/integrations/fastapi/redis/send.py @@ -0,0 +1,16 @@ +from fastapi import FastAPI + +from faststream.redis.fastapi import RedisRouter + +router = RedisRouter("redis://localhost:6379") + +app = FastAPI(lifespan=router.lifespan_context) + + +@router.get("/") +async def hello_http(): + await router.broker.publish("Hello, Redis!", "test") + return "Hello, HTTP!" + + +app.include_router(router) diff --git a/docs/docs_src/integrations/fastapi/redis/startup.py b/docs/docs_src/integrations/fastapi/redis/startup.py new file mode 100644 index 0000000000..00dc0906aa --- /dev/null +++ b/docs/docs_src/integrations/fastapi/redis/startup.py @@ -0,0 +1,19 @@ +from fastapi import FastAPI + +from faststream.redis.fastapi import RedisRouter + +router = RedisRouter("redis://localhost:6379") + + +@router.subscriber("test") +async def hello(msg: str): + return {"response": "Hello, Redis!"} + + +@router.after_startup +async def test(app: FastAPI): + await router.broker.publish("Hello!", "test") + + +app = FastAPI(lifespan=router.lifespan_context) +app.include_router(router) diff --git a/docs/docs_src/integrations/fastapi/redis/test.py b/docs/docs_src/integrations/fastapi/redis/test.py new file mode 100644 index 0000000000..8f9965a59f --- /dev/null +++ b/docs/docs_src/integrations/fastapi/redis/test.py @@ -0,0 +1,18 @@ +import pytest + +from faststream.redis import TestRedisBroker, fastapi + +router = fastapi.RedisRouter() + + +@router.subscriber("test") +async def handler(msg: str): + ... 
+ + +@pytest.mark.asyncio +async def test_router(): + async with TestRedisBroker(router.broker) as br: + await br.publish("Hi!", "test") + + handler.mock.assert_called_once_with("Hi!") diff --git a/docs/docs_src/redis/ack/__init__.py b/docs/docs_src/redis/ack/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/ack/errors.py b/docs/docs_src/redis/ack/errors.py new file mode 100644 index 0000000000..dd69a22581 --- /dev/null +++ b/docs/docs_src/redis/ack/errors.py @@ -0,0 +1,21 @@ +from faststream import FastStream +from faststream.exceptions import AckMessage +from faststream.redis import RedisBroker, StreamSub + +broker = RedisBroker("localhost:6379") +app = FastStream(broker) + + +@broker.subscriber(stream=StreamSub("test-stream", group="test-group", consumer="1")) +async def handle(body): + processing_logic(body) + + +def processing_logic(body): + if True: + raise AckMessage() + + +@app.after_startup +async def test_publishing(): + await broker.publish("Hello World!", "test-stream") diff --git a/docs/docs_src/redis/list_pub/__init__.py b/docs/docs_src/redis/list_pub/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/list_pub/app.py b/docs/docs_src/redis/list_pub/app.py new file mode 100644 index 0000000000..4fc51ab808 --- /dev/null +++ b/docs/docs_src/redis/list_pub/app.py @@ -0,0 +1,20 @@ +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + + +class Data(BaseModel): + data: NonNegativeFloat = Field( + ..., examples=[0.5], description="Float data example" + ) + + +broker = RedisBroker("localhost:6379") +app = FastStream(broker) + + +@broker.subscriber(list="input-list") +@broker.publisher(list="output-list") +async def on_input_data(msg: Data) -> Data: + return Data(data=msg.data + 1.0) diff --git a/docs/docs_src/redis/list_sub/__init__.py b/docs/docs_src/redis/list_sub/__init__.py new 
file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/list_sub/app.py b/docs/docs_src/redis/list_sub/app.py new file mode 100644 index 0000000000..ea69327f1a --- /dev/null +++ b/docs/docs_src/redis/list_sub/app.py @@ -0,0 +1,10 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(list="test-list") +async def handle(msg: str, logger: Logger): + logger.info(msg) diff --git a/docs/docs_src/redis/list_sub_batch/__init__.py b/docs/docs_src/redis/list_sub_batch/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/list_sub_batch/app.py b/docs/docs_src/redis/list_sub_batch/app.py new file mode 100644 index 0000000000..f2fac7a04d --- /dev/null +++ b/docs/docs_src/redis/list_sub_batch/app.py @@ -0,0 +1,10 @@ +from faststream import FastStream, Logger +from faststream.redis import ListSub, RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(list=ListSub("test-list", batch=True)) +async def handle(msg: list[str], logger: Logger): + logger.info(msg) diff --git a/docs/docs_src/redis/publish/__init__.py b/docs/docs_src/redis/publish/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/publish/publisher_decorator.py b/docs/docs_src/redis/publish/publisher_decorator.py new file mode 100644 index 0000000000..7b06191447 --- /dev/null +++ b/docs/docs_src/redis/publish/publisher_decorator.py @@ -0,0 +1,23 @@ +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + + +class Data(BaseModel): + data: NonNegativeFloat = Field( + ..., examples=[0.5], description="Float data example" + ) + + +broker = RedisBroker("localhost:6379") +app = FastStream(broker) + + +to_output_data = broker.publisher("output_data") + + +@to_output_data 
+@broker.subscriber("input_data") +async def on_input_data(msg: Data) -> Data: + return Data(data=msg.data + 1.0) diff --git a/docs/docs_src/redis/publish/publisher_object.py b/docs/docs_src/redis/publish/publisher_object.py new file mode 100644 index 0000000000..3acc9f0995 --- /dev/null +++ b/docs/docs_src/redis/publish/publisher_object.py @@ -0,0 +1,33 @@ +import pytest +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import FastStream, Logger +from faststream._compat import model_to_json +from faststream.redis import RedisBroker, TestRedisBroker + +broker = RedisBroker("localhost:6379") +app = FastStream(broker) + + +class Data(BaseModel): + data: NonNegativeFloat = Field( + ..., examples=[0.5], description="Float data example" + ) + +prepared_publisher = broker.publisher("input_data") + +@broker.subscriber("input_data") +async def handle_data(msg: Data, logger: Logger) -> None: + logger.info(f"handle_data({msg=})") + +@pytest.mark.asyncio +async def test_prepared_publish(): + async with TestRedisBroker(broker): + msg = Data(data=0.5) + + await prepared_publisher.publish( + model_to_json(msg), + headers={"content-type": "application/json"}, + ) + + handle_data.mock.assert_called_once_with(dict(msg)) diff --git a/docs/docs_src/redis/publish/raw_publish.py b/docs/docs_src/redis/publish/raw_publish.py new file mode 100644 index 0000000000..3855e1ebf7 --- /dev/null +++ b/docs/docs_src/redis/publish/raw_publish.py @@ -0,0 +1,35 @@ +import pytest +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import FastStream, Logger +from faststream._compat import model_to_json +from faststream.redis import RedisBroker, TestRedisBroker + + +class Data(BaseModel): + data: NonNegativeFloat = Field( + ..., examples=[0.5], description="Float data example" + ) + + +broker = RedisBroker("localhost:6379") +app = FastStream(broker) + + +@broker.subscriber("input_data") +async def on_input_data(msg: Data, logger: Logger) -> Data: + 
logger.info(f"on_input_data({msg=})") + + +@pytest.mark.asyncio +async def test_raw_publish(): + async with TestRedisBroker(broker): + msg = Data(data=0.5) + + await broker.publish( + model_to_json(msg), + "input_data", + headers={"content-type": "application/json"}, + ) + + on_input_data.mock.assert_called_once_with(dict(msg)) diff --git a/docs/docs_src/redis/rpc/__init__.py b/docs/docs_src/redis/rpc/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/rpc/app.py b/docs/docs_src/redis/rpc/app.py new file mode 100644 index 0000000000..c281d77729 --- /dev/null +++ b/docs/docs_src/redis/rpc/app.py @@ -0,0 +1,49 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + +broker = RedisBroker("localhost:6379") +app = FastStream(broker) + + +@broker.subscriber(channel="test-channel") +async def handle_channel(msg: str, logger: Logger): + logger.info(msg) + return msg + + +@broker.subscriber(list="test-list") +async def handle_list(msg: str, logger: Logger): + logger.info(msg) + return msg + + +@broker.subscriber(stream="test-stream") +async def handle_stream(msg: str, logger: Logger): + logger.info(msg) + return msg + + +@app.after_startup +async def t(): + msg = "Hi!" 
+ + assert msg == await broker.publish( + "Hi!", + channel="test-channel", + rpc=True, + rpc_timeout=3.0, + ) + + assert msg == await broker.publish( + "Hi!", + list="test-list", + rpc=True, + rpc_timeout=3.0, + ) + + assert msg == await broker.publish( + "Hi!", + stream="test-stream", + rpc=True, + rpc_timeout=3.0, + ) diff --git a/docs/docs_src/redis/stream_group/__init__.py b/docs/docs_src/redis/stream_group/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/stream_group/app.py b/docs/docs_src/redis/stream_group/app.py new file mode 100644 index 0000000000..c9866a8a40 --- /dev/null +++ b/docs/docs_src/redis/stream_group/app.py @@ -0,0 +1,15 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker, StreamSub + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(stream=StreamSub("test-stream", group="test-group", consumer="1")) +async def handle(msg: str, logger: Logger): + logger.info(msg) + + +@app.after_startup +async def t(): + await broker.publish("Hi!", stream="test-stream") diff --git a/docs/docs_src/redis/stream_pub/__init__.py b/docs/docs_src/redis/stream_pub/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/stream_pub/app.py b/docs/docs_src/redis/stream_pub/app.py new file mode 100644 index 0000000000..98f95f06f4 --- /dev/null +++ b/docs/docs_src/redis/stream_pub/app.py @@ -0,0 +1,20 @@ +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + + +class Data(BaseModel): + data: NonNegativeFloat = Field( + ..., examples=[0.5], description="Float data example" + ) + + +broker = RedisBroker("localhost:6379") +app = FastStream(broker) + + +@broker.subscriber(stream="input-stream") +@broker.publisher(stream="output-stream") +async def on_input_data(msg: Data) -> Data: + return Data(data=msg.data + 1.0) diff --git 
a/docs/docs_src/redis/stream_sub/__init__.py b/docs/docs_src/redis/stream_sub/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/stream_sub/app.py b/docs/docs_src/redis/stream_sub/app.py new file mode 100644 index 0000000000..364748a97e --- /dev/null +++ b/docs/docs_src/redis/stream_sub/app.py @@ -0,0 +1,10 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(stream="test-stream") +async def handle(msg: str, logger: Logger): + logger.info(msg) diff --git a/docs/docs_src/redis/stream_sub_batch/__init__.py b/docs/docs_src/redis/stream_sub_batch/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/stream_sub_batch/app.py b/docs/docs_src/redis/stream_sub_batch/app.py new file mode 100644 index 0000000000..fa29f16825 --- /dev/null +++ b/docs/docs_src/redis/stream_sub_batch/app.py @@ -0,0 +1,10 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker, StreamSub + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(stream=StreamSub("test-stream", batch=True)) +async def handle(msg: list[str], logger: Logger): + logger.info(msg) diff --git a/docs/docs_src/redis/subscribe/__init__.py b/docs/docs_src/redis/subscribe/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/redis/subscribe/channel_sub.py b/docs/docs_src/redis/subscribe/channel_sub.py new file mode 100644 index 0000000000..a14bbdf48a --- /dev/null +++ b/docs/docs_src/redis/subscribe/channel_sub.py @@ -0,0 +1,10 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber("test") +async def handle(msg: str, logger: Logger): + logger.info(msg) diff --git a/docs/docs_src/redis/subscribe/channel_sub_pattern.py 
b/docs/docs_src/redis/subscribe/channel_sub_pattern.py new file mode 100644 index 0000000000..87a55c4622 --- /dev/null +++ b/docs/docs_src/redis/subscribe/channel_sub_pattern.py @@ -0,0 +1,10 @@ +from faststream import FastStream, Logger, Path +from faststream.redis import PubSub, RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(channel=PubSub("test.*", pattern=True)) +async def handle_test(msg: str, logger: Logger): + logger.info(msg) diff --git a/docs/expand_markdown.py b/docs/expand_markdown.py index 85b991082b..a3d833a53b 100644 --- a/docs/expand_markdown.py +++ b/docs/expand_markdown.py @@ -48,7 +48,8 @@ def extract_lines(embedded_line): elif Path("./docs_src").exists(): to_expand_path = Path("./") / to_expand_path else: - raise ValueError(f"Couldn't find docs_src directory") + raise ValueError("Couldn't find docs_src directory") + return read_lines_from_file(to_expand_path, lines_spec) diff --git a/docs/includes/docker-compose.yaml b/docs/includes/docker-compose.yaml index 780dfaec45..4ed4ceef61 100644 --- a/docs/includes/docker-compose.yaml +++ b/docs/includes/docker-compose.yaml @@ -37,3 +37,11 @@ services: # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges security_opt: - no-new-privileges:true + # nosemgrep: yaml.docker-compose.security.writable-filesystem-service.writable-filesystem-service + redis: + image: redis:alpine + ports: + - 6379:6379 + # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges + security_opt: + - no-new-privileges:true diff --git a/docs/includes/en/watchfiles.md b/docs/includes/en/watchfiles.md new file mode 100644 index 0000000000..e521322acb --- /dev/null +++ b/docs/includes/en/watchfiles.md @@ -0,0 +1,5 @@ +Please, install [`watchfiles`](https://github.com/samuelcolvin/watchfiles){.external-link target="_blank"} if you want to use `--reload` feature + +```shell +pip install watchfiles +``` diff --git 
a/docs/includes/getting_started/cli/env.md b/docs/includes/getting_started/cli/env.md index 2524c5b93c..12a04d7167 100644 --- a/docs/includes/getting_started/cli/env.md +++ b/docs/includes/getting_started/cli/env.md @@ -12,3 +12,8 @@ ```python linenums="1" hl_lines="14-16" {!> docs_src/getting_started/cli/nats_context.py!} ``` + +=== "Redis" + ```python linenums="1" hl_lines="14-16" + {!> docs_src/getting_started/cli/redis_context.py!} + ``` diff --git a/docs/includes/getting_started/context/access.md b/docs/includes/getting_started/context/access.md index d2994baddc..b446ab2279 100644 --- a/docs/includes/getting_started/context/access.md +++ b/docs/includes/getting_started/context/access.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="1 12-15" - {!> docs_src/getting_started/context/existed_context_kafka.py [ln:1-2,10-11,14-23] !} + {!> docs_src/getting_started/context/kafka/existed_context.py [ln:1-2,10-11,14-23] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="1 12-15" - {!> docs_src/getting_started/context/existed_context_rabbit.py [ln:1-2,10-11,14-23] !} + {!> docs_src/getting_started/context/rabbit/existed_context.py [ln:1-2,10-11,14-23] !} ``` === "NATS" ```python linenums="1" hl_lines="1 12-15" - {!> docs_src/getting_started/context/existed_context_nats.py [ln:1-2,10-11,14-23] !} + {!> docs_src/getting_started/context/nats/existed_context.py [ln:1-2,10-11,14-23] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="1 12-15" + {!> docs_src/getting_started/context/redis/existed_context.py [ln:1-2,10-11,14-23] !} ``` diff --git a/docs/includes/getting_started/context/annotated.md b/docs/includes/getting_started/context/annotated.md index 1572475650..ea532300c6 100644 --- a/docs/includes/getting_started/context/annotated.md +++ b/docs/includes/getting_started/context/annotated.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="1 7 16" - {!> docs_src/getting_started/context/annotated_kafka.py !} + {!> 
docs_src/getting_started/context/kafka/annotated.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="1 7 16" - {!> docs_src/getting_started/context/annotated_rabbit.py !} + {!> docs_src/getting_started/context/rabbit/annotated.py !} ``` === "NATS" ```python linenums="1" hl_lines="1 7 16" - {!> docs_src/getting_started/context/annotated_nats.py !} + {!> docs_src/getting_started/context/nats/annotated.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="1 7 16" + {!> docs_src/getting_started/context/redis/annotated.py !} ``` diff --git a/docs/includes/getting_started/context/base.md b/docs/includes/getting_started/context/base.md index bf06d56fb3..411ca193e5 100644 --- a/docs/includes/getting_started/context/base.md +++ b/docs/includes/getting_started/context/base.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="1 11" - {!> docs_src/getting_started/context/base_kafka.py !} + {!> docs_src/getting_started/context/kafka/base.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="1 11" - {!> docs_src/getting_started/context/base_rabbit.py !} + {!> docs_src/getting_started/context/rabbit/base.py !} ``` === "NATS" ```python linenums="1" hl_lines="1 11" - {!> docs_src/getting_started/context/base_nats.py !} + {!> docs_src/getting_started/context/nats/base.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="1 11" + {!> docs_src/getting_started/context/redis/base.py !} ``` diff --git a/docs/includes/getting_started/context/cast.md b/docs/includes/getting_started/context/cast.md index 440c149959..ea36117ec3 100644 --- a/docs/includes/getting_started/context/cast.md +++ b/docs/includes/getting_started/context/cast.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="3 5" - {!> docs_src/getting_started/context/cast_kafka.py [ln:14-18] !} + {!> docs_src/getting_started/context/kafka/cast.py [ln:14-18] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="3 5" - {!> docs_src/getting_started/context/cast_rabbit.py 
[ln:14-18] !} + {!> docs_src/getting_started/context/rabbit/cast.py [ln:14-18] !} ``` === "NATS" ```python linenums="1" hl_lines="3 5" - {!> docs_src/getting_started/context/cast_nats.py [ln:14-18] !} + {!> docs_src/getting_started/context/nats/cast.py [ln:14-18] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="3 5" + {!> docs_src/getting_started/context/redis/cast.py [ln:14-18] !} ``` diff --git a/docs/includes/getting_started/context/custom_global.md b/docs/includes/getting_started/context/custom_global.md index 7fb5d3fcbb..c4a6465183 100644 --- a/docs/includes/getting_started/context/custom_global.md +++ b/docs/includes/getting_started/context/custom_global.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="9-10" - {!> docs_src/getting_started/context/custom_global_context_kafka.py [ln:1-5,16-18] !} + {!> docs_src/getting_started/context/kafka/custom_global_context.py [ln:1-5,16-18] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="9-10" - {!> docs_src/getting_started/context/custom_global_context_rabbit.py [ln:1-5,16-18] !} + {!> docs_src/getting_started/context/rabbit/custom_global_context.py [ln:1-5,16-18] !} ``` === "NATS" ```python linenums="1" hl_lines="9-10" - {!> docs_src/getting_started/context/custom_global_context_nats.py [ln:1-5,16-18] !} + {!> docs_src/getting_started/context/nats/custom_global_context.py [ln:1-5,16-18] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="9-10" + {!> docs_src/getting_started/context/redis/custom_global_context.py [ln:1-5,16-18] !} ``` diff --git a/docs/includes/getting_started/context/custom_global_2.md b/docs/includes/getting_started/context/custom_global_2.md index cb79f703fa..2fa97110c0 100644 --- a/docs/includes/getting_started/context/custom_global_2.md +++ b/docs/includes/getting_started/context/custom_global_2.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/context/custom_global_context_kafka.py [ln:8-13] !} + {!> 
docs_src/getting_started/context/kafka/custom_global_context.py [ln:8-13] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/context/custom_global_context_rabbit.py [ln:8-13] !} + {!> docs_src/getting_started/context/rabbit/custom_global_context.py [ln:8-13] !} ``` === "NATS" ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/context/custom_global_context_nats.py [ln:8-13] !} + {!> docs_src/getting_started/context/nats/custom_global_context.py [ln:8-13] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="4" + {!> docs_src/getting_started/context/redis/custom_global_context.py [ln:8-13] !} ``` diff --git a/docs/includes/getting_started/context/custom_local.md b/docs/includes/getting_started/context/custom_local.md index d07cb7e922..a37f3d1bcd 100644 --- a/docs/includes/getting_started/context/custom_local.md +++ b/docs/includes/getting_started/context/custom_local.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="15 19 21-22" - {!> docs_src/getting_started/context/custom_local_context_kafka.py !} + {!> docs_src/getting_started/context/kafka/custom_local_context.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="15 19 21-22" - {!> docs_src/getting_started/context/custom_local_context_rabbit.py !} + {!> docs_src/getting_started/context/rabbit/custom_local_context.py !} ``` === "NATS" ```python linenums="1" hl_lines="15 19 21-22" - {!> docs_src/getting_started/context/custom_local_context_nats.py !} + {!> docs_src/getting_started/context/nats/custom_local_context.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="15 19 21-22" + {!> docs_src/getting_started/context/redis/custom_local_context.py !} ``` diff --git a/docs/includes/getting_started/context/default.md b/docs/includes/getting_started/context/default.md index 17deabede0..10469b278f 100644 --- a/docs/includes/getting_started/context/default.md +++ b/docs/includes/getting_started/context/default.md @@ -1,14 +1,19 @@ === 
"Kafka" ```python linenums="1" hl_lines="3 5" - {!> docs_src/getting_started/context/default_arguments_kafka.py [ln:7-11] !} + {!> docs_src/getting_started/context/kafka/default_arguments.py [ln:7-11] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="3 5" - {!> docs_src/getting_started/context/default_arguments_rabbit.py [ln:7-11] !} + {!> docs_src/getting_started/context/rabbit/default_arguments.py [ln:7-11] !} ``` === "NATS" ```python linenums="1" hl_lines="3 5" - {!> docs_src/getting_started/context/default_arguments_nats.py [ln:7-11] !} + {!> docs_src/getting_started/context/nats/default_arguments.py [ln:7-11] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="3 5" + {!> docs_src/getting_started/context/redis/default_arguments.py [ln:7-11] !} ``` diff --git a/docs/includes/getting_started/context/existed_annotations.md b/docs/includes/getting_started/context/existed_annotations.md index 5484d18d44..73faf9bf17 100644 --- a/docs/includes/getting_started/context/existed_annotations.md +++ b/docs/includes/getting_started/context/existed_annotations.md @@ -1,7 +1,8 @@ === "Kafka" ```python from faststream.kafka.annotations import ( - Logger, ContextRepo, KafkaMessage, KafkaBroker, KafkaProducer + Logger, ContextRepo, KafkaMessage, + KafkaBroker, KafkaProducer, NoCast, ) ``` @@ -15,13 +16,14 @@ To use them, simply import and use them as subscriber argument annotations. ```python linenums="1" hl_lines="3-8 17-20" - {!> docs_src/getting_started/context/existed_context_kafka.py [ln:1-11,26-35] !} + {!> docs_src/getting_started/context/kafka/existed_context.py [ln:1-11,26-35] !} ``` === "RabbitMQ" ```python from faststream.rabbit.annotations import ( - Logger, ContextRepo, RabbitMessage, RabbitBroker, RabbitProducer + Logger, ContextRepo, RabbitMessage, + RabbitBroker, RabbitProducer, NoCast, ) ``` @@ -35,7 +37,7 @@ To use them, simply import and use them as subscriber argument annotations. 
```python linenums="1" hl_lines="3-8 17-20" - {!> docs_src/getting_started/context/existed_context_rabbit.py [ln:1-11,26-35] !} + {!> docs_src/getting_started/context/rabbit/existed_context.py [ln:1-11,26-35] !} ``` === "NATS" @@ -43,7 +45,7 @@ from faststream.nats.annotations import ( Logger, ContextRepo, NatsMessage, NatsBroker, NatsProducer, NatsJsProducer, - Client, JsClient, + Client, JsClient, NoCast, ) ``` @@ -56,5 +58,25 @@ To use them, simply import and use them as subscriber argument annotations. ```python linenums="1" hl_lines="3-8 17-20" - {!> docs_src/getting_started/context/existed_context_nats.py [ln:1-11,26-35] !} + {!> docs_src/getting_started/context/nats/existed_context.py [ln:1-11,26-35] !} + ``` + +=== "Redis" + ```python + from faststream.redis.annotations import ( + Logger, ContextRepo, RedisMessage, + RedisBroker, Redis, NoCast, + ) + ``` + + !!! tip "" + `faststream.redis.RedisMessage` is an alias to `faststream.redis.annotations.RedisMessage` + + ```python + from faststream.redis import RedisMessage + ``` + To use them, simply import and use them as subscriber argument annotations. 
+ + ```python linenums="1" hl_lines="3-8 17-20" + {!> docs_src/getting_started/context/redis/existed_context.py [ln:1-11,26-35] !} ``` diff --git a/docs/includes/getting_started/context/fields.md b/docs/includes/getting_started/context/fields.md index c808e174ed..9d522c432e 100644 --- a/docs/includes/getting_started/context/fields.md +++ b/docs/includes/getting_started/context/fields.md @@ -1,74 +1,99 @@ === "Kafka" ```python linenums="1" hl_lines="11-13" - {!> docs_src/getting_started/context/fields_access_kafka.py !} + {!> docs_src/getting_started/context/kafka/fields_access.py !} ``` {{ comment_1 }} ```python - {!> docs_src/getting_started/context/fields_access_kafka.py [ln:11] !} + {!> docs_src/getting_started/context/kafka/fields_access.py [ln:11] !} ``` {{ comment_2 }} ```python - {!> docs_src/getting_started/context/fields_access_kafka.py [ln:12] !} + {!> docs_src/getting_started/context/kafka/fields_access.py [ln:12] !} ``` {{ comment_3 }} ```python - {!> docs_src/getting_started/context/fields_access_kafka.py [ln:13] !} + {!> docs_src/getting_started/context/kafka/fields_access.py [ln:13] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="11-13" - {!> docs_src/getting_started/context/fields_access_rabbit.py !} + {!> docs_src/getting_started/context/rabbit/fields_access.py !} ``` {{ comment_1 }} ```python - {!> docs_src/getting_started/context/fields_access_rabbit.py [ln:11] !} + {!> docs_src/getting_started/context/rabbit/fields_access.py [ln:11] !} ``` {{ comment_2 }} ```python - {!> docs_src/getting_started/context/fields_access_rabbit.py [ln:12] !} + {!> docs_src/getting_started/context/rabbit/fields_access.py [ln:12] !} ``` {{ comment_3 }} ```python - {!> docs_src/getting_started/context/fields_access_rabbit.py [ln:13] !} + {!> docs_src/getting_started/context/rabbit/fields_access.py [ln:13] !} ``` === "NATS" ```python linenums="1" hl_lines="11-13" - {!> docs_src/getting_started/context/fields_access_nats.py !} + {!> 
docs_src/getting_started/context/nats/fields_access.py !} ``` {{ comment_1 }} ```python - {!> docs_src/getting_started/context/fields_access_nats.py [ln:11] !} + {!> docs_src/getting_started/context/nats/fields_access.py [ln:11] !} ``` {{ comment_2 }} ```python - {!> docs_src/getting_started/context/fields_access_nats.py [ln:12] !} + {!> docs_src/getting_started/context/nats/fields_access.py [ln:12] !} ``` {{ comment_3 }} ```python - {!> docs_src/getting_started/context/fields_access_nats.py [ln:13] !} + {!> docs_src/getting_started/context/nats/fields_access.py [ln:13] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="11-13" + {!> docs_src/getting_started/context/redis/fields_access.py !} + ``` + + {{ comment_1 }} + + ```python + {!> docs_src/getting_started/context/redis/fields_access.py [ln:11] !} + ``` + + {{ comment_2 }} + + + ```python + {!> docs_src/getting_started/context/redis/fields_access.py [ln:12] !} + ``` + + {{ comment_3 }} + + + ```python + {!> docs_src/getting_started/context/redis/fields_access.py [ln:13] !} ``` diff --git a/docs/includes/getting_started/context/manual_local.md b/docs/includes/getting_started/context/manual_local.md index 865ed11057..c569403cf9 100644 --- a/docs/includes/getting_started/context/manual_local.md +++ b/docs/includes/getting_started/context/manual_local.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="1 14 25" - {!> docs_src/getting_started/context/manual_local_context_kafka.py !} + {!> docs_src/getting_started/context/kafka/manual_local_context.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="1 14 25" - {!> docs_src/getting_started/context/manual_local_context_rabbit.py !} + {!> docs_src/getting_started/context/rabbit/manual_local_context.py !} ``` === "NATS" ```python linenums="1" hl_lines="1 14 25" - {!> docs_src/getting_started/context/manual_local_context_nats.py !} + {!> docs_src/getting_started/context/nats/manual_local_context.py !} + ``` + +=== "Redis" + ```python 
linenums="1" hl_lines="1 14 25" + {!> docs_src/getting_started/context/redis/manual_local_context.py !} ``` diff --git a/docs/includes/getting_started/context/not_cast.md b/docs/includes/getting_started/context/not_cast.md index ffe577ef33..98617f931b 100644 --- a/docs/includes/getting_started/context/not_cast.md +++ b/docs/includes/getting_started/context/not_cast.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="6 10 12" - {!> docs_src/getting_started/context/cast_kafka.py [ln:1-12] !} + {!> docs_src/getting_started/context/kafka/cast.py [ln:1-12] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="6 10 12" - {!> docs_src/getting_started/context/cast_rabbit.py [ln:1-12] !} + {!> docs_src/getting_started/context/rabbit/cast.py [ln:1-12] !} ``` === "NATS" ```python linenums="1" hl_lines="6 10 12" - {!> docs_src/getting_started/context/cast_nats.py [ln:1-12] !} + {!> docs_src/getting_started/context/nats/cast.py [ln:1-12] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="6 10 12" + {!> docs_src/getting_started/context/redis/cast.py [ln:1-12] !} ``` diff --git a/docs/includes/getting_started/dependencies/1.md b/docs/includes/getting_started/dependencies/1.md index 5c3b23ca74..d64668d206 100644 --- a/docs/includes/getting_started/dependencies/1.md +++ b/docs/includes/getting_started/dependencies/1.md @@ -15,3 +15,9 @@ from faststream.nats import NatsBroker broker = NatsBroker(..., apply_types=False) ``` + +=== "Redis" + ```python + from faststream.redis import RedisBroker + broker = RedisBroker(..., apply_types=False) + ``` diff --git a/docs/includes/getting_started/dependencies/2.md b/docs/includes/getting_started/dependencies/2.md index 2f1f062f9f..cc354f958e 100644 --- a/docs/includes/getting_started/dependencies/2.md +++ b/docs/includes/getting_started/dependencies/2.md @@ -12,3 +12,8 @@ ```python linenums="1" hl_lines="7-8" {!> docs_src/getting_started/dependencies/basic/nats/depends.py !} ``` + +=== "Redis" + ```python linenums="1" 
hl_lines="7-8" + {!> docs_src/getting_started/dependencies/basic/redis/depends.py !} + ``` diff --git a/docs/includes/getting_started/dependencies/3.md b/docs/includes/getting_started/dependencies/3.md index cbfeb7a380..9558227a80 100644 --- a/docs/includes/getting_started/dependencies/3.md +++ b/docs/includes/getting_started/dependencies/3.md @@ -12,3 +12,8 @@ ```python linenums="11" hl_lines="1" {!> docs_src/getting_started/dependencies/basic/nats/depends.py [ln:11-12] !} ``` + +=== "Redis" + ```python linenums="11" hl_lines="1" + {!> docs_src/getting_started/dependencies/basic/redis/depends.py [ln:11-12] !} + ``` diff --git a/docs/includes/getting_started/dependencies/4.md b/docs/includes/getting_started/dependencies/4.md index e724d8e0ca..9ebc21c6b3 100644 --- a/docs/includes/getting_started/dependencies/4.md +++ b/docs/includes/getting_started/dependencies/4.md @@ -12,3 +12,8 @@ ```python linenums="11" hl_lines="2" {!> docs_src/getting_started/dependencies/basic/nats/depends.py [ln:11-12] !} ``` + +=== "Redis" + ```python linenums="11" hl_lines="2" + {!> docs_src/getting_started/dependencies/basic/redis/depends.py [ln:11-12] !} + ``` diff --git a/docs/includes/getting_started/dependencies/5.md b/docs/includes/getting_started/dependencies/5.md index 7dea8b4846..8fb28276c7 100644 --- a/docs/includes/getting_started/dependencies/5.md +++ b/docs/includes/getting_started/dependencies/5.md @@ -18,3 +18,10 @@ ``` 1. {{ nested }} + +=== "Redis" + ```python linenums="1" hl_lines="7-8 10-11 16-17" + {!> docs_src/getting_started/dependencies/basic/redis/nested_depends.py !} + ``` + + 1. 
{{ nested }} diff --git a/docs/includes/getting_started/index/base.md b/docs/includes/getting_started/index/base.md index 7e4760ae1d..aed1dc0675 100644 --- a/docs/includes/getting_started/index/base.md +++ b/docs/includes/getting_started/index/base.md @@ -12,3 +12,8 @@ ```python linenums="1" title="serve.py" {!> docs_src/getting_started/index/base_nats.py!} ``` + +=== "Redis" + ```python linenums="1" title="serve.py" + {!> docs_src/getting_started/index/base_redis.py!} + ``` diff --git a/docs/includes/getting_started/index/install.md b/docs/includes/getting_started/index/install.md index 4c972ad721..d2589476ab 100644 --- a/docs/includes/getting_started/index/install.md +++ b/docs/includes/getting_started/index/install.md @@ -42,3 +42,14 @@ ```bash bash docker run -d --rm -p 4222:4222 --name test-mq nats -js ``` + +=== "Redis" + ```console + pip install "faststream[redis]" + ``` + + !!! tip + {{ run_docker }} + ```bash + bash docker run -d --rm -p 6379:6379 --name test-mq redis + ``` diff --git a/docs/includes/getting_started/integrations/fastapi/1.md b/docs/includes/getting_started/integrations/fastapi/1.md index 047132f71c..0cdff212b0 100644 --- a/docs/includes/getting_started/integrations/fastapi/1.md +++ b/docs/includes/getting_started/integrations/fastapi/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="4 6 17-20 28-29" - {!> docs_src/integrations/fastapi/base_kafka.py !} + {!> docs_src/integrations/fastapi/kafka/base.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="4 6 17-20 28-29" - {!> docs_src/integrations/fastapi/base_rabbit.py !} + {!> docs_src/integrations/fastapi/rabbit/base.py !} ``` === "NATS" ```python linenums="1" hl_lines="4 6 17-20 28-29" - {!> docs_src/integrations/fastapi/base_nats.py !} + {!> docs_src/integrations/fastapi/nats/base.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="4 6 17-20 28-29" + {!> docs_src/integrations/fastapi/redis/base.py !} ``` diff --git 
a/docs/includes/getting_started/integrations/fastapi/2.md b/docs/includes/getting_started/integrations/fastapi/2.md index 75ef77f290..cd95414e64 100644 --- a/docs/includes/getting_started/integrations/fastapi/2.md +++ b/docs/includes/getting_started/integrations/fastapi/2.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="12" - {!> docs_src/integrations/fastapi/send_kafka.py !} + {!> docs_src/integrations/fastapi/kafka/send.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="12" - {!> docs_src/integrations/fastapi/send_rabbit.py !} + {!> docs_src/integrations/fastapi/rabbit/send.py !} ``` === "NATS" ```python linenums="1" hl_lines="12" - {!> docs_src/integrations/fastapi/send_nats.py !} + {!> docs_src/integrations/fastapi/nats/send.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="12" + {!> docs_src/integrations/fastapi/redis/send.py !} ``` diff --git a/docs/includes/getting_started/integrations/fastapi/3.md b/docs/includes/getting_started/integrations/fastapi/3.md index edabbe976e..855f6d0876 100644 --- a/docs/includes/getting_started/integrations/fastapi/3.md +++ b/docs/includes/getting_started/integrations/fastapi/3.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="11-12 16-17" - {!> docs_src/integrations/fastapi/depends_kafka.py !} + {!> docs_src/integrations/fastapi/kafka/depends.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="11-12 16-17" - {!> docs_src/integrations/fastapi/depends_rabbit.py !} + {!> docs_src/integrations/fastapi/rabbit/depends.py !} ``` === "NATS" ```python linenums="1" hl_lines="11-12 16-17" - {!> docs_src/integrations/fastapi/depends_nats.py !} + {!> docs_src/integrations/fastapi/nats/depends.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="11-12 16-17" + {!> docs_src/integrations/fastapi/redis/depends.py !} ``` diff --git a/docs/includes/getting_started/integrations/fastapi/4.md b/docs/includes/getting_started/integrations/fastapi/4.md index 890fd8a1ec..5a2e3853b0 
100644 --- a/docs/includes/getting_started/integrations/fastapi/4.md +++ b/docs/includes/getting_started/integrations/fastapi/4.md @@ -1,17 +1,23 @@ === "Kafka" ```python linenums="1" hl_lines="13-15" - {!> docs_src/integrations/fastapi/startup_kafka.py !} + {!> docs_src/integrations/fastapi/kafka/startup.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="13-15" - {!> docs_src/integrations/fastapi/startup_rabbit.py !} + {!> docs_src/integrations/fastapi/rabbit/startup.py !} ``` === "NATS" ```python linenums="1" hl_lines="13-15" - {!> docs_src/integrations/fastapi/startup_nats.py !} + {!> docs_src/integrations/fastapi/nats/startup.py !} + ``` + +=== "Redis" + + ```python linenums="1" hl_lines="13-15" + {!> docs_src/integrations/fastapi/redis/startup.py !} ``` diff --git a/docs/includes/getting_started/integrations/fastapi/5.md b/docs/includes/getting_started/integrations/fastapi/5.md index 065b27ba66..61099762e4 100644 --- a/docs/includes/getting_started/integrations/fastapi/5.md +++ b/docs/includes/getting_started/integrations/fastapi/5.md @@ -30,3 +30,14 @@ include_in_schema=True, ) ``` + +=== "Redis" + ```python + from faststream.redis.fastapi import RedisRouter + + router = RedisRouter( + ..., + schema_url="/asyncapi", + include_in_schema=True, + ) + ``` diff --git a/docs/includes/getting_started/integrations/fastapi/6.md b/docs/includes/getting_started/integrations/fastapi/6.md index 024f09aecd..7e33d59675 100644 --- a/docs/includes/getting_started/integrations/fastapi/6.md +++ b/docs/includes/getting_started/integrations/fastapi/6.md @@ -1,17 +1,23 @@ === "Kafka" ```python linenums="1" hl_lines="3 5 13-16" - {!> docs_src/integrations/fastapi/test_kafka.py !} + {!> docs_src/integrations/fastapi/kafka/test.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="3 5 13-16" - {!> docs_src/integrations/fastapi/test_rabbit.py !} + {!> docs_src/integrations/fastapi/rabbit/test.py !} ``` === "NATS" ```python linenums="1" hl_lines="3 5 13-16" - {!> 
docs_src/integrations/fastapi/test_nats.py !} + {!> docs_src/integrations/fastapi/nats/test.py !} + ``` + +=== "Redis" + + ```python linenums="1" hl_lines="3 5 13-16" + {!> docs_src/integrations/fastapi/redis/test.py !} ``` diff --git a/docs/includes/getting_started/lifespan/1.md b/docs/includes/getting_started/lifespan/1.md index 5575f9913c..10e88fbb4f 100644 --- a/docs/includes/getting_started/lifespan/1.md +++ b/docs/includes/getting_started/lifespan/1.md @@ -12,3 +12,8 @@ ```python linenums="1" hl_lines="14-18" {!> docs_src/getting_started/lifespan/nats/basic.py!} ``` + +=== "Redis" + ```python linenums="1" hl_lines="14-18" + {!> docs_src/getting_started/lifespan/redis/basic.py!} + ``` diff --git a/docs/includes/getting_started/lifespan/2.md b/docs/includes/getting_started/lifespan/2.md index a42668b1e7..8838459fb1 100644 --- a/docs/includes/getting_started/lifespan/2.md +++ b/docs/includes/getting_started/lifespan/2.md @@ -12,3 +12,8 @@ ```python linenums="14" hl_lines="1" {!> docs_src/getting_started/lifespan/nats/basic.py [ln:14-18]!} ``` + +=== "Redis" + ```python linenums="14" hl_lines="1" + {!> docs_src/getting_started/lifespan/redis/basic.py [ln:14-18]!} + ``` diff --git a/docs/includes/getting_started/lifespan/3.md b/docs/includes/getting_started/lifespan/3.md index 7026b047a5..66c3b1c414 100644 --- a/docs/includes/getting_started/lifespan/3.md +++ b/docs/includes/getting_started/lifespan/3.md @@ -12,3 +12,8 @@ ```python linenums="14" hl_lines="2" {!> docs_src/getting_started/lifespan/nats/basic.py [ln:14-18]!} ``` + +=== "Redis" + ```python linenums="14" hl_lines="2" + {!> docs_src/getting_started/lifespan/redis/basic.py [ln:14-18]!} + ``` diff --git a/docs/includes/getting_started/lifespan/4.md b/docs/includes/getting_started/lifespan/4.md index d3efe20ae0..668f90ef6e 100644 --- a/docs/includes/getting_started/lifespan/4.md +++ b/docs/includes/getting_started/lifespan/4.md @@ -12,3 +12,8 @@ ```python linenums="14" hl_lines="3" {!> 
docs_src/getting_started/lifespan/nats/basic.py [ln:14-18] !} ``` + +=== "Redis" + ```python linenums="14" hl_lines="3" + {!> docs_src/getting_started/lifespan/redis/basic.py [ln:14-18] !} + ``` diff --git a/docs/includes/getting_started/lifespan/5.md b/docs/includes/getting_started/lifespan/5.md index 850a3cd5f6..6d029271cf 100644 --- a/docs/includes/getting_started/lifespan/5.md +++ b/docs/includes/getting_started/lifespan/5.md @@ -12,3 +12,8 @@ ```python linenums="14" hl_lines="4" {!> docs_src/getting_started/lifespan/nats/basic.py [ln:14-18] !} ``` + +=== "Redis" + ```python linenums="14" hl_lines="4" + {!> docs_src/getting_started/lifespan/redis/basic.py [ln:14-18] !} + ``` diff --git a/docs/includes/getting_started/lifespan/6.md b/docs/includes/getting_started/lifespan/6.md index 02838ec650..8499e7090e 100644 --- a/docs/includes/getting_started/lifespan/6.md +++ b/docs/includes/getting_started/lifespan/6.md @@ -12,3 +12,8 @@ ```python linenums="14" hl_lines="5" {!> docs_src/getting_started/lifespan/nats/basic.py [ln:14-18] !} ``` + +=== "Redis" + ```python linenums="14" hl_lines="5" + {!> docs_src/getting_started/lifespan/redis/basic.py [ln:14-18] !} + ``` diff --git a/docs/includes/getting_started/lifespan/7.md b/docs/includes/getting_started/lifespan/7.md index 1ee6b3e6d2..6d7f854305 100644 --- a/docs/includes/getting_started/lifespan/7.md +++ b/docs/includes/getting_started/lifespan/7.md @@ -12,3 +12,8 @@ ```python linenums="1" hl_lines="14 21" {!> docs_src/getting_started/lifespan/nats/ml.py!} ``` + +=== "Redis" + ```python linenums="1" hl_lines="14 21" + {!> docs_src/getting_started/lifespan/redis/ml.py!} + ``` diff --git a/docs/includes/getting_started/lifespan/ml_context.md b/docs/includes/getting_started/lifespan/ml_context.md new file mode 100644 index 0000000000..a80c18fffd --- /dev/null +++ b/docs/includes/getting_started/lifespan/ml_context.md @@ -0,0 +1,19 @@ +=== "Kafka" + ```python linenums="1" hl_lines="16-17 22" + {!> 
docs_src/getting_started/lifespan/kafka/ml_context.py!} + ``` + +=== "RabbitMQ" + ```python linenums="1" hl_lines="16-17 22" + {!> docs_src/getting_started/lifespan/rabbit/ml_context.py!} + ``` + +=== "NATS" + ```python linenums="1" hl_lines="16-17 22" + {!> docs_src/getting_started/lifespan/nats/ml_context.py!} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="16-17 22" + {!> docs_src/getting_started/lifespan/redis/ml_context.py!} + ``` diff --git a/docs/includes/getting_started/lifespan/testing.md b/docs/includes/getting_started/lifespan/testing.md index 0a704c7b82..68238acd54 100644 --- a/docs/includes/getting_started/lifespan/testing.md +++ b/docs/includes/getting_started/lifespan/testing.md @@ -1,14 +1,19 @@ === "Kafka" - ```python linenums="1" hl_lines="3 16-17" + ```python linenums="1" hl_lines="3 18" {!> docs_src/getting_started/lifespan/kafka/testing.py !} ``` === "RabbitMQ" - ```python linenums="1" hl_lines="3 16-17" + ```python linenums="1" hl_lines="3 18" {!> docs_src/getting_started/lifespan/rabbit/testing.py !} ``` === "NATS" - ```python linenums="1" hl_lines="3 16-17" + ```python linenums="1" hl_lines="3 18" {!> docs_src/getting_started/lifespan/nats/testing.py !} ``` + +=== "Redis" + ```python linenums="1" hl_lines="3 18" + {!> docs_src/getting_started/lifespan/redis/testing.py !} + ``` diff --git a/docs/includes/getting_started/publishing/broker/1.md b/docs/includes/getting_started/publishing/broker/1.md index 9160d4edf7..e3922258d2 100644 --- a/docs/includes/getting_started/publishing/broker/1.md +++ b/docs/includes/getting_started/publishing/broker/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" - {!> docs_src/getting_started/publishing/broker_kafka.py !} + {!> docs_src/getting_started/publishing/kafka/broker.py !} ``` === "RabbitMQ" ```python linenums="1" - {!> docs_src/getting_started/publishing/broker_rabbit.py !} + {!> docs_src/getting_started/publishing/rabbit/broker.py !} ``` === "NATS" ```python linenums="1" - {!> 
docs_src/getting_started/publishing/broker_nats.py !} + {!> docs_src/getting_started/publishing/nats/broker.py !} + ``` + +=== "Redis" + ```python linenums="1" + {!> docs_src/getting_started/publishing/redis/broker.py !} ``` diff --git a/docs/includes/getting_started/publishing/decorator/1.md b/docs/includes/getting_started/publishing/decorator/1.md index 55b9394212..3689791fd2 100644 --- a/docs/includes/getting_started/publishing/decorator/1.md +++ b/docs/includes/getting_started/publishing/decorator/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" - {!> docs_src/getting_started/publishing/decorator_kafka.py !} + {!> docs_src/getting_started/publishing/kafka/decorator.py !} ``` === "RabbitMQ" ```python linenums="1" - {!> docs_src/getting_started/publishing/decorator_rabbit.py !} + {!> docs_src/getting_started/publishing/rabbit/decorator.py !} ``` === "NATS" ```python linenums="1" - {!> docs_src/getting_started/publishing/decorator_nats.py !} + {!> docs_src/getting_started/publishing/nats/decorator.py !} + ``` + +=== "Redis" + ```python linenums="1" + {!> docs_src/getting_started/publishing/redis/decorator.py !} ``` diff --git a/docs/includes/getting_started/publishing/direct/1.md b/docs/includes/getting_started/publishing/direct/1.md index 63685ebe6d..bf8249c88d 100644 --- a/docs/includes/getting_started/publishing/direct/1.md +++ b/docs/includes/getting_started/publishing/direct/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" - {!> docs_src/getting_started/publishing/direct_kafka.py !} + {!> docs_src/getting_started/publishing/kafka/direct.py !} ``` === "RabbitMQ" ```python linenums="1" - {!> docs_src/getting_started/publishing/direct_rabbit.py !} + {!> docs_src/getting_started/publishing/rabbit/direct.py !} ``` === "NATS" ```python linenums="1" - {!> docs_src/getting_started/publishing/direct_nats.py !} + {!> docs_src/getting_started/publishing/nats/direct.py !} + ``` + +=== "Redis" + ```python linenums="1" + {!> 
docs_src/getting_started/publishing/redis/direct.py !} ``` diff --git a/docs/includes/getting_started/publishing/index.md b/docs/includes/getting_started/publishing/index.md index ef915f174f..ffcb5f8ceb 100644 --- a/docs/includes/getting_started/publishing/index.md +++ b/docs/includes/getting_started/publishing/index.md @@ -10,9 +10,14 @@ await br.publish("message", "queue") ``` - === "NATS" ```python async with NatsBroker() as br: await br.publish("message", "subject") ``` + +=== "Redis" + ```python + async with RedisBroker() as br: + await br.publish("message", "channel") + ``` diff --git a/docs/includes/getting_started/publishing/object/1.md b/docs/includes/getting_started/publishing/object/1.md index 0e481096ea..eb9cc85c39 100644 --- a/docs/includes/getting_started/publishing/object/1.md +++ b/docs/includes/getting_started/publishing/object/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" - {!> docs_src/getting_started/publishing/object_kafka.py !} + {!> docs_src/getting_started/publishing/kafka/object.py !} ``` === "RabbitMQ" ```python linenums="1" - {!> docs_src/getting_started/publishing/object_rabbit.py !} + {!> docs_src/getting_started/publishing/rabbit/object.py !} ``` === "NATS" ```python linenums="1" - {!> docs_src/getting_started/publishing/object_nats.py !} + {!> docs_src/getting_started/publishing/nats/object.py !} + ``` + +=== "Redis" + ```python linenums="1" + {!> docs_src/getting_started/publishing/redis/object.py !} ``` diff --git a/docs/includes/getting_started/publishing/testing/1.md b/docs/includes/getting_started/publishing/testing/1.md index 2dc92b01d4..dbe41b71c9 100644 --- a/docs/includes/getting_started/publishing/testing/1.md +++ b/docs/includes/getting_started/publishing/testing/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" - {!> docs_src/getting_started/publishing/object_kafka.py[ln:7-12] !} + {!> docs_src/getting_started/publishing/kafka/object.py[ln:7-12] !} ``` === "RabbitMQ" ```python linenums="1" - {!> 
docs_src/getting_started/publishing/object_rabbit.py[ln:7-12] !} + {!> docs_src/getting_started/publishing/rabbit/object.py[ln:7-12] !} ``` === "NATS" ```python linenums="1" - {!> docs_src/getting_started/publishing/object_nats.py[ln:7-12] !} + {!> docs_src/getting_started/publishing/nats/object.py[ln:7-12] !} + ``` + +=== "Redis" + ```python linenums="1" + {!> docs_src/getting_started/publishing/redis/object.py[ln:7-12] !} ``` diff --git a/docs/includes/getting_started/publishing/testing/2.md b/docs/includes/getting_started/publishing/testing/2.md index 6f3b2d0453..f17565632f 100644 --- a/docs/includes/getting_started/publishing/testing/2.md +++ b/docs/includes/getting_started/publishing/testing/2.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" - {!> docs_src/getting_started/publishing/direct_kafka.py[ln:7-11] !} + {!> docs_src/getting_started/publishing/kafka/direct.py[ln:7-11] !} ``` === "RabbitMQ" ```python linenums="1" - {!> docs_src/getting_started/publishing/direct_rabbit.py[ln:7-11] !} + {!> docs_src/getting_started/publishing/rabbit/direct.py[ln:7-11] !} ``` === "NATS" ```python linenums="1" - {!> docs_src/getting_started/publishing/direct_nats.py[ln:7-11] !} + {!> docs_src/getting_started/publishing/nats/direct.py[ln:7-11] !} + ``` + +=== "Redis" + ```python linenums="1" + {!> docs_src/getting_started/publishing/redis/direct.py[ln:7-11] !} ``` diff --git a/docs/includes/getting_started/publishing/testing/3.md b/docs/includes/getting_started/publishing/testing/3.md index 8b4ec2c05c..2c34b8ece9 100644 --- a/docs/includes/getting_started/publishing/testing/3.md +++ b/docs/includes/getting_started/publishing/testing/3.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" - {!> docs_src/getting_started/publishing/object_kafka_testing.py [ln:1-3,8-12] !} + {!> docs_src/getting_started/publishing/kafka/object_testing.py [ln:1-3,8-12] !} ``` === "RabbitMQ" ```python linenums="1" - {!> docs_src/getting_started/publishing/object_rabbit_testing.py 
[ln:1-3,8-12] !} + {!> docs_src/getting_started/publishing/rabbit/object_testing.py [ln:1-3,8-12] !} ``` === "NATS" ```python linenums="1" - {!> docs_src/getting_started/publishing/object_nats_testing.py [ln:1-3,8-12] !} + {!> docs_src/getting_started/publishing/nats/object_testing.py [ln:1-3,8-12] !} + ``` + +=== "Redis" + ```python linenums="1" + {!> docs_src/getting_started/publishing/redis/object_testing.py [ln:1-3,8-12] !} ``` diff --git a/docs/includes/getting_started/routers/1.md b/docs/includes/getting_started/routers/1.md index 0dffa1a2e4..27c9a1e50b 100644 --- a/docs/includes/getting_started/routers/1.md +++ b/docs/includes/getting_started/routers/1.md @@ -1,65 +1,85 @@ === "Kafka" ```python hl_lines="2 6 9-10 17 22" - {!> docs_src/getting_started/routers/router_kafka.py [ln:1-6] !} + {!> docs_src/getting_started/routers/kafka/router.py [ln:1-6] !} ``` === "RabbitMQ" ```python hl_lines="2 6 9-10 17 22" - {!> docs_src/getting_started/routers/router_rabbit.py [ln:1-6] !} + {!> docs_src/getting_started/routers/rabbit/router.py [ln:1-6] !} ``` === "NATS" ```python hl_lines="2 6 9-10 17 22" - {!> docs_src/getting_started/routers/router_nats.py [ln:1-6] !} + {!> docs_src/getting_started/routers/nats/router.py [ln:1-6] !} + ``` + +=== "Redis" + ```python hl_lines="2 6 9-10 17 22" + {!> docs_src/getting_started/routers/redis/router.py [ln:1-6] !} ``` {{ note_decor }} === "Kafka" ```python hl_lines="1-2 9" - {!> docs_src/getting_started/routers/router_kafka.py [ln:9-19] !} + {!> docs_src/getting_started/routers/kafka/router.py [ln:9-19] !} ``` === "RabbitMQ" ```python hl_lines="1-2 9" - {!> docs_src/getting_started/routers/router_rabbit.py [ln:9-19] !} + {!> docs_src/getting_started/routers/rabbit/router.py [ln:9-19] !} ``` === "NATS" ```python hl_lines="1-2 9" - {!> docs_src/getting_started/routers/router_nats.py [ln:9-19] !} + {!> docs_src/getting_started/routers/nats/router.py [ln:9-19] !} + ``` + +=== "Redis" + ```python hl_lines="1-2 9" + {!> 
docs_src/getting_started/routers/redis/router.py [ln:9-19] !} ``` {{ note_include }} === "Kafka" ```python hl_lines="1" - {!> docs_src/getting_started/routers/router_kafka.py [ln:22] !} + {!> docs_src/getting_started/routers/kafka/router.py [ln:22] !} ``` === "RabbitMQ" ```python hl_lines="1" - {!> docs_src/getting_started/routers/router_rabbit.py [ln:22] !} + {!> docs_src/getting_started/routers/rabbit/router.py [ln:22] !} ``` === "NATS" ```python hl_lines="1" - {!> docs_src/getting_started/routers/router_nats.py [ln:22] !} + {!> docs_src/getting_started/routers/nats/router.py [ln:22] !} + ``` + +=== "Redis" + ```python hl_lines="1" + {!> docs_src/getting_started/routers/redis/router.py [ln:22] !} ``` {{ note_publish }} === "Kafka" ```python hl_lines="3" - {!> docs_src/getting_started/routers/router_kafka.py [ln:27-30] !} + {!> docs_src/getting_started/routers/kafka/router.py [ln:27-30] !} ``` === "RabbitMQ" ```python hl_lines="3" - {!> docs_src/getting_started/routers/router_rabbit.py [ln:27-30] !} + {!> docs_src/getting_started/routers/rabbit/router.py [ln:27-30] !} ``` === "NATS" ```python hl_lines="3" - {!> docs_src/getting_started/routers/router_nats.py [ln:27-30] !} + {!> docs_src/getting_started/routers/nats/router.py [ln:27-30] !} + ``` + +=== "Redis" + ```python hl_lines="3" + {!> docs_src/getting_started/routers/redis/router.py [ln:27-30] !} ``` diff --git a/docs/includes/getting_started/routers/2.md b/docs/includes/getting_started/routers/2.md index 74be0b5dd0..06da49f27a 100644 --- a/docs/includes/getting_started/routers/2.md +++ b/docs/includes/getting_started/routers/2.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="2 8 13 15" - {!> docs_src/getting_started/routers/router_delay_kafka.py [ln:1-15] !} + {!> docs_src/getting_started/routers/kafka/router_delay.py [ln:1-15] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="2 8 13 15" - {!> docs_src/getting_started/routers/router_delay_rabbit.py [ln:1-15] !} + {!> 
docs_src/getting_started/routers/rabbit/router_delay.py [ln:1-15] !} ``` === "NATS" ```python linenums="1" hl_lines="2 8 13 15" - {!> docs_src/getting_started/routers/router_delay_nats.py [ln:1-15] !} + {!> docs_src/getting_started/routers/nats/router_delay.py [ln:1-15] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="2 8 13 15" + {!> docs_src/getting_started/routers/redis/router_delay.py [ln:1-15] !} ``` diff --git a/docs/includes/getting_started/serialization/decoder/1.md b/docs/includes/getting_started/serialization/decoder/1.md index a135ad7ddf..74149351bc 100644 --- a/docs/includes/getting_started/serialization/decoder/1.md +++ b/docs/includes/getting_started/serialization/decoder/1.md @@ -24,3 +24,12 @@ def decoder(msg: NatsMessage) -> DecodedMessage: ... ``` + +=== "Redis" + ``` python + from faststream.types import DecodedMessage + from faststream.redis import RedisMessage + + def decoder(msg: RedisMessage) -> DecodedMessage: + ... + ``` diff --git a/docs/includes/getting_started/serialization/decoder/2.md b/docs/includes/getting_started/serialization/decoder/2.md index 9925e64adb..277ab83a55 100644 --- a/docs/includes/getting_started/serialization/decoder/2.md +++ b/docs/includes/getting_started/serialization/decoder/2.md @@ -36,3 +36,16 @@ ) -> DecodedMessage: return await original_decoder(msg) ``` + +=== "Redis" + ``` python + from types import Callable, Awaitable + from faststream.types import DecodedMessage + from faststream.redis import RedisMessage + + async def decoder( + msg: RedisMessage, + original_decoder: Callable[[RedisMessage], Awaitable[DecodedMessage]], + ) -> DecodedMessage: + return await original_decoder(msg) + ``` diff --git a/docs/includes/getting_started/serialization/parser/1.md b/docs/includes/getting_started/serialization/parser/1.md index 5981be4d79..3583383e8a 100644 --- a/docs/includes/getting_started/serialization/parser/1.md +++ b/docs/includes/getting_started/serialization/parser/1.md @@ -24,3 +24,12 @@ def 
parser(msg: Msg) -> NatsMessage: ... ``` + +=== "Redis" + ``` python + from faststream.redis import RedisMessage + from faststream.redis.message import PubSubMessage + + def parser(msg: PubSubMessage) -> RedisMessage: + ... + ``` diff --git a/docs/includes/getting_started/serialization/parser/2.md b/docs/includes/getting_started/serialization/parser/2.md index cd8b2a35a1..7275ff7053 100644 --- a/docs/includes/getting_started/serialization/parser/2.md +++ b/docs/includes/getting_started/serialization/parser/2.md @@ -33,6 +33,19 @@ async def parser( msg: Msg, original_parser: Callable[[Msg], Awaitable[NatsMessage]], - ) -> RabbitMessage: + ) -> NatsMessage: + return await original_parser(msg) + ``` + +=== "Redis" + ``` python + from types import Callable, Awaitable + from faststream.redis import RedisMessage + from faststream.redis.message import PubSubMessage + + async def parser( + msg: PubSubMessage, + original_parser: Callable[[PubSubMessage], Awaitable[RedisMessage]], + ) -> RedisMessage: return await original_parser(msg) ``` diff --git a/docs/includes/getting_started/serialization/parser/3.md b/docs/includes/getting_started/serialization/parser/3.md index 9b1c03f361..2dd970cdd1 100644 --- a/docs/includes/getting_started/serialization/parser/3.md +++ b/docs/includes/getting_started/serialization/parser/3.md @@ -1,15 +1,20 @@ === "Kafka" - ``` python linenums="1" hl_lines="9-15 18 28" + ``` python linenums="1" hl_lines="9-15 18 29" {!> docs_src/getting_started/serialization/parser_kafka.py !} ``` === "RabbitMQ" - ``` python linenums="1" hl_lines="9-15 18 28" + ``` python linenums="1" hl_lines="9-15 18 29" {!> docs_src/getting_started/serialization/parser_rabbit.py !} ``` === "NATS" - ``` python linenums="1" hl_lines="9-15 18 28" + ``` python linenums="1" hl_lines="9-15 18 29" {!> docs_src/getting_started/serialization/parser_nats.py !} ``` + +=== "Redis" + ``` python linenums="1" hl_lines="8-14 17 28" + {!> docs_src/getting_started/serialization/parser_redis.py 
!} + ``` diff --git a/docs/includes/getting_started/subscription/annotation/3.md b/docs/includes/getting_started/subscription/annotation/3.md index 653f7356ed..38f2c1d713 100644 --- a/docs/includes/getting_started/subscription/annotation/3.md +++ b/docs/includes/getting_started/subscription/annotation/3.md @@ -1,14 +1,19 @@ === "Kafka" ```python - {!> docs_src/getting_started/subscription/annotation_kafka.py [ln:8-11] !} + {!> docs_src/getting_started/subscription/kafka/annotation.py [ln:8-11] !} ``` === "RabbitMQ" ```python - {!> docs_src/getting_started/subscription/annotation_rabbit.py [ln:8-11] !} + {!> docs_src/getting_started/subscription/rabbit/annotation.py [ln:8-11] !} ``` === "NATS" ```python - {!> docs_src/getting_started/subscription/annotation_nats.py [ln:8-11] !} + {!> docs_src/getting_started/subscription/nats/annotation.py [ln:8-11] !} + ``` + +=== "Redis" + ```python + {!> docs_src/getting_started/subscription/redis/annotation.py [ln:8-11] !} ``` diff --git a/docs/includes/getting_started/subscription/filtering/1.md b/docs/includes/getting_started/subscription/filtering/1.md index d9de4f7c95..bae1c47e4d 100644 --- a/docs/includes/getting_started/subscription/filtering/1.md +++ b/docs/includes/getting_started/subscription/filtering/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="10 17" - {!> docs_src/getting_started/subscription/filter_kafka.py [ln:1-19] !} + {!> docs_src/getting_started/subscription/kafka/filter.py [ln:1-19] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="10 17" - {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:1-19] !} + {!> docs_src/getting_started/subscription/rabbit/filter.py [ln:1-19] !} ``` === "NATS" ```python linenums="1" hl_lines="10 17" - {!> docs_src/getting_started/subscription/filter_nats.py [ln:1-19] !} + {!> docs_src/getting_started/subscription/nats/filter.py [ln:1-19] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="10 17" + {!> 
docs_src/getting_started/subscription/redis/filter.py [ln:1-19] !} ``` diff --git a/docs/includes/getting_started/subscription/filtering/2.md b/docs/includes/getting_started/subscription/filtering/2.md index a91f0cfb42..6664f4fc4a 100644 --- a/docs/includes/getting_started/subscription/filtering/2.md +++ b/docs/includes/getting_started/subscription/filtering/2.md @@ -1,14 +1,19 @@ === "Kafka" ```python hl_lines="2" - {!> docs_src/getting_started/subscription/filter_kafka.py [ln:24-27] !} + {!> docs_src/getting_started/subscription/kafka/filter.py [ln:24-27] !} ``` === "RabbitMQ" ```python hl_lines="2" - {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:24-27] !} + {!> docs_src/getting_started/subscription/rabbit/filter.py [ln:24-27] !} ``` === "NATS" ```python hl_lines="2" - {!> docs_src/getting_started/subscription/filter_nats.py [ln:24-27] !} + {!> docs_src/getting_started/subscription/nats/filter.py [ln:24-27] !} + ``` + +=== "Redis" + ```python hl_lines="2" + {!> docs_src/getting_started/subscription/redis/filter.py [ln:24-27] !} ``` diff --git a/docs/includes/getting_started/subscription/filtering/3.md b/docs/includes/getting_started/subscription/filtering/3.md index 4a0b4fed9e..11664e8700 100644 --- a/docs/includes/getting_started/subscription/filtering/3.md +++ b/docs/includes/getting_started/subscription/filtering/3.md @@ -1,14 +1,19 @@ === "Kafka" ```python hl_lines="2" - {!> docs_src/getting_started/subscription/filter_kafka.py [ln:29-32] !} + {!> docs_src/getting_started/subscription/kafka/filter.py [ln:29-32] !} ``` === "RabbitMQ" ```python hl_lines="2" - {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:29-32] !} + {!> docs_src/getting_started/subscription/rabbit/filter.py [ln:29-32] !} ``` === "NATS" ```python hl_lines="2" - {!> docs_src/getting_started/subscription/filter_nats.py [ln:29-32] !} + {!> docs_src/getting_started/subscription/nats/filter.py [ln:29-32] !} + ``` + +=== "Redis" + ```python hl_lines="2" + {!> 
docs_src/getting_started/subscription/redis/filter.py [ln:29-32] !} ``` diff --git a/docs/includes/getting_started/subscription/index/1.md b/docs/includes/getting_started/subscription/index/1.md index e2cc3c4013..c8030cb8d4 100644 --- a/docs/includes/getting_started/subscription/index/1.md +++ b/docs/includes/getting_started/subscription/index/1.md @@ -30,3 +30,14 @@ async def handle_msg(msg_body): ... ``` + +=== "Redis" + ```python + from faststream.redis import RedisBroker + + broker = RedisBroker() + + @broker.subscriber("test") # channel name + async def handle_msg(msg_body): + ... + ``` diff --git a/docs/includes/getting_started/subscription/index/3.md b/docs/includes/getting_started/subscription/index/3.md index 1801912efc..c32f78dd25 100644 --- a/docs/includes/getting_started/subscription/index/3.md +++ b/docs/includes/getting_started/subscription/index/3.md @@ -30,3 +30,14 @@ async def handle_msg(msg_body: str): # just an annotation, has no real effect ... ``` + +=== "Redis" + ```python + from faststream.redis import RedisBroker + + broker = RedisBroker(apply_types=False) + + @broker.subscriber("test") + async def handle_msg(msg_body: str): # just an annotation, has no real effect + ... + ``` diff --git a/docs/includes/getting_started/subscription/index/sync.md b/docs/includes/getting_started/subscription/index/sync.md index 88fbdf9d78..d914849084 100644 --- a/docs/includes/getting_started/subscription/index/sync.md +++ b/docs/includes/getting_started/subscription/index/sync.md @@ -30,3 +30,14 @@ def handle_msg(msg_body): ... ``` + +=== "Redis" + ```python + from faststream.redis import RedisBroker + + broker = RedisBroker() + + @broker.subscriber("test") # channel name + def handle_msg(msg_body): + ... 
+ ``` diff --git a/docs/includes/getting_started/subscription/pydantic/1.md b/docs/includes/getting_started/subscription/pydantic/1.md index 5c4309d00c..67d06fc9c9 100644 --- a/docs/includes/getting_started/subscription/pydantic/1.md +++ b/docs/includes/getting_started/subscription/pydantic/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="12-17" - {!> docs_src/getting_started/subscription/pydantic_fields_kafka.py !} + {!> docs_src/getting_started/subscription/kafka/pydantic_fields.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="12-17" - {!> docs_src/getting_started/subscription/pydantic_fields_rabbit.py !} + {!> docs_src/getting_started/subscription/rabbit/pydantic_fields.py !} ``` === "NATS" ```python linenums="1" hl_lines="12-17" - {!> docs_src/getting_started/subscription/pydantic_fields_nats.py !} + {!> docs_src/getting_started/subscription/nats/pydantic_fields.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="12-17" + {!> docs_src/getting_started/subscription/redis/pydantic_fields.py !} ``` diff --git a/docs/includes/getting_started/subscription/pydantic/2.md b/docs/includes/getting_started/subscription/pydantic/2.md index e9648df83c..4a59a975e7 100644 --- a/docs/includes/getting_started/subscription/pydantic/2.md +++ b/docs/includes/getting_started/subscription/pydantic/2.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="1 10 20" - {!> docs_src/getting_started/subscription/pydantic_model_kafka.py !} + {!> docs_src/getting_started/subscription/kafka/pydantic_model.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="1 10 20" - {!> docs_src/getting_started/subscription/pydantic_model_rabbit.py !} + {!> docs_src/getting_started/subscription/rabbit/pydantic_model.py !} ``` === "NATS" ```python linenums="1" hl_lines="1 10 20" - {!> docs_src/getting_started/subscription/pydantic_model_nats.py !} + {!> docs_src/getting_started/subscription/nats/pydantic_model.py !} + ``` + +=== "Redis" + ```python 
linenums="1" hl_lines="1 10 20" + {!> docs_src/getting_started/subscription/redis/pydantic_model.py !} ``` diff --git a/docs/includes/getting_started/subscription/testing/1.md b/docs/includes/getting_started/subscription/testing/1.md index 0f61f145a4..7e89a31404 100644 --- a/docs/includes/getting_started/subscription/testing/1.md +++ b/docs/includes/getting_started/subscription/testing/1.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" title="annotation_kafka.py" - {!> docs_src/getting_started/subscription/annotation_kafka.py !} + {!> docs_src/getting_started/subscription/kafka/annotation.py !} ``` === "RabbitMQ" ```python linenums="1" title="annotation_rabbit.py" - {!> docs_src/getting_started/subscription/annotation_rabbit.py !} + {!> docs_src/getting_started/subscription/rabbit/annotation.py !} ``` === "NATS" - ```python linenums="1" title="annotation_rabbit.py" - {!> docs_src/getting_started/subscription/annotation_nats.py !} + ```python linenums="1" title="annotation_nats.py" + {!> docs_src/getting_started/subscription/nats/annotation.py !} + ``` + +=== "Redis" + ```python linenums="1" title="annotation_redis.py" + {!> docs_src/getting_started/subscription/redis/annotation.py !} ``` diff --git a/docs/includes/getting_started/subscription/testing/2.md b/docs/includes/getting_started/subscription/testing/2.md index 1f3bb19c0a..5a6c2a4bc9 100644 --- a/docs/includes/getting_started/subscription/testing/2.md +++ b/docs/includes/getting_started/subscription/testing/2.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="4 11-12" - {!> docs_src/getting_started/subscription/testing_kafka.py [ln:1-12] !} + {!> docs_src/getting_started/subscription/kafka/testing.py [ln:1-12] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="4 11-12" - {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:1-12] !} + {!> docs_src/getting_started/subscription/rabbit/testing.py [ln:1-12] !} ``` === "NATS" ```python linenums="1" hl_lines="4 11-12" - {!> 
docs_src/getting_started/subscription/testing_nats.py [ln:1-12] !} + {!> docs_src/getting_started/subscription/nats/testing.py [ln:1-12] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="4 11-12" + {!> docs_src/getting_started/subscription/redis/testing.py [ln:1-12] !} ``` diff --git a/docs/includes/getting_started/subscription/testing/3.md b/docs/includes/getting_started/subscription/testing/3.md index 5d1044d730..9778991d10 100644 --- a/docs/includes/getting_started/subscription/testing/3.md +++ b/docs/includes/getting_started/subscription/testing/3.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/subscription/testing_kafka.py [ln:18-23] !} + {!> docs_src/getting_started/subscription/kafka/testing.py [ln:18-23] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:18-23] !} + {!> docs_src/getting_started/subscription/rabbit/testing.py [ln:18-23] !} ``` === "NATS" ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/subscription/testing_nats.py [ln:18-23] !} + {!> docs_src/getting_started/subscription/nats/testing.py [ln:18-23] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="4" + {!> docs_src/getting_started/subscription/redis/testing.py [ln:18-23] !} ``` diff --git a/docs/includes/getting_started/subscription/testing/4.md b/docs/includes/getting_started/subscription/testing/4.md index c2d5da1a24..7f2c809ef4 100644 --- a/docs/includes/getting_started/subscription/testing/4.md +++ b/docs/includes/getting_started/subscription/testing/4.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="6" - {!> docs_src/getting_started/subscription/testing_kafka.py [ln:9-14] !} + {!> docs_src/getting_started/subscription/kafka/testing.py [ln:9-14] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="6" - {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:9-14] !} + {!> 
docs_src/getting_started/subscription/rabbit/testing.py [ln:9-14] !} ``` === "NATS" ```python linenums="1" hl_lines="6" - {!> docs_src/getting_started/subscription/testing_nats.py [ln:9-14] !} + {!> docs_src/getting_started/subscription/nats/testing.py [ln:9-14] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="6" + {!> docs_src/getting_started/subscription/redis/testing.py [ln:9-14] !} ``` diff --git a/docs/includes/getting_started/subscription/testing/5.md b/docs/includes/getting_started/subscription/testing/5.md index 1a180e85a4..717318354e 100644 --- a/docs/includes/getting_started/subscription/testing/5.md +++ b/docs/includes/getting_started/subscription/testing/5.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="6 8" - {!> docs_src/getting_started/subscription/testing_kafka.py [ln:9-16] !} + {!> docs_src/getting_started/subscription/kafka/testing.py [ln:9-16] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="6 8" - {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:9-16] !} + {!> docs_src/getting_started/subscription/rabbit/testing.py [ln:9-16] !} ``` === "NATS" ```python linenums="1" hl_lines="6 8" - {!> docs_src/getting_started/subscription/testing_nats.py [ln:9-16] !} + {!> docs_src/getting_started/subscription/nats/testing.py [ln:9-16] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="6 8" + {!> docs_src/getting_started/subscription/redis/testing.py [ln:9-16] !} ``` diff --git a/docs/includes/getting_started/subscription/testing/real.md b/docs/includes/getting_started/subscription/testing/real.md index b0de793546..b5774069ea 100644 --- a/docs/includes/getting_started/subscription/testing/real.md +++ b/docs/includes/getting_started/subscription/testing/real.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="4 11 13 20 23" - {!> docs_src/getting_started/subscription/real_testing_kafka.py !} + {!> docs_src/getting_started/subscription/kafka/real_testing.py !} ``` === "RabbitMQ" ```python 
linenums="1" hl_lines="4 11 13 20 23" - {!> docs_src/getting_started/subscription/real_testing_rabbit.py !} + {!> docs_src/getting_started/subscription/rabbit/real_testing.py !} ``` === "NATS" ```python linenums="1" hl_lines="4 11 13 20 23" - {!> docs_src/getting_started/subscription/real_testing_nats.py !} + {!> docs_src/getting_started/subscription/nats/real_testing.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="4 11 13 20 23" + {!> docs_src/getting_started/subscription/redis/real_testing.py !} ``` diff --git a/docs/includes/index/1.md b/docs/includes/index/1.md index dd7d4311c8..37027374be 100644 --- a/docs/includes/index/1.md +++ b/docs/includes/index/1.md @@ -12,3 +12,8 @@ ```sh pip install faststream[nats] ``` + +=== "Redis" + ```sh + pip install faststream[redis] + ``` diff --git a/docs/includes/index/2.md b/docs/includes/index/2.md index baed0a0b80..a8da404053 100644 --- a/docs/includes/index/2.md +++ b/docs/includes/index/2.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="9" - {!> docs_src/index/basic_kafka.py!} + {!> docs_src/index/kafka/basic.py!} ``` === "RabbitMQ" ```python linenums="1" hl_lines="9" - {!> docs_src/index/basic_rabbit.py!} + {!> docs_src/index/rabbit/basic.py!} ``` === "NATS" ```python linenums="1" hl_lines="9" - {!> docs_src/index/basic_nats.py!} + {!> docs_src/index/nats/basic.py!} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="9" + {!> docs_src/index/redis/basic.py!} ``` diff --git a/docs/includes/index/3.md b/docs/includes/index/3.md index db9183bc5f..1143e10e03 100644 --- a/docs/includes/index/3.md +++ b/docs/includes/index/3.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" hl_lines="1 8 14" - {!> docs_src/index/pydantic_kafka.py !} + {!> docs_src/index/kafka/pydantic.py !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="1 8 14" - {!> docs_src/index/pydantic_rabbit.py !} + {!> docs_src/index/rabbit/pydantic.py !} ``` === "NATS" ```python linenums="1" hl_lines="1 8 14" - {!> 
docs_src/index/pydantic_nats.py !} + {!> docs_src/index/nats/pydantic.py !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="1 8 14" + {!> docs_src/index/redis/pydantic.py !} ``` diff --git a/docs/includes/index/4.md b/docs/includes/index/4.md index 101dbec6cf..965821528c 100644 --- a/docs/includes/index/4.md +++ b/docs/includes/index/4.md @@ -2,19 +2,26 @@ ```python linenums="1" hl_lines="5 10 18-19" # Code above omitted 👆 - {!> docs_src/index/test_kafka.py [ln:3-21] !} + {!> docs_src/index/kafka/test.py [ln:3-21] !} ``` === "RabbitMQ" ```python linenums="1" hl_lines="5 10 18-19" # Code above omitted 👆 - {!> docs_src/index/test_rabbit.py [ln:3-21] !} + {!> docs_src/index/rabbit/test.py [ln:3-21] !} ``` === "NATS" ```python linenums="1" hl_lines="5 10 18-19" # Code above omitted 👆 - {!> docs_src/index/test_nats.py [ln:3-21] !} + {!> docs_src/index/nats/test.py [ln:3-21] !} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="5 10 18-19" + # Code above omitted 👆 + + {!> docs_src/index/redis/test.py [ln:3-21] !} ``` diff --git a/docs/includes/scheduling/app.md b/docs/includes/scheduling/app.md index 67969ac9fc..f0675e724a 100644 --- a/docs/includes/scheduling/app.md +++ b/docs/includes/scheduling/app.md @@ -1,14 +1,19 @@ === "Kafka" ```python linenums="1" - {!> docs_src/index/basic_kafka.py!} + {!> docs_src/index/kafka/basic.py!} ``` === "RabbitMQ" ```python linenums="1" - {!> docs_src/index/basic_rabbit.py!} + {!> docs_src/index/rabbit/basic.py!} ``` === "NATS" ```python linenums="1" - {!> docs_src/index/basic_nats.py!} + {!> docs_src/index/nats/basic.py!} + ``` + +=== "Redis" + ```python linenums="1" + {!> docs_src/index/redis/basic.py!} ``` diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 018a92f7e6..e278b06ce3 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -17,6 +17,10 @@ repo_name: airtai/faststream repo_url: https://github.com/airtai/faststream edit_uri: https://github.com/airtai/faststream/tree/main/docs/docs +exclude_docs: | + 
navigation_template.txt + SUMMARY.md + theme: name: material custom_dir: overrides @@ -65,9 +69,9 @@ extra_javascript: - javascripts/extra.js plugins: - # - meta # (insiders) use .meta.yml files - search: separator: '[\s\-,:!=\[\]()"`/]+|\.(?!\d)|&[lg]t;|(?!\b)(?=[A-Z][a-z])' + # - meta # (insiders) use .meta.yml files - glightbox # image zoom - macros: # Jinja templates include_dir: includes @@ -119,7 +123,7 @@ plugins: cache_safe: true css_files: - stylesheets/extra.css - - mike: + - mike: # versioning alias_type: copy redirect_template: templates/redirect.html canonical_version: latest diff --git a/docs/update_releases.py b/docs/update_releases.py index 829e16f430..9a1d79ac19 100644 --- a/docs/update_releases.py +++ b/docs/update_releases.py @@ -1,9 +1,8 @@ import re from pathlib import Path -from typing import List, Tuple +from typing import List, Sequence, Tuple import requests -import typer def find_metablock(lines: List[str]) -> Tuple[List[str], List[str]]: @@ -26,28 +25,24 @@ def find_header(lines: List[str]) -> Tuple[str, List[str]]: return "", lines -def get_github_releases(): +def get_github_releases() -> Sequence[Tuple[str, str]]: # Get the latest version from GitHub releases response = requests.get("https://api.github.com/repos/airtai/FastStream/releases") - return response.json() + return ((x["tag_name"], x["body"]) for x in reversed(response.json())) def convert_links_and_usernames(text): if "](" not in text: # Convert HTTP/HTTPS links - text = re.sub(r'(https?://[^\s]+)', r'[\1](\1){.external-link target="_blank"}', text) + text = re.sub(r"(https?://[^\s]+)", r'[\1](\1){.external-link target="_blank"}', text) # Convert GitHub usernames to links - text = re.sub(r'@(\w+)', r'[@\1](https://github.com/\1){.external-link target="_blank"}', text) + text = re.sub(r"@(\w+)", r'[@\1](https://github.com/\1){.external-link target="_blank"}', text) return text def update_release_notes(realease_notes_path: Path): - typer.echo("Updating Release Notes") - - 
releases = get_github_releases() - # Get the changelog from the RELEASE.md file changelog = realease_notes_path.read_text() @@ -57,9 +52,8 @@ def update_release_notes(realease_notes_path: Path): header, changelog = find_header(lines) changelog = "\n".join(changelog) - for release in reversed(releases): - version = release["tag_name"] - body = release["body"].replace("##", "###") + for version, body in get_github_releases(): + body = body.replace("##", "###") body = convert_links_and_usernames(body) version_changelog = f"## {version}\n\n{body}\n\n" @@ -70,7 +64,7 @@ def update_release_notes(realease_notes_path: Path): # Update the RELEASE.md file with the latest version and changelog realease_notes_path.write_text(( metablock + "\n\n" + - header + "\n" + + header + "\n" + # adding an additional newline after the header results in one empty file being added every time we run the script changelog + "\n" ).replace("\r", "")) diff --git a/examples/redis/channel_sub.py b/examples/redis/channel_sub.py new file mode 100644 index 0000000000..959345aa45 --- /dev/null +++ b/examples/redis/channel_sub.py @@ -0,0 +1,15 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber("test") +async def handle(msg: str, logger: Logger): + logger.info(msg) + + +@app.after_startup +async def t(): + await broker.publish("Hi!", "test") diff --git a/examples/redis/channel_sub_pattern.py b/examples/redis/channel_sub_pattern.py new file mode 100644 index 0000000000..c2570c7e55 --- /dev/null +++ b/examples/redis/channel_sub_pattern.py @@ -0,0 +1,23 @@ +from faststream import FastStream, Logger, Path +from faststream.redis import PubSub, RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber("logs.{level}") +async def handle_logs(msg: str, logger: Logger, level: str = Path()): + logger.info(f"{level}: {msg}") + + +@broker.subscriber(channel=PubSub("test.*", 
pattern=True)) +async def handle_test(msg: str, logger: Logger): + logger.info(msg) + + +@app.after_startup +async def t(): + # publish to handle_logs + await broker.publish("Hi!", "logs.info") + # publish to handle_test + await broker.publish("Hi!", "test.smth") diff --git a/examples/redis/list_sub.py b/examples/redis/list_sub.py new file mode 100644 index 0000000000..1ff8b5e0db --- /dev/null +++ b/examples/redis/list_sub.py @@ -0,0 +1,15 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(list="test-list") +async def handle(msg: str, logger: Logger): + logger.info(msg) + + +@app.after_startup +async def t(): + await broker.publish("Hi!", list="test-list") diff --git a/examples/redis/list_sub_batch.py b/examples/redis/list_sub_batch.py new file mode 100644 index 0000000000..31a4c3b592 --- /dev/null +++ b/examples/redis/list_sub_batch.py @@ -0,0 +1,15 @@ +from faststream import FastStream, Logger +from faststream.redis import ListSub, RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(list=ListSub("test-list", batch=True)) +async def handle(msg: list[str], logger: Logger): + logger.info(msg) + + +@app.after_startup +async def t(): + await broker.publish_batch("Hi ", "again, ", "FastStream!", list="test-list") diff --git a/examples/redis/rpc.py b/examples/redis/rpc.py new file mode 100644 index 0000000000..009d4496fd --- /dev/null +++ b/examples/redis/rpc.py @@ -0,0 +1,49 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(channel="test-channel") +async def handle_channel(msg: str, logger: Logger): + logger.info(msg) + return msg + + +@broker.subscriber(list="test-list") +async def handle_list(msg: str, logger: Logger): + logger.info(msg) + return msg + + +@broker.subscriber(stream="test-stream") +async def 
handle_stream(msg: str, logger: Logger): + logger.info(msg) + return msg + + +@app.after_startup +async def t(): + msg = "Hi!" + + assert msg == await broker.publish( + "Hi!", + channel="test-channel", + rpc=True, + rpc_timeout=3.0, + ) + + assert msg == await broker.publish( + "Hi!", + list="test-list", + rpc=True, + rpc_timeout=3.0, + ) + + assert msg == await broker.publish( + "Hi!", + stream="test-stream", + rpc=True, + rpc_timeout=3.0, + ) diff --git a/examples/redis/stream_sub.py b/examples/redis/stream_sub.py new file mode 100644 index 0000000000..f528f35c4b --- /dev/null +++ b/examples/redis/stream_sub.py @@ -0,0 +1,15 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(stream="test-stream") +async def handle(msg: str, logger: Logger): + logger.info(msg) + + +@app.after_startup +async def t(): + await broker.publish("Hi!", stream="test-stream") diff --git a/examples/redis/stream_sub_batch.py b/examples/redis/stream_sub_batch.py new file mode 100644 index 0000000000..bdf0971e86 --- /dev/null +++ b/examples/redis/stream_sub_batch.py @@ -0,0 +1,17 @@ +from faststream import FastStream, Logger +from faststream.redis import RedisBroker, StreamSub + +broker = RedisBroker() +app = FastStream(broker) + + +@broker.subscriber(stream=StreamSub("test-stream", batch=True)) +async def handle(msg: list[str], logger: Logger): + logger.info(msg) + + +@app.after_startup +async def t(): + await broker.publish("Hi ", stream="test-stream") + await broker.publish("again, ", stream="test-stream") + await broker.publish("FastStream!", stream="test-stream") diff --git a/faststream/__about__.py b/faststream/__about__.py index 56f8cfbad8..40be3bb7bd 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,8 +1,13 @@ """Simple and fast framework to create message brokers based microservices""" -__version__ = "0.2.15" +__version__ = "0.3.0rc0" INSTALL_YAML = 
""" To generate YAML documentation, please install dependencies:\n pip install PyYAML """ + +INSTALL_WATCHFILES = """ +To use restart feature, please install dependencies:\n +pip install watchfiles +""" diff --git a/faststream/__main__.py b/faststream/__main__.py index b867746646..4e61d9660a 100644 --- a/faststream/__main__.py +++ b/faststream/__main__.py @@ -1,4 +1,8 @@ +import warnings + from faststream.cli.main import cli +warnings.filterwarnings("default", category=ImportWarning, module="faststream") + if __name__ == "__main__": cli(prog_name="faststream") diff --git a/faststream/_compat.py b/faststream/_compat.py index 3270031892..3d55e37dc3 100644 --- a/faststream/_compat.py +++ b/faststream/_compat.py @@ -1,4 +1,3 @@ -import importlib.util import json import os import sys @@ -13,17 +12,21 @@ if sys.version_info < (3, 12): from typing_extensions import TypedDict as TypedDict + from typing_extensions import Unpack as Unpack from typing_extensions import override as override else: from typing import TypedDict as TypedDict + from typing import Unpack as Unpack from typing import override as override if sys.version_info < (3, 11): from typing_extensions import Never as Never + from typing_extensions import NotRequired as NotRequired from typing_extensions import Required as Required from typing_extensions import Self as Self else: from typing import Never as Never + from typing import NotRequired as NotRequired from typing import Required as Required from typing import Self as Self @@ -47,10 +50,6 @@ ModelVar = TypeVar("ModelVar", bound=BaseModel) -def is_installed(package: str) -> bool: - return bool(importlib.util.find_spec(package)) - - IS_OPTIMIZED = os.getenv("PYTHONOPTIMIZE", False) @@ -83,12 +82,9 @@ def raise_fastapi_validation_error(errors: List[Any], body: AnyDict) -> Never: except ImportError: HAS_FASTAPI = False - JsonSchemaValue = Mapping[str, Any] if PYDANTIC_V2: - from pydantic import ConfigDict as ConfigDict - if PYDANTIC_VERSION >= "2.4.0": from 
pydantic.annotated_handlers import ( GetJsonSchemaHandler as GetJsonSchemaHandler, @@ -103,6 +99,7 @@ def raise_fastapi_validation_error(errors: List[Any], body: AnyDict) -> Never: from pydantic_core.core_schema import ( general_plain_validator_function as with_info_plain_validator_function, ) + from pydantic_core import CoreSchema as CoreSchema from pydantic_core import to_jsonable_python @@ -138,18 +135,11 @@ def model_copy(model: ModelVar, **kwargs: Any) -> ModelVar: return model.model_copy(**kwargs) else: - from pydantic.config import BaseConfig, get_config - from pydantic.config import ConfigDict as CD from pydantic.json import pydantic_encoder GetJsonSchemaHandler = Any # type: ignore[assignment,misc] CoreSchema = Any # type: ignore[assignment,misc] - def ConfigDict( # type: ignore[no-redef] - **kwargs: Any, - ) -> Type[BaseConfig]: - return get_config(CD(**kwargs)) # type: ignore - SCHEMA_FIELD = "schema_extra" def dump_json(data: Any) -> str: diff --git a/faststream/app.py b/faststream/app.py index c14e00bcd5..61733a00fe 100644 --- a/faststream/app.py +++ b/faststream/app.py @@ -19,9 +19,9 @@ from faststream.broker.core.asyncronous import BrokerAsyncUsecase from faststream.cli.supervisors.utils import set_exit from faststream.log import logger -from faststream.types import AnyCallable, AnyDict, AsyncFunc, SettingField +from faststream.types import AnyCallable, AnyDict, AsyncFunc, Lifespan, SettingField from faststream.utils import apply_types, context -from faststream.utils.functions import to_async +from faststream.utils.functions import drop_response_type, fake_context, to_async P_HookParams = ParamSpec("P_HookParams") T_HookReturn = TypeVar("T_HookReturn") @@ -212,6 +212,7 @@ def __init__( self, broker: Optional[BrokerAsyncUsecase[Any, Any]] = None, logger: Optional[logging.Logger] = logger, + lifespan: Optional[Lifespan] = None, # AsyncAPI args, title: str = "FastStream", version: str = "0.1.0", @@ -250,6 +251,15 @@ def __init__( self._stop_event = None 
+ self.lifespan_context = ( + apply_types( + func=lifespan, + wrap_model=drop_response_type, + ) + if lifespan is not None + else fake_context + ) + set_exit(lambda *_: self.__exit()) def on_startup( @@ -330,10 +340,15 @@ async def run( assert self.broker, "You should setup a broker" # nosec B101 self._init_async_cycle() - async with anyio.create_task_group() as tg: - tg.start_soon(self._start, log_level, run_extra_options) - await self._stop(log_level) - tg.cancel_scope.cancel() + async with self.lifespan_context(**(run_extra_options or {})): + try: + async with anyio.create_task_group() as tg: + tg.start_soon(self._start, log_level, run_extra_options) + await self._stop(log_level) + tg.cancel_scope.cancel() + except anyio.ExceptionGroup as e: + for ex in e.exceptions: + raise ex from None def _init_async_cycle(self) -> None: if self._stop_event is None: diff --git a/faststream/asyncapi/base.py b/faststream/asyncapi/base.py index 07a23bfbcd..7707331e25 100644 --- a/faststream/asyncapi/base.py +++ b/faststream/asyncapi/base.py @@ -1,9 +1,11 @@ from abc import abstractproperty +from dataclasses import dataclass, field from typing import Dict from faststream.asyncapi.schema.channels import Channel +@dataclass class AsyncAPIOperation: """A class representing an asynchronous API operation. 
@@ -17,6 +19,8 @@ class AsyncAPIOperation: The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ + include_in_schema: bool = field(default=True) + @abstractproperty def name(self) -> str: raise NotImplementedError() diff --git a/faststream/asyncapi/message.py b/faststream/asyncapi/message.py index f86bd93957..26500b1581 100644 --- a/faststream/asyncapi/message.py +++ b/faststream/asyncapi/message.py @@ -180,10 +180,10 @@ def get_model_schema( if model is None: model = call - body = model_schema(model) + body: Dict[str, Any] = model_schema(model) if params_number == 1 and not use_original_model: - param_body = body.get("properties", {}) + param_body: Dict[str, Any] = body.get("properties", {}) param_body = param_body[name] if PYDANTIC_V2: diff --git a/faststream/asyncapi/schema/bindings/amqp.py b/faststream/asyncapi/schema/bindings/amqp.py index 0cd2deb5e3..686eccedc3 100644 --- a/faststream/asyncapi/schema/bindings/amqp.py +++ b/faststream/asyncapi/schema/bindings/amqp.py @@ -1,6 +1,6 @@ from typing import Literal, Optional -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, PositiveInt class Queue(BaseModel): @@ -93,7 +93,7 @@ class OperationBinding(BaseModel): cc: Optional[str] = None ack: bool = True replyTo: Optional[str] = None - priority: Optional[int] = None - mandatory: Optional[bool] = None deliveryMode: Optional[int] = None + mandatory: Optional[bool] = None + priority: Optional[PositiveInt] = None bindingVersion: str = "0.2.0" diff --git a/faststream/asyncapi/schema/bindings/redis.py b/faststream/asyncapi/schema/bindings/redis.py index 45aba5c9b0..3868aa184a 100644 --- a/faststream/asyncapi/schema/bindings/redis.py +++ b/faststream/asyncapi/schema/bindings/redis.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Literal, Optional +from typing import Any, Dict, Optional from pydantic import BaseModel @@ -29,7 +29,9 @@ class ChannelBinding(BaseModel): """ channel: str - method: 
Literal["ssubscribe", "psubscribe", "subscribe"] = "subscribe" + method: Optional[str] = None + group_name: Optional[str] = None + consumer_name: Optional[str] = None bindingVersion: str = "custom" diff --git a/faststream/asyncapi/schema/info.py b/faststream/asyncapi/schema/info.py index 7c50ec070d..0c095080c8 100644 --- a/faststream/asyncapi/schema/info.py +++ b/faststream/asyncapi/schema/info.py @@ -9,15 +9,18 @@ JsonSchemaValue, Required, TypedDict, - is_installed, with_info_plain_validator_function, ) from faststream.log import logger -if is_installed("email_validator"): +try: + import email_validator + + if email_validator is None: + raise ImportError from pydantic import EmailStr -else: # pragma: no cover +except ImportError: # pragma: no cover # NOTE: EmailStr mock was copied from the FastAPI # https://github.com/tiangolo/fastapi/blob/master/fastapi/openapi/models.py#24 class EmailStr(str): # type: ignore diff --git a/faststream/broker/core/abc.py b/faststream/broker/core/abc.py index c911f9b415..cb1bfc8d8d 100644 --- a/faststream/broker/core/abc.py +++ b/faststream/broker/core/abc.py @@ -8,9 +8,9 @@ Any, Awaitable, Callable, - Dict, Generic, List, + Mapping, Optional, Sequence, Sized, @@ -23,7 +23,6 @@ from fast_depends._compat import PYDANTIC_V2 from fast_depends.core import CallModel, build_call_model from fast_depends.dependencies import Depends -from fast_depends.use import _InjectWrapper from pydantic import create_model from faststream.asyncapi import schema as asyncapi @@ -97,8 +96,8 @@ class BrokerUsecase( logger: Optional[logging.Logger] log_level: int - handlers: Dict[Any, BaseHandler[MsgType]] - _publishers: Dict[Any, BasePublisher[MsgType]] + handlers: Mapping[Any, BaseHandler[MsgType]] + _publishers: Mapping[Any, BasePublisher[MsgType]] dependencies: Sequence[Depends] started: bool @@ -113,7 +112,7 @@ def __init__( url: Union[str, List[str]], *args: Any, # AsyncAPI kwargs - protocol: str, + protocol: Optional[str] = None, protocol_version: 
Optional[str] = None, description: Optional[str] = None, tags: Optional[Sequence[Union[asyncapi.Tag, asyncapi.TagDict]]] = None, @@ -203,7 +202,7 @@ def include_router(self, router: BrokerRouter[Any, MsgType]) -> None: for r in router._handlers: self.subscriber(*r.args, **r.kwargs)(r.call) - self._publishers.update(router._publishers) + self._publishers = {**self._publishers, **router._publishers} def include_routers(self, *routers: BrokerRouter[Any, MsgType]) -> None: """Includes routers in the current object. @@ -258,10 +257,7 @@ def _wrap_handler( _get_dependant: Optional[Any] = None, ) -> Tuple[ HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn], - Union[ - CallModel[P_HandlerParams, T_HandlerReturn], - CallModel[P_HandlerParams, Awaitable[T_HandlerReturn]], - ], + CallModel[Any, Any], ]: """Wrap a handler function. @@ -309,10 +305,7 @@ def _wrap_handler( dependant = _patch_fastapi_dependant(dependant) if self._is_apply_types is True and not _raw: - apply_wrapper: _InjectWrapper[ - P_HandlerParams, Awaitable[T_HandlerReturn] - ] = apply_types(None) - f = apply_wrapper(f, dependant) + f = apply_types(None)(f, dependant) # type: ignore[arg-type] decode_f = self._wrap_decode_message( func=f, @@ -493,7 +486,7 @@ def publisher( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ - self._publishers[key] = publisher + self._publishers = {**self._publishers, key: publisher} return publisher @abstractmethod diff --git a/faststream/broker/core/asyncronous.py b/faststream/broker/core/asyncronous.py index 2df0c58d62..66f42611b5 100644 --- a/faststream/broker/core/asyncronous.py +++ b/faststream/broker/core/asyncronous.py @@ -6,7 +6,6 @@ Any, Awaitable, Callable, - Dict, Mapping, Optional, Sequence, @@ -39,7 +38,6 @@ WrappedReturn, ) from faststream.broker.wrapper import HandlerCallWrapper -from faststream.exceptions import AckMessage, NackMessage, RejectMessage from faststream.log import access_logger from 
faststream.types import SendableMessage from faststream.utils.functions import to_async @@ -81,7 +79,7 @@ class BrokerAsyncUsecase(BrokerUsecase[MsgType, ConnectionType]): The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ - handlers: Dict[Any, AsyncHandler[MsgType]] # type: ignore[assignment] + handlers: Mapping[Any, AsyncHandler[MsgType]] middlewares: Sequence[Callable[[MsgType], BaseMiddleware]] _global_parser: Optional[AsyncCustomParser[MsgType, StreamMessage[MsgType]]] _global_decoder: Optional[AsyncCustomDecoder[StreamMessage[MsgType]]] @@ -479,14 +477,4 @@ async def _execute_handler( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ - try: - return await func(message) - except AckMessage as e: - await message.ack() - raise e - except NackMessage as e: - await message.nack() - raise e - except RejectMessage as e: - await message.reject() - raise e + return await func(message) diff --git a/faststream/broker/core/mixins.py b/faststream/broker/core/mixins.py index e26f51465d..710eeeae5c 100644 --- a/faststream/broker/core/mixins.py +++ b/faststream/broker/core/mixins.py @@ -50,6 +50,7 @@ def __init__( self.logger = logger self.log_level = log_level self._fmt = log_fmt + self._message_id_ln = 10 @property def fmt(self) -> str: # pragma: no cover @@ -74,7 +75,7 @@ def _get_log_context( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ return { - "message_id": message.message_id[:10] if message else "", + "message_id": message.message_id[: self._message_id_ln] if message else "", } def _log( diff --git a/faststream/broker/fastapi/router.py b/faststream/broker/fastapi/router.py index e383972ce7..b5cc2adcd6 100644 --- a/faststream/broker/fastapi/router.py +++ b/faststream/broker/fastapi/router.py @@ -628,8 +628,11 @@ def include_router( """ if isinstance(router, StreamRouter): # pragma: no branch 
self._setup_log_context(self.broker, router.broker) - self.broker.handlers.update(router.broker.handlers) - self.broker._publishers.update(router.broker._publishers) + self.broker.handlers = {**self.broker.handlers, **router.broker.handlers} + self.broker._publishers = { + **self.broker._publishers, + **router.broker._publishers, + } super().include_router( router=router, diff --git a/faststream/broker/handler.py b/faststream/broker/handler.py index e56c7ccea5..277610e323 100644 --- a/faststream/broker/handler.py +++ b/faststream/broker/handler.py @@ -94,6 +94,7 @@ def __init__( log_context_builder: Callable[[StreamMessage[Any]], Dict[str, str]], description: Optional[str] = None, title: Optional[str] = None, + include_in_schema: bool = True, ): """Initialize a new instance of the class. @@ -106,10 +107,14 @@ def __init__( """ self.calls = [] # type: ignore[assignment] self.global_middlewares = [] + + self.log_context_builder = log_context_builder + self.running = False + # AsyncAPI information self._description = description self._title = title - self.log_context_builder = log_context_builder + self.include_in_schema = include_in_schema @property def call_name(self) -> str: @@ -338,15 +343,17 @@ async def consume(self, msg: MsgType) -> SendableMessage: # type: ignore[overri if IS_OPTIMIZED: # pragma: no cover break - assert processed, "You have to consume message" # nosec B101 + assert ( + not self.running or processed + ), "You have to consume message" # nosec B101 context.reset_local("log_context", log_context_tag) return result_msg @abstractmethod async def start(self) -> None: - raise NotImplementedError() + self.running = True @abstractmethod async def close(self) -> None: - raise NotImplementedError() + self.running = False diff --git a/faststream/broker/message.py b/faststream/broker/message.py index 481e08df57..f9d0e154ab 100644 --- a/faststream/broker/message.py +++ b/faststream/broker/message.py @@ -1,4 +1,3 @@ -from abc import abstractmethod from 
dataclasses import dataclass, field from typing import Any, Generic, Optional, TypeVar, Union from uuid import uuid4 @@ -38,99 +37,30 @@ class ABCStreamMessage(Generic[Msg]): reply_to: str = "" message_id: str = field(default_factory=lambda: str(uuid4())) # pragma: no cover correlation_id: str = field( - default_factory=lambda: str(uuid4()) - ) # pragma: no cover + default_factory=lambda: str(uuid4()) # pragma: no cover + ) - processed: bool = False + processed: bool = field(default=False, init=False) + commited: bool = field(default=False, init=False) class SyncStreamMessage(ABCStreamMessage[Msg]): - @abstractmethod def ack(self, **kwargs: Any) -> None: - raise NotImplementedError() + self.commited = True - @abstractmethod def nack(self, **kwargs: Any) -> None: - """A function to raise a NotImplementedError. + self.commited = True - Args: - kwargs: Additional keyword arguments - - Returns: - None - !!! note - - The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) - """ - raise NotImplementedError() - - @abstractmethod def reject(self, **kwargs: Any) -> None: - """Reject function. - - Args: - **kwargs: Arbitrary keyword arguments - - Returns: - None - - Raises: - NotImplementedError: Always raises NotImplementedError - !!! note - - The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) - """ - raise NotImplementedError() + self.commited = True class StreamMessage(ABCStreamMessage[Msg]): - @abstractmethod async def ack(self, **kwargs: Any) -> None: - """Acknowledge method. - - Args: - **kwargs: Additional keyword arguments - - Raises: - NotImplementedError: If the method is not implemented. - !!! note + self.commited = True - The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) - """ - raise NotImplementedError() - - @abstractmethod async def nack(self, **kwargs: Any) -> None: - """A function to handle a nack. 
- - Args: - **kwargs: Additional arguments - - Returns: - None - - Raises: - NotImplementedError: If the function is not implemented. - !!! note - - The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) - """ - raise NotImplementedError() + self.commited = True - @abstractmethod async def reject(self, **kwargs: Any) -> None: - """Rejects the operation. - - Args: - **kwargs: Additional keyword arguments - - Returns: - None - - Raises: - NotImplementedError: Always raises NotImplementedError - !!! note - - The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) - """ - raise NotImplementedError() + self.commited = True diff --git a/faststream/broker/middlewares.py b/faststream/broker/middlewares.py index b2c85447c7..27fe29f003 100644 --- a/faststream/broker/middlewares.py +++ b/faststream/broker/middlewares.py @@ -277,7 +277,7 @@ def __call__(self, msg: Any) -> Self: async def on_consume(self, msg: DecodedMessage) -> DecodedMessage: if self.logger is not None: - c = context.get("log_context") + c = context.get_local("log_context") self.logger.log(self.log_level, "Received", extra=c) return await super().on_consume(msg) @@ -302,7 +302,7 @@ async def after_processed( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ if self.logger is not None: - c = context.get("log_context") + c = context.get_local("log_context") if exc_type and exc_val: self.logger.error( diff --git a/faststream/broker/router.py b/faststream/broker/router.py index c47df1cb78..05dc9a6f69 100644 --- a/faststream/broker/router.py +++ b/faststream/broker/router.py @@ -156,6 +156,7 @@ def __init__( ] = None, parser: Optional[CustomParser[MsgType, StreamMessage[MsgType]]] = None, decoder: Optional[CustomDecoder[StreamMessage[MsgType]]] = None, + include_in_schema: Optional[bool] = None, ): """Initialize a class object. 
@@ -171,6 +172,7 @@ def __init__( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ self.prefix = prefix + self.include_in_schema = include_in_schema self._handlers = list(handlers) self._publishers = {} self._dependencies = dependencies @@ -194,6 +196,7 @@ def subscriber( ] = None, parser: Optional[CustomParser[MsgType, StreamMessage[MsgType]]] = None, decoder: Optional[CustomDecoder[StreamMessage[MsgType]]] = None, + include_in_schema: Optional[bool] = None, **kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -235,6 +238,7 @@ def _wrap_subscriber( ] = None, parser: Optional[CustomParser[MsgType, StreamMessage[MsgType]]] = None, decoder: Optional[CustomDecoder[StreamMessage[MsgType]]] = None, + include_in_schema: bool = True, **kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -283,6 +287,11 @@ def router_subscriber_wrapper( middlewares=(*(self._middlewares or ()), *(middlewares or ())) or None, parser=parser or self._parser, decoder=decoder or self._decoder, + include_in_schema=( + include_in_schema + if self.include_in_schema is None + else self.include_in_schema + ), **kwargs, ) self._handlers.append(route) diff --git a/faststream/broker/schemas.py b/faststream/broker/schemas.py index c4e5ccace8..0352b469b1 100644 --- a/faststream/broker/schemas.py +++ b/faststream/broker/schemas.py @@ -60,10 +60,15 @@ def __init__(self, name: str, **kwargs: Any) -> None: """ super().__init__(name=name, **kwargs) + def __hash__(self) -> int: + return hash(self.name) + @overload @classmethod def validate( - cls: Type[NameRequiredCls], value: Union[str, NameRequiredCls] + cls: Type[NameRequiredCls], + value: Union[str, NameRequiredCls], + **kwargs: Any, ) -> NameRequiredCls: """Validates a value. 
@@ -83,7 +88,11 @@ def validate( @overload @classmethod - def validate(cls: Type[NameRequiredCls], value: None) -> None: + def validate( + cls: Type[NameRequiredCls], + value: None, + **kwargs: Any, + ) -> None: """Validate a value. Args: @@ -99,7 +108,9 @@ def validate(cls: Type[NameRequiredCls], value: None) -> None: @classmethod def validate( - cls: Type[NameRequiredCls], value: Union[str, NameRequiredCls, None] + cls: Type[NameRequiredCls], + value: Union[str, NameRequiredCls, None], + **kwargs: Any, ) -> Optional[NameRequiredCls]: """Validates a value. @@ -114,7 +125,7 @@ def validate( """ if value is not None: if isinstance(value, str): - value = cls(value) + value = cls(value, **kwargs) return value diff --git a/faststream/broker/test.py b/faststream/broker/test.py index 6d3702d203..1b217a9123 100644 --- a/faststream/broker/test.py +++ b/faststream/broker/test.py @@ -1,3 +1,4 @@ +import warnings from abc import abstractmethod from contextlib import ExitStack, asynccontextmanager from functools import partial @@ -15,13 +16,13 @@ from faststream.broker.middlewares import CriticalLogMiddleware from faststream.broker.wrapper import HandlerCallWrapper from faststream.types import SendableMessage, SettingField +from faststream.utils.ast import is_contains_context_name from faststream.utils.functions import timeout_scope Broker = TypeVar("Broker", bound=BrokerAsyncUsecase[Any, Any]) class TestApp: - # make sure pytest doesn't try to collect this class as a test class """A class to represent a test application. 
Attributes: @@ -38,10 +39,11 @@ class TestApp: The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ + __test__ = False app: FastStream - _extra_options: Optional[Dict[str, SettingField]] + _extra_options: Dict[str, SettingField] def __init__( self, @@ -61,12 +63,15 @@ def __init__( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ self.app = app - self._extra_options = run_extra_options + self._extra_options = run_extra_options or {} def __enter__(self) -> FastStream: with ExitStack() as stack: portal = stack.enter_context(start_blocking_portal()) portal.call(self.app._init_async_cycle) + + lifespan_context = self.app.lifespan_context(**self._extra_options) + stack.enter_context(portal.wrap_async_context_manager(lifespan_context)) portal.call(partial(self.app._start, run_extra_options=self._extra_options)) @stack.callback @@ -83,12 +88,14 @@ def __exit__( exc_val: Optional[BaseException] = None, exec_tb: Optional[TracebackType] = None, ) -> None: - assert self.app._stop_event # nosec B101 + assert self.app._stop_event, "You should call `__enter__` first" # nosec B101 self.app._stop_event.set() self.exit_stack.close() async def __aenter__(self) -> FastStream: self.app._init_async_cycle() + self.lifespan_scope = self.app.lifespan_context(**self._extra_options) + await self.lifespan_scope.__aenter__() await self.app._start(run_extra_options=self._extra_options) self._task = tg = anyio.create_task_group() await tg.__aenter__() @@ -114,7 +121,8 @@ async def __aexit__( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ - assert self.app._stop_event # nosec B101 + assert self.app._stop_event, "You should call `__enter__` first" # nosec B101 + await self.lifespan_scope.__aexit__(exc_type, exc_val, exec_tb) self.app._stop_event.set() await self._task.__aexit__(None, None, None) @@ -127,10 +135,31 @@ def __init__( self, broker: Broker, 
with_real: bool = False, - connect_only: bool = False, + connect_only: Optional[bool] = None, ): self.with_real = with_real self.broker = broker + + if connect_only is None: + try: + connect_only = is_contains_context_name( + self.__class__.__name__, + TestApp.__name__, + ) + + except Exception as e: + warnings.warn( + ( + f"\nError `{repr(e)}` occured at `{self.__class__.__name__}` AST parsing" + "\nPlease, report us by creating an Issue with your TestClient usecase" + "\nhttps://github.com/airtai/faststream/issues/new?labels=bug&template=bug_report.md&title=Bug:%20TestClient%20AST%20parsing" + ), + category=RuntimeWarning, + stacklevel=1, + ) + + connect_only = False + self.connect_only = connect_only async def __aenter__(self) -> Broker: diff --git a/faststream/broker/types.py b/faststream/broker/types.py index 3a3032c639..9a34c6841c 100644 --- a/faststream/broker/types.py +++ b/faststream/broker/types.py @@ -1,6 +1,6 @@ from typing import Any, Awaitable, Callable, Optional, Protocol, Tuple, TypeVar, Union -from faststream._compat import ParamSpec +from faststream._compat import ParamSpec, TypeAlias from faststream.broker.message import StreamMessage from faststream.types import DecodedMessage, SendableMessage @@ -9,57 +9,58 @@ StreamMsg = TypeVar("StreamMsg", bound=StreamMessage[Any]) ConnectionType = TypeVar("ConnectionType") -SyncFilter = Callable[[StreamMsg], bool] -AsyncFilter = Callable[[StreamMsg], Awaitable[bool]] -Filter = Union[ + +SyncFilter: TypeAlias = Callable[[StreamMsg], bool] +AsyncFilter: TypeAlias = Callable[[StreamMsg], Awaitable[bool]] +Filter: TypeAlias = Union[ SyncFilter[StreamMsg], AsyncFilter[StreamMsg], ] -SyncParser = Callable[ +SyncParser: TypeAlias = Callable[ [MsgType], StreamMsg, ] -AsyncParser = Callable[ +AsyncParser: TypeAlias = Callable[ [MsgType], Awaitable[StreamMsg], ] -AsyncCustomParser = Union[ +AsyncCustomParser: TypeAlias = Union[ AsyncParser[MsgType, StreamMsg], Callable[ [MsgType, AsyncParser[MsgType, StreamMsg]], 
Awaitable[StreamMsg], ], ] -Parser = Union[ +Parser: TypeAlias = Union[ AsyncParser[MsgType, StreamMsg], SyncParser[MsgType, StreamMsg], ] -CustomParser = Union[ +CustomParser: TypeAlias = Union[ AsyncCustomParser[MsgType, StreamMsg], SyncParser[MsgType, StreamMsg], ] -SyncDecoder = Callable[ +SyncDecoder: TypeAlias = Callable[ [StreamMsg], DecodedMessage, ] -AsyncDecoder = Callable[ +AsyncDecoder: TypeAlias = Callable[ [StreamMsg], Awaitable[DecodedMessage], ] -AsyncCustomDecoder = Union[ +AsyncCustomDecoder: TypeAlias = Union[ AsyncDecoder[StreamMsg], Callable[ [StreamMsg, AsyncDecoder[StreamMsg]], Awaitable[DecodedMessage], ], ] -Decoder = Union[ +Decoder: TypeAlias = Union[ AsyncDecoder[StreamMsg], SyncDecoder[StreamMsg], ] -CustomDecoder = Union[ +CustomDecoder: TypeAlias = Union[ AsyncCustomDecoder[StreamMsg], SyncDecoder[StreamMsg], ] @@ -113,17 +114,17 @@ async def publish( ... -WrappedReturn = Tuple[T_HandlerReturn, Optional[AsyncPublisherProtocol]] +WrappedReturn: TypeAlias = Tuple[T_HandlerReturn, Optional[AsyncPublisherProtocol]] -AsyncWrappedHandlerCall = Callable[ +AsyncWrappedHandlerCall: TypeAlias = Callable[ [StreamMessage[MsgType]], Awaitable[Optional[WrappedReturn[T_HandlerReturn]]], ] -SyncWrappedHandlerCall = Callable[ +SyncWrappedHandlerCall: TypeAlias = Callable[ [StreamMessage[MsgType]], Optional[WrappedReturn[T_HandlerReturn]], ] -WrappedHandlerCall = Union[ +WrappedHandlerCall: TypeAlias = Union[ AsyncWrappedHandlerCall[MsgType, T_HandlerReturn], SyncWrappedHandlerCall[MsgType, T_HandlerReturn], ] diff --git a/faststream/cli/docs/app.py b/faststream/cli/docs/app.py index 650c2a9cba..58d5623cdd 100644 --- a/faststream/cli/docs/app.py +++ b/faststream/cli/docs/app.py @@ -1,10 +1,11 @@ import json +import warnings from pathlib import Path -from typing import Optional +from typing import Optional, Sequence import typer -from faststream.__about__ import INSTALL_YAML +from faststream.__about__ import INSTALL_WATCHFILES, INSTALL_YAML from 
faststream._compat import model_parse from faststream.asyncapi.generate import get_app_schema from faststream.asyncapi.schema import Schema @@ -28,14 +29,27 @@ def serve( 8000, help="documentation hosting port", ), + reload: bool = typer.Option( + False, + "--reload", + is_flag=True, + help="Restart documentation at directory files changes", + ), ) -> None: """Serve project AsyncAPI schema""" + if ":" in app: - _, app_obj = import_from_string(app) + module, app_obj = import_from_string(app) raw_schema = get_app_schema(app_obj) + module_parent = module.parent + extra_extensions: Sequence[str] = () + else: - schema_filepath = Path.cwd() / app + module_parent = Path.cwd() + schema_filepath = module_parent / app + extra_extensions = (schema_filepath.suffix,) + if schema_filepath.suffix == ".json": data = schema_filepath.read_text() @@ -50,6 +64,7 @@ def serve( schema = yaml.safe_load(f) data = json.dumps(schema) + else: raise ValueError( f"Unknown extension given - {app}; Please provide app in format [python_module:FastStream] or [asyncapi.yaml/.json] - path to your application or documentation" @@ -57,11 +72,24 @@ def serve( raw_schema = model_parse(Schema, data) - serve_app( - schema=raw_schema, - host=host, - port=port, - ) + if reload is True: + try: + from faststream.cli.supervisors.watchfiles import WatchReloader + + except ImportError: + warnings.warn(INSTALL_WATCHFILES, category=ImportWarning, stacklevel=1) + serve_app(raw_schema, host, port) + + else: + WatchReloader( + target=serve_app, + args=(raw_schema, host, port), + reload_dirs=(str(module_parent),), + extra_extensions=extra_extensions, + ).run() + + else: + serve_app(raw_schema, host, port) @docs_app.command(name="gen") diff --git a/faststream/cli/main.py b/faststream/cli/main.py index 242b78a9d2..a685b1ba27 100644 --- a/faststream/cli/main.py +++ b/faststream/cli/main.py @@ -1,11 +1,15 @@ import logging import sys -from typing import Dict, Optional +import warnings +from typing import Dict, List, 
Optional import anyio import typer +from click.exceptions import MissingParameter +from pydantic import ValidationError +from typer.core import TyperOption -from faststream.__about__ import __version__ +from faststream.__about__ import INSTALL_WATCHFILES, __version__ from faststream.cli.docs.app import docs_app from faststream.cli.utils.imports import import_from_string from faststream.cli.utils.logs import LogLevels, get_log_level, set_log_level @@ -87,8 +91,16 @@ def run( is_flag=True, help="Restart app at directory files changes", ), - app_dir: Optional[str] = typer.Option( - None, + watch_extensions: List[str] = typer.Option( + (), + "--extension", + "--reload-extension", + "--reload-ext", + "--ext", + help="List of file extensions to watch by", + ), + app_dir: str = typer.Option( + ".", "--app-dir", help=( "Look for APP in the specified directory, by adding this to the PYTHONPATH." @@ -97,6 +109,12 @@ def run( ), ) -> None: """Run [MODULE:APP] FastStream application""" + if watch_extensions and not reload: + typer.echo( + "Extra reload extensions has no effect without `--reload` flag." + "\nProbably, you forgot it?" 
+ ) + app, extra = parse_cli_args(app, *ctx.args) casted_log_level = get_log_level(log_level) @@ -109,15 +127,20 @@ def run( raise ValueError("You can't use reload option with multiprocessing") if reload is True: - from faststream.cli.supervisors.watchfiles import WatchReloader + try: + from faststream.cli.supervisors.watchfiles import WatchReloader + except ImportError: + warnings.warn(INSTALL_WATCHFILES, category=ImportWarning, stacklevel=1) + _run(*args) - module_path, _ = import_from_string(app) + else: + module_path, _ = import_from_string(app) - WatchReloader( - target=_run, - args=args, - reload_dirs=[str(module_path)] + ([app_dir] if app_dir else []), - ).run() + WatchReloader( + target=_run, + args=args, + reload_dirs=[str(module_path)] + ([app_dir] if app_dir else []), + ).run() elif workers > 1: from faststream.cli.supervisors.multiprocess import Multiprocess @@ -129,11 +152,7 @@ def run( ).run() else: - _run( - app=app, - extra_options=extra, - log_level=casted_log_level, - ) + _run(*args) def _run( @@ -154,9 +173,6 @@ def _run( Returns: None - Raises: - ImportError: If `uvloop` is not installed. - Note: This function uses the `anyio.run()` function to run the application. !!! 
note @@ -167,10 +183,7 @@ def _run( set_log_level(log_level, app_obj) - if sys.platform not in ("win32", "cygwin", "cli") and sys.version_info < ( - 3, - 12, - ): # pragma: no cover + if sys.platform not in ("win32", "cygwin", "cli"): # pragma: no cover try: import uvloop except ImportError: @@ -178,8 +191,23 @@ def _run( else: uvloop.install() # type: ignore[attr-defined] - anyio.run( - app_obj.run, - app_level, - extra_options, - ) + try: + anyio.run( + app_obj.run, + app_level, + extra_options, + ) + + except ValidationError as e: + ex = MissingParameter( + param=TyperOption(param_decls=[f"--{x['loc'][0]}" for x in e.errors()]) + ) + + try: + from typer import rich_utils + + rich_utils.rich_format_error(ex) + except ImportError: + ex.show() + + sys.exit(1) diff --git a/faststream/cli/supervisors/watchfiles.py b/faststream/cli/supervisors/watchfiles.py index 58c5ead269..1dd05734ba 100644 --- a/faststream/cli/supervisors/watchfiles.py +++ b/faststream/cli/supervisors/watchfiles.py @@ -8,7 +8,7 @@ from faststream.types import DecoratedCallable -class ExtendedFilter(watchfiles.PythonFilter): +class ExtendedFilter(watchfiles.PythonFilter): # type: ignore[misc] """A class that extends the `watchfiles.PythonFilter` class. Attributes: @@ -48,7 +48,6 @@ def __init__( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ super().__init__(ignore_paths=ignore_paths, extra_extensions=extra_extensions) - self.extensions = self.extensions + (".env", ".yaml") self.ignore_dirs = self.ignore_dirs + ( "venv", "env", @@ -56,6 +55,7 @@ def __init__( ".mypy_cache", ".pytest_cache", ".ruff_cache", + "__pycache__", ) @@ -81,6 +81,7 @@ def __init__( args: Tuple[Any, ...], reload_dirs: Sequence[Union[Path, str]], reload_delay: float = 0.3, + extra_extensions: Sequence[str] = (), ) -> None: """Initialize a WatchFilesReloader object. 
@@ -102,7 +103,7 @@ def __init__( self.watcher = watchfiles.watch( *reload_dirs, step=int(reload_delay * 1000), - watch_filter=ExtendedFilter(), + watch_filter=ExtendedFilter(extra_extensions=extra_extensions), stop_event=self.should_exit, yield_on_timeout=True, ) diff --git a/faststream/cli/utils/parser.py b/faststream/cli/utils/parser.py index 1270c5dbcb..894053f2a2 100644 --- a/faststream/cli/utils/parser.py +++ b/faststream/cli/utils/parser.py @@ -43,7 +43,14 @@ def parse_cli_args(*args: str) -> Tuple[str, Dict[str, SettingField]]: else: v = field_args - extra_kwargs[remove_prefix(k, "no_")] = v + key = remove_prefix(k, "no_") + if (exists := extra_kwargs.get(key)) is not None: + if not isinstance(exists, list): + v = [exists, v] + else: + v = exists + [v] + + extra_kwargs[key] = v field_args = [] k = item @@ -57,6 +64,8 @@ def parse_cli_args(*args: str) -> Tuple[str, Dict[str, SettingField]]: def remove_prefix(text: str, prefix: str) -> str: """Removes a prefix from a given text. + Python 3.8 compatibility function + Args: text (str): The text from which the prefix will be removed. prefix (str): The prefix to be removed from the text. diff --git a/faststream/exceptions.py b/faststream/exceptions.py index 9dd1f05c18..4aa4888a2d 100644 --- a/faststream/exceptions.py +++ b/faststream/exceptions.py @@ -26,3 +26,6 @@ class RejectMessage(HandlerException): "You should use `reply_to` to send response to long-living queue " "and `rpc` to get response in sync mode." 
) + + +NOT_CONNECTED_YET = "Please, `connect()` the broker first" diff --git a/faststream/kafka/asyncapi.py b/faststream/kafka/asyncapi.py index a00b9391a1..253e40d815 100644 --- a/faststream/kafka/asyncapi.py +++ b/faststream/kafka/asyncapi.py @@ -25,6 +25,9 @@ class Handler(LogicHandler, AsyncAPIOperation): """ def schema(self) -> Dict[str, Channel]: + if not self.include_in_schema: + return {} + channels = {} payloads = self.get_payloads() @@ -65,6 +68,9 @@ class Publisher(LogicPublisher, AsyncAPIOperation): """ def schema(self) -> Dict[str, Channel]: + if not self.include_in_schema: + return {} + payloads = self.get_payloads() return { diff --git a/faststream/kafka/broker.py b/faststream/kafka/broker.py index 8edbbcb34f..77aba1a38e 100644 --- a/faststream/kafka/broker.py +++ b/faststream/kafka/broker.py @@ -6,6 +6,7 @@ Callable, Dict, Iterable, + List, Literal, Optional, Sequence, @@ -35,6 +36,7 @@ WrappedReturn, ) from faststream.broker.wrapper import FakePublisher, HandlerCallWrapper +from faststream.exceptions import NOT_CONNECTED_YET from faststream.kafka.asyncapi import Handler, Publisher from faststream.kafka.message import KafkaMessage from faststream.kafka.producer import AioKafkaFastProducer @@ -72,8 +74,9 @@ class KafkaBroker( publish(*args, **kwargs): Publishes a message to Kafka. """ - handlers: Dict[str, Handler] # type: ignore[assignment] - _publishers: Dict[str, Publisher] # type: ignore[assignment] + url: List[str] + handlers: Dict[str, Handler] + _publishers: Dict[str, Publisher] _producer: Optional[AioKafkaFastProducer] def __init__( @@ -104,7 +107,9 @@ def __init__( protocol = "kafka" super().__init__( - url=bootstrap_servers, + url=[bootstrap_servers] + if isinstance(bootstrap_servers, str) + else list(bootstrap_servers), protocol=protocol, protocol_version=protocol_version, security=security, @@ -186,8 +191,8 @@ async def start(self) -> None: """ Start the KafkaBroker and message handlers. 
""" - context.set_local( - "log_context", + context.set_global( + "default_log_context", self._get_log_context(None, ""), ) @@ -233,7 +238,7 @@ async def process_wrapper( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ async with WatcherContext(watcher, message): - r = await self._execute_handler(func, message) + r = await func(message) pub_response: Optional[AsyncPublisherProtocol] if message.reply_to: @@ -307,6 +312,7 @@ def subscriber( # type: ignore[override] # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **original_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -407,6 +413,7 @@ def subscriber( # type: ignore[override] batch=batch, batch_timeout_ms=batch_timeout_ms, max_records=max_records, + include_in_schema=include_in_schema, ), ) @@ -464,6 +471,7 @@ def publisher( # type: ignore[override] title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: """ Create a message publisher for the specified topic. @@ -496,9 +504,12 @@ def publisher( # type: ignore[override] title=title, _description=description, _schema=schema, + include_in_schema=include_in_schema, ), ) super().publisher(topic, publisher) + if self._producer is not None: + publisher._producer = self._producer return publisher @override @@ -517,7 +528,7 @@ async def publish( # type: ignore[override] Raises: RuntimeError: If KafkaBroker is not started yet. """ - assert self._producer, "KafkaBroker is not started yet" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 return await self._producer.publish(*args, **kwargs) async def publish_batch( @@ -535,5 +546,5 @@ async def publish_batch( Raises: RuntimeError: If KafkaBroker is not started yet. 
""" - assert self._producer, "KafkaBroker is not started yet" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 await self._producer.publish_batch(*args, **kwargs) diff --git a/faststream/kafka/broker.pyi b/faststream/kafka/broker.pyi index 417b42cfe3..4e21c418c8 100644 --- a/faststream/kafka/broker.pyi +++ b/faststream/kafka/broker.pyi @@ -56,8 +56,8 @@ class KafkaBroker( KafkaLoggingMixin, BrokerAsyncUsecase[aiokafka.ConsumerRecord, ConsumerConnectionParams], ): - handlers: Dict[str, Handler] # type: ignore[assignment] - _publishers: Dict[str, Publisher] # type: ignore[assignment] + handlers: Dict[str, Handler] + _publishers: Dict[str, Publisher] _producer: Optional[AioKafkaFastProducer] def __init__( @@ -246,6 +246,7 @@ class KafkaBroker( # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -308,6 +309,7 @@ class KafkaBroker( # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -329,6 +331,7 @@ class KafkaBroker( title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: ... 
@override async def publish( # type: ignore[override] diff --git a/faststream/kafka/fastapi.pyi b/faststream/kafka/fastapi.pyi index 340842f505..e5707ea408 100644 --- a/faststream/kafka/fastapi.pyi +++ b/faststream/kafka/fastapi.pyi @@ -194,6 +194,7 @@ class KafkaRouter(StreamRouter[ConsumerRecord]): # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -254,6 +255,7 @@ class KafkaRouter(StreamRouter[ConsumerRecord]): # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -392,6 +394,7 @@ class KafkaRouter(StreamRouter[ConsumerRecord]): title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: ... 
@overload def after_startup( diff --git a/faststream/kafka/handler.py b/faststream/kafka/handler.py index f427fcae90..3997296de8 100644 --- a/faststream/kafka/handler.py +++ b/faststream/kafka/handler.py @@ -8,7 +8,7 @@ from fast_depends.core import CallModel from faststream.__about__ import __version__ -from faststream._compat import override +from faststream._compat import Unpack, override from faststream.broker.handler import AsyncHandler from faststream.broker.message import StreamMessage from faststream.broker.middlewares import BaseMiddleware @@ -23,6 +23,7 @@ from faststream.broker.wrapper import HandlerCallWrapper from faststream.kafka.message import KafkaMessage from faststream.kafka.parser import AioKafkaParser +from faststream.kafka.shared.schemas import ConsumerConnectionParams class LogicHandler(AsyncHandler[ConsumerRecord]): @@ -69,6 +70,7 @@ def __init__( # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, ): """Initialize a Kafka consumer for the specified topics. @@ -93,6 +95,7 @@ def __init__( log_context_builder=log_context_builder, description=description, title=title, + include_in_schema=include_in_schema, ) self.group_id = group_id @@ -108,8 +111,11 @@ def __init__( self.task = None self.consumer = None - # TODO: use **kwargs: Unpack[ConsumerConnectionParams] with py3.12 release 2023-10-02 - async def start(self, **consumer_kwargs: Any) -> None: + @override + async def start( # type: ignore[override] + self, + **consumer_kwargs: Unpack[ConsumerConnectionParams], + ) -> None: """Start the consumer. 
Args: @@ -129,8 +135,11 @@ async def start(self, **consumer_kwargs: Any) -> None: ) await consumer.start() self.task = asyncio.create_task(self._consume()) + await super().start() async def close(self) -> None: + await super().close() + if self.consumer is not None: await self.consumer.stop() self.consumer = None diff --git a/faststream/kafka/message.py b/faststream/kafka/message.py index 599d7595f9..569102f12f 100644 --- a/faststream/kafka/message.py +++ b/faststream/kafka/message.py @@ -33,7 +33,6 @@ def __init__( self.is_manual = is_manual self.consumer = consumer - self.commited = False async def ack(self, **kwargs: Any) -> None: """ @@ -47,28 +46,4 @@ async def ack(self, **kwargs: Any) -> None: """ if self.is_manual and not self.commited: await self.consumer.commit() - self.commited = True - - async def nack(self, **kwargs: Any) -> None: - """ - Negative acknowledgment of the Kafka message. - - Args: - **kwargs (Any): Additional keyword arguments. - - Returns: - None: This method does not return a value. - """ - self.commited = True - - async def reject(self, **kwargs: Any) -> None: - """ - Reject the Kafka message. - - Args: - **kwargs (Any): Additional keyword arguments. - - Returns: - None: This method does not return a value. 
- """ - self.commited = True + await super().ack() diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py index 6f33ec5cf6..83c976e2bb 100644 --- a/faststream/kafka/parser.py +++ b/faststream/kafka/parser.py @@ -27,7 +27,7 @@ async def parse_message( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ headers = {i: j.decode() for i, j in message.headers} - handler = context.get("handler_") + handler = context.get_local("handler_") return KafkaMessage( body=message.value, headers=headers, @@ -64,7 +64,7 @@ async def parse_message_batch( first = message[0] last = message[-1] headers = {i: j.decode() for i, j in first.headers} - handler = context.get("handler_") + handler = context.get_local("handler_") return KafkaMessage( body=[m.value for m in message], headers=headers, diff --git a/faststream/kafka/producer.py b/faststream/kafka/producer.py index 737378da01..6690f7123c 100644 --- a/faststream/kafka/producer.py +++ b/faststream/kafka/producer.py @@ -4,6 +4,7 @@ from aiokafka import AIOKafkaProducer from faststream.broker.parsers import encode_message +from faststream.exceptions import NOT_CONNECTED_YET from faststream.types import SendableMessage @@ -71,7 +72,7 @@ async def publish( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ - assert self._producer, "You need to connect broker at first" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 message, content_type = encode_message(message) @@ -125,7 +126,7 @@ async def publish_batch( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ - assert self._producer, "You need to connect broker at first" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 batch = self._producer.create_batch() diff --git a/faststream/kafka/publisher.py b/faststream/kafka/publisher.py index 97b73c75cb..865be59d06 100644 --- a/faststream/kafka/publisher.py 
+++ b/faststream/kafka/publisher.py @@ -5,6 +5,7 @@ from faststream.__about__ import __version__ from faststream._compat import override +from faststream.exceptions import NOT_CONNECTED_YET from faststream.kafka.producer import AioKafkaFastProducer from faststream.kafka.shared.publisher import ABCPublisher from faststream.types import SendableMessage @@ -66,10 +67,11 @@ async def publish( # type: ignore[override] The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ - assert self._producer, "Please, setup `_producer` first" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 assert ( # nosec B101 self.batch or len(messages) < 2 ), "You can't send multiple messages without `batch` flag" + assert self.topic, "You have to specify outgoing topic" # nosec B101 if not self.batch: return await self._producer.publish( diff --git a/faststream/kafka/router.py b/faststream/kafka/router.py index 53ca2186ab..e0e3cc3e74 100644 --- a/faststream/kafka/router.py +++ b/faststream/kafka/router.py @@ -73,6 +73,7 @@ def publisher( # type: ignore[override] title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: """Publishes a message to a topic. 
@@ -106,6 +107,11 @@ def publisher( # type: ignore[override] batch=batch, _description=description, _schema=schema, + include_in_schema=( + include_in_schema + if self.include_in_schema is None + else self.include_in_schema + ), ), ) publisher_key = self._get_publisher_key(new_publisher) diff --git a/faststream/kafka/router.pyi b/faststream/kafka/router.pyi index ea43e885c7..10500ccf4c 100644 --- a/faststream/kafka/router.pyi +++ b/faststream/kafka/router.pyi @@ -37,6 +37,7 @@ class KafkaRouter(BrokerRouter[str, aiokafka.ConsumerRecord]): ] = None, parser: Optional[CustomParser[aiokafka.ConsumerRecord, KafkaMessage]] = None, decoder: Optional[CustomDecoder[KafkaMessage]] = None, + include_in_schema: bool = True, ): ... @override @staticmethod @@ -61,6 +62,7 @@ class KafkaRouter(BrokerRouter[str, aiokafka.ConsumerRecord]): title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: ... @override def subscriber( # type: ignore[override] @@ -115,6 +117,7 @@ class KafkaRouter(BrokerRouter[str, aiokafka.ConsumerRecord]): # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], diff --git a/faststream/kafka/shared/logging.py b/faststream/kafka/shared/logging.py index 07007a5dfb..d100b031b5 100644 --- a/faststream/kafka/shared/logging.py +++ b/faststream/kafka/shared/logging.py @@ -100,7 +100,7 @@ def fmt(self) -> str: "%(asctime)s %(levelname)s - " + f"%(topic)-{self._max_topic_len}s | " + (f"%(group_id)-{self._max_group_len}s | " if self._max_group_len else "") - + "%(message_id)-10s " + + f"%(message_id)-{self._message_id_ln}s " + "- %(message)s" ) diff --git a/faststream/kafka/shared/router.pyi b/faststream/kafka/shared/router.pyi index 11e4dde8a8..52657b4f96 100644 --- a/faststream/kafka/shared/router.pyi +++ 
b/faststream/kafka/shared/router.pyi @@ -72,6 +72,7 @@ class KafkaRoute: # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> None: ... @overload diff --git a/faststream/log/formatter.py b/faststream/log/formatter.py index 76bdb206d6..29eb6425e8 100644 --- a/faststream/log/formatter.py +++ b/faststream/log/formatter.py @@ -152,7 +152,9 @@ def make_record_with_extra( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ if extra is None: - extra = context.get_local("log_context") + extra = context.get_local( + "log_context", default=context.get("default_log_context") + ) record = original_makeRecord( self, diff --git a/faststream/nats/asyncapi.py b/faststream/nats/asyncapi.py index d05532bdb0..aa12937f66 100644 --- a/faststream/nats/asyncapi.py +++ b/faststream/nats/asyncapi.py @@ -15,6 +15,9 @@ class Handler(LogicNatsHandler): def schema(self) -> Dict[str, Channel]: + if not self.include_in_schema: + return {} + payloads = self.get_payloads() handler_name = self._title or f"{self.subject}:{self.call_name}" return { @@ -41,6 +44,9 @@ def schema(self) -> Dict[str, Channel]: class Publisher(LogicPublisher): def schema(self) -> Dict[str, Channel]: + if not self.include_in_schema: + return {} + payloads = self.get_payloads() return { diff --git a/faststream/nats/broker.py b/faststream/nats/broker.py index 7dc520e515..a78acb15c6 100644 --- a/faststream/nats/broker.py +++ b/faststream/nats/broker.py @@ -1,7 +1,17 @@ import logging from functools import partial, wraps from types import TracebackType -from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Type, Union +from typing import ( + Any, + Awaitable, + Callable, + Dict, + List, + Optional, + Sequence, + Type, + Union, +) import nats from fast_depends.dependencies import Depends @@ -18,7 +28,7 @@ JetStreamContext, ) -from faststream._compat import override +from 
faststream._compat import TypeAlias, override from faststream.broker.core.asyncronous import BrokerAsyncUsecase, default_filter from faststream.broker.message import StreamMessage from faststream.broker.middlewares import BaseMiddleware @@ -33,6 +43,7 @@ WrappedReturn, ) from faststream.broker.wrapper import FakePublisher, HandlerCallWrapper +from faststream.exceptions import NOT_CONNECTED_YET from faststream.nats.asyncapi import Handler, Publisher from faststream.nats.helpers import stream_builder from faststream.nats.js_stream import JStream @@ -43,17 +54,18 @@ from faststream.types import AnyDict, DecodedMessage from faststream.utils.context.main import context -Subject = str +Subject: TypeAlias = str class NatsBroker( NatsLoggingMixin, BrokerAsyncUsecase[Msg, Client], ): + url: List[str] stream: Optional[JetStreamContext] - handlers: Dict[Subject, Handler] # type: ignore[assignment] - _publishers: Dict[Subject, Publisher] # type: ignore[assignment] + handlers: Dict[Subject, Handler] + _publishers: Dict[Subject, Publisher] _producer: Optional[NatsFastProducer] _js_producer: Optional[NatsJSFastProducer] @@ -88,10 +100,7 @@ async def connect( ) -> Client: connection = await super().connect(*args, **kwargs) for p in self._publishers.values(): - if p.stream is not None: - p._producer = self._js_producer - else: - p._producer = self._producer + self.__set_publisher_producer(p) return connection async def _connect( @@ -141,8 +150,8 @@ async def _close( self.__is_connected = False async def start(self) -> None: - context.set_local( - "log_context", + context.set_global( + "default_log_context", self._get_log_context(None, ""), ) @@ -206,7 +215,7 @@ async def process_wrapper( message: StreamMessage[Msg], ) -> WrappedReturn[T_HandlerReturn]: async with WatcherContext(watcher, message): - r = await self._execute_handler(func, message) + r = await func(message) pub_response: Optional[AsyncPublisherProtocol] if message.reply_to: @@ -284,6 +293,7 @@ def subscriber( # type: 
ignore[override] # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **original_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -358,6 +368,7 @@ def subscriber( # type: ignore[override] extra_options=extra_options, title=title, description=description, + include_in_schema=include_in_schema, log_context_builder=partial( self._get_log_context, stream=stream.name if stream else "", @@ -406,6 +417,7 @@ def publisher( # type: ignore[override] title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: if (stream := stream_builder.stream(stream)) is not None: stream.subjects.append(subject) @@ -424,9 +436,11 @@ def publisher( # type: ignore[override] title=title, _description=description, _schema=schema, + include_in_schema=include_in_schema, ), ) super().publisher(subject, publisher) + self.__set_publisher_producer(publisher) return publisher @override @@ -437,12 +451,19 @@ async def publish( # type: ignore[override] **kwargs: Any, ) -> Optional[DecodedMessage]: if stream is None: - assert self._producer, "NatsBroker is not started yet" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 return await self._producer.publish(*args, **kwargs) else: - assert self._js_producer, "NatsBroker is not started yet" # nosec B101 + assert self._js_producer, NOT_CONNECTED_YET # nosec B101 return await self._js_producer.publish( *args, stream=stream, **kwargs, # type: ignore[misc] ) + + def __set_publisher_producer(self, publisher: Publisher) -> None: + if publisher.stream is not None: + if self._js_producer is not None: + publisher._producer = self._js_producer + elif self._producer is not None: + publisher._producer = self._producer diff --git a/faststream/nats/broker.pyi b/faststream/nats/broker.pyi index 5e3c44df6f..1c8349b3d6 100644 --- a/faststream/nats/broker.pyi +++ 
b/faststream/nats/broker.pyi @@ -59,8 +59,8 @@ class NatsBroker( ): stream: Optional[JetStreamContext] - handlers: Dict[Subject, Handler] # type: ignore[assignment] - _publishers: Dict[Subject, Publisher] # type: ignore[assignment] + handlers: Dict[Subject, Handler] + _publishers: Dict[Subject, Publisher] _producer: Optional[NatsFastProducer] _js_producer: Optional[NatsJSFastProducer] @@ -248,6 +248,7 @@ class NatsBroker( # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -267,6 +268,7 @@ class NatsBroker( title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: ... @override async def publish( # type: ignore[override] diff --git a/faststream/nats/fastapi.pyi b/faststream/nats/fastapi.pyi index d9da1afcd2..f3bcdc70a8 100644 --- a/faststream/nats/fastapi.pyi +++ b/faststream/nats/fastapi.pyi @@ -177,8 +177,7 @@ class NatsRouter(StreamRouter[Msg]): title: Optional[str] = None, description: Optional[str] = None, **__service_kwargs: Any, - ) -> Callable[[Msg, bool], Awaitable[T_HandlerReturn]]: - pass + ) -> Callable[[Msg, bool], Awaitable[T_HandlerReturn]]: ... @override def subscriber( # type: ignore[override] self, @@ -211,6 +210,7 @@ class NatsRouter(StreamRouter[Msg]): # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -230,6 +230,7 @@ class NatsRouter(StreamRouter[Msg]): title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: ... 
@overload def after_startup( diff --git a/faststream/nats/handler.py b/faststream/nats/handler.py index 1eeb33be87..0aee960e6c 100644 --- a/faststream/nats/handler.py +++ b/faststream/nats/handler.py @@ -50,6 +50,7 @@ def __init__( # AsyncAPI information description: Optional[str] = None, title: Optional[str] = None, + include_in_schema: bool = True, ): reg, path = compile_path(subject, replace_symbol="*") self.subject = path @@ -64,6 +65,7 @@ def __init__( super().__init__( log_context_builder=log_context_builder, description=description, + include_in_schema=include_in_schema, title=title, ) @@ -112,7 +114,11 @@ async def start(self, connection: Union[Client, JetStreamContext]) -> None: # t **self.extra_options, ) + await super().start() + async def close(self) -> None: + await super().close() + if self.subscription is not None: await self.subscription.unsubscribe() self.subscription = None diff --git a/faststream/nats/message.py b/faststream/nats/message.py index 223e913470..121d621a75 100644 --- a/faststream/nats/message.py +++ b/faststream/nats/message.py @@ -11,13 +11,20 @@ class NatsMessage(StreamMessage[Msg]): is_js: bool = True async def ack(self, **kwargs: Any) -> None: + await super().ack() if self.is_js and not self.raw_message._ackd: await self.raw_message.ack() async def nack(self, **kwargs: Any) -> None: + await super().nack() if self.is_js and not self.raw_message._ackd: await self.raw_message.nak(**kwargs) async def reject(self, **kwargs: Any) -> None: + await super().reject() if self.is_js and not self.raw_message._ackd: await self.raw_message.term() + + async def in_progress(self, **kwargs: Any) -> None: + if self.is_js and not self.raw_message._ackd: + await self.raw_message.in_progress() diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py index 75384eecfb..a285e7c072 100644 --- a/faststream/nats/parser.py +++ b/faststream/nats/parser.py @@ -20,7 +20,7 @@ async def parse_message( ) -> StreamMessage[Msg]: headers = message.header or 
{} - handler = context.get("handler_") + handler = context.get_local("handler_") path: AnyDict = {} path_re: Optional[Pattern[str]] if ( # pragma: no branch diff --git a/faststream/nats/publisher.py b/faststream/nats/publisher.py index b7c98d6bbd..3d66573145 100644 --- a/faststream/nats/publisher.py +++ b/faststream/nats/publisher.py @@ -5,6 +5,7 @@ from faststream._compat import override from faststream.broker.publisher import BasePublisher +from faststream.exceptions import NOT_CONNECTED_YET from faststream.nats.js_stream import JStream from faststream.nats.producer import NatsFastProducer, NatsJSFastProducer from faststream.types import AnyDict, DecodedMessage, SendableMessage @@ -31,8 +32,8 @@ async def publish( # type: ignore[override] headers: Optional[Dict[str, str]] = None, **producer_kwargs: Any, ) -> Optional[DecodedMessage]: - assert self._producer, "Please, setup `_producer` first" # nosec B101 - assert self.subject, "You have to specify outcome subject" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 + assert self.subject, "You have to specify outgoing subject" # nosec B101 extra: AnyDict = { "reply_to": reply_to or self.reply_to, diff --git a/faststream/nats/router.py b/faststream/nats/router.py index d984e21155..233d269097 100644 --- a/faststream/nats/router.py +++ b/faststream/nats/router.py @@ -32,6 +32,7 @@ def publisher( # type: ignore[override] title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: new_publisher = self._update_publisher_prefix( self.prefix, @@ -42,6 +43,11 @@ def publisher( # type: ignore[override] title=title, _description=description, _schema=schema, + include_in_schema=( + include_in_schema + if self.include_in_schema is None + else self.include_in_schema + ), ), ) publisher_key = self._get_publisher_key(new_publisher) diff --git a/faststream/nats/router.pyi b/faststream/nats/router.pyi index 50d5fa0d28..2cf9339a02 
100644 --- a/faststream/nats/router.pyi +++ b/faststream/nats/router.pyi @@ -34,6 +34,7 @@ class NatsRouter(BaseRouter): middlewares: Optional[Sequence[Callable[[Msg], BaseMiddleware]]] = None, parser: Optional[CustomParser[Msg, NatsMessage]] = None, decoder: Optional[CustomDecoder[NatsMessage]] = None, + include_in_schema: bool = True, ): ... @override @staticmethod @@ -54,6 +55,7 @@ class NatsRouter(BaseRouter): title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, ) -> Publisher: ... @override def subscriber( # type: ignore[override] @@ -87,6 +89,7 @@ class NatsRouter(BaseRouter): # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], diff --git a/faststream/nats/shared/logging.py b/faststream/nats/shared/logging.py index 4b4a16cf4c..afbbd8db64 100644 --- a/faststream/nats/shared/logging.py +++ b/faststream/nats/shared/logging.py @@ -53,7 +53,7 @@ def fmt(self) -> str: + (f"%(stream)-{self._max_stream_len}s | " if self._max_stream_len else "") + (f"%(queue)-{self._max_queue_len}s | " if self._max_queue_len else "") + f"%(subject)-{self._max_subject_len}s | " - + "%(message_id)-10s " + + f"%(message_id)-{self._message_id_ln}s " "- %(message)s" ) diff --git a/faststream/nats/shared/router.pyi b/faststream/nats/shared/router.pyi index 67badceb58..4455bf074f 100644 --- a/faststream/nats/shared/router.pyi +++ b/faststream/nats/shared/router.pyi @@ -52,6 +52,7 @@ class NatsRoute: # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> None: ... 
diff --git a/faststream/rabbit/asyncapi.py b/faststream/rabbit/asyncapi.py index 5dbe09ec46..ce0ebba126 100644 --- a/faststream/rabbit/asyncapi.py +++ b/faststream/rabbit/asyncapi.py @@ -41,6 +41,9 @@ def name(self) -> str: ) def schema(self) -> Dict[str, Channel]: + if not self.include_in_schema: + return {} + payloads = self.get_payloads() return { @@ -113,6 +116,9 @@ class Handler(LogicHandler): """ def schema(self) -> Dict[str, Channel]: + if not self.include_in_schema: + return {} + payloads = self.get_payloads() handler_name = ( diff --git a/faststream/rabbit/broker.py b/faststream/rabbit/broker.py index 37160738f2..01557c5e3f 100644 --- a/faststream/rabbit/broker.py +++ b/faststream/rabbit/broker.py @@ -25,6 +25,7 @@ WrappedReturn, ) from faststream.broker.wrapper import FakePublisher, HandlerCallWrapper +from faststream.exceptions import NOT_CONNECTED_YET from faststream.rabbit.asyncapi import Handler, Publisher from faststream.rabbit.helpers import RabbitDeclarer from faststream.rabbit.message import RabbitMessage @@ -69,8 +70,9 @@ class RabbitBroker( _channel (Optional[aio_pika.RobustChannel]): The RabbitMQ channel instance. """ - handlers: Dict[int, Handler] # type: ignore[assignment] - _publishers: Dict[int, Publisher] # type: ignore[assignment] + url: str + handlers: Dict[int, Handler] + _publishers: Dict[int, Publisher] declarer: Optional[RabbitDeclarer] _producer: Optional[AioPikaFastProducer] @@ -132,7 +134,6 @@ def __init__( super().__init__( url=str(amqp_url), - protocol=amqp_url.scheme, protocol_version=protocol_version, security=security, ssl_context=security_args.get( @@ -267,15 +268,13 @@ async def start(self) -> None: Raises: RuntimeError: If the declarer is not initialized in the `connect` method. 
""" - context.set_local( - "log_context", + context.set_global( + "default_log_context", self._get_log_context(None, RabbitQueue(""), RabbitExchange("")), ) await super().start() - assert ( # nosec B101 - self.declarer - ), "Declarer should be initialized in `connect` method" + assert self.declarer, NOT_CONNECTED_YET # nosec B101 for publisher in self._publishers.values(): if publisher.exchange is not None: @@ -304,6 +303,7 @@ def subscriber( # type: ignore[override] # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **original_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -342,6 +342,7 @@ def subscriber( # type: ignore[override] description=description, title=title, virtual_host=self.virtual_host, + include_in_schema=include_in_schema, ), ) @@ -401,6 +402,7 @@ def publisher( # type: ignore[override] title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, priority: Optional[int] = None, **message_kwargs: Any, ) -> Publisher: @@ -440,6 +442,7 @@ def publisher( # type: ignore[override] _description=description, _schema=schema, virtual_host=self.virtual_host, + include_in_schema=include_in_schema, ) key = publisher._get_routing_hash() @@ -466,7 +469,7 @@ async def publish( # type: ignore[override] Union[aiormq.abc.ConfirmationFrameType, SendableMessage]: The confirmation frame or the response message. 
""" - assert self._producer, "RabbitBroker channel is not started yet" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 return await self._producer.publish(*args, **kwargs) def _process_message( @@ -509,7 +512,7 @@ async def process_wrapper( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ async with WatcherContext(watcher, message): - r = await self._execute_handler(func, message) + r = await func(message) pub_response: Optional[AsyncPublisherProtocol] if message.reply_to: @@ -539,9 +542,7 @@ async def declare_queue( Raises: RuntimeError: If the declarer is not initialized in the `connect` method. """ - assert ( # nosec B101 - self.declarer - ), "Declarer should be initialized in `connect` method" + assert self.declarer, NOT_CONNECTED_YET # nosec B101 return await self.declarer.declare_queue(queue) async def declare_exchange( @@ -560,7 +561,5 @@ async def declare_exchange( Raises: RuntimeError: If the declarer is not initialized in the `connect` method. 
""" - assert ( # nosec B101 - self.declarer - ), "Declarer should be initialized in `connect` method" + assert self.declarer, NOT_CONNECTED_YET # nosec B101 return await self.declarer.declare_exchange(exchange) diff --git a/faststream/rabbit/broker.pyi b/faststream/rabbit/broker.pyi index 7d114bef9b..40eb571e88 100644 --- a/faststream/rabbit/broker.pyi +++ b/faststream/rabbit/broker.pyi @@ -39,8 +39,8 @@ class RabbitBroker( RabbitLoggingMixin, BrokerAsyncUsecase[aio_pika.IncomingMessage, aio_pika.RobustConnection], ): - handlers: Dict[int, Handler] # type: ignore[assignment] - _publishers: Dict[int, Publisher] # type: ignore[assignment] + handlers: Dict[int, Handler] + _publishers: Dict[int, Publisher] declarer: Optional[RabbitDeclarer] _producer: Optional[AioPikaFastProducer] @@ -143,6 +143,7 @@ class RabbitBroker( # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -164,6 +165,7 @@ class RabbitBroker( title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, # message args headers: Optional[aio_pika.abc.HeadersType] = None, content_type: Optional[str] = None, diff --git a/faststream/rabbit/fastapi.pyi b/faststream/rabbit/fastapi.pyi index 017b0ed852..5d6068c0f3 100644 --- a/faststream/rabbit/fastapi.pyi +++ b/faststream/rabbit/fastapi.pyi @@ -99,8 +99,7 @@ class RabbitRouter(StreamRouter[IncomingMessage]): generate_unique_id_function: Callable[[APIRoute], str] = Default( generate_unique_id ), - ) -> None: - pass + ) -> None: ... 
def add_api_mq_route( # type: ignore[override] self, queue: Union[str, RabbitQueue], @@ -121,8 +120,7 @@ class RabbitRouter(StreamRouter[IncomingMessage]): title: Optional[str] = None, description: Optional[str] = None, **__service_kwargs: Any, - ) -> Callable[[IncomingMessage, bool], Awaitable[T_HandlerReturn]]: - pass + ) -> Callable[[IncomingMessage, bool], Awaitable[T_HandlerReturn]]: ... @override def subscriber( # type: ignore[override] self, @@ -142,6 +140,7 @@ class RabbitRouter(StreamRouter[IncomingMessage]): # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -163,6 +162,7 @@ class RabbitRouter(StreamRouter[IncomingMessage]): title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, # message args headers: Optional[aio_pika.abc.HeadersType] = None, content_type: Optional[str] = None, diff --git a/faststream/rabbit/handler.py b/faststream/rabbit/handler.py index accb5b4e93..49f73156d4 100644 --- a/faststream/rabbit/handler.py +++ b/faststream/rabbit/handler.py @@ -64,6 +64,7 @@ def __init__( # AsyncAPI information description: Optional[str] = None, title: Optional[str] = None, + include_in_schema: bool = True, virtual_host: str = "/", ): """Initialize a RabbitMQ consumer. 
@@ -82,6 +83,7 @@ def __init__( log_context_builder=log_context_builder, description=description, title=title, + include_in_schema=include_in_schema, ) self.queue = queue @@ -160,7 +162,11 @@ async def start(self, declarer: RabbitDeclarer) -> None: # type: ignore[overrid arguments=self.consume_args, ) + await super().start() + async def close(self) -> None: + await super().close() + if self._queue_obj is not None: if self._consumer_tag is not None: # pragma: no branch await self._queue_obj.cancel(self._consumer_tag) diff --git a/faststream/rabbit/message.py b/faststream/rabbit/message.py index 4cf644bd4d..5489d0a790 100644 --- a/faststream/rabbit/message.py +++ b/faststream/rabbit/message.py @@ -35,6 +35,7 @@ async def ack(self, **kwargs: Any) -> None: """ pika_message = self.raw_message + await super().ack() if ( pika_message._IncomingMessage__processed # type: ignore[attr-defined] or pika_message._IncomingMessage__no_ack # type: ignore[attr-defined] @@ -53,6 +54,7 @@ async def nack(self, **kwargs: Any) -> None: """ pika_message = self.raw_message + await super().nack() if ( pika_message._IncomingMessage__processed # type: ignore[attr-defined] or pika_message._IncomingMessage__no_ack # type: ignore[attr-defined] @@ -71,6 +73,7 @@ async def reject(self, **kwargs: Any) -> None: """ pika_message = self.raw_message + await super().reject() if ( pika_message._IncomingMessage__processed # type: ignore[attr-defined] or pika_message._IncomingMessage__no_ack # type: ignore[attr-defined] diff --git a/faststream/rabbit/parser.py b/faststream/rabbit/parser.py index f8890ed3da..0aea2447d3 100644 --- a/faststream/rabbit/parser.py +++ b/faststream/rabbit/parser.py @@ -108,14 +108,17 @@ def encode_message( The above docstring is autogenerated by docstring-gen library (https://docstring-gen.airt.ai) """ - if not isinstance(message, aio_pika.Message): + if isinstance(message, aio_pika.Message): + return message + + else: message, content_type = encode_message(message) delivery_mode 
= ( DeliveryMode.PERSISTENT if persist else DeliveryMode.NOT_PERSISTENT ) - message = aio_pika.Message( + return aio_pika.Message( message, **{ "delivery_mode": delivery_mode, @@ -125,5 +128,3 @@ def encode_message( **message_kwargs, }, ) - - return message diff --git a/faststream/rabbit/producer.py b/faststream/rabbit/producer.py index a04fe1ce33..6d9073452a 100644 --- a/faststream/rabbit/producer.py +++ b/faststream/rabbit/producer.py @@ -1,9 +1,7 @@ -from contextlib import asynccontextmanager from types import TracebackType from typing import ( Any, AsyncContextManager, - AsyncIterator, Optional, Type, Union, @@ -30,7 +28,7 @@ from faststream.rabbit.shared.types import TimeoutType from faststream.rabbit.types import AioPikaSendableMessage from faststream.types import SendableMessage -from faststream.utils.functions import timeout_scope +from faststream.utils.functions import fake_context, timeout_scope class AioPikaFastProducer: @@ -146,7 +144,7 @@ async def publish( self.declarer.queues[RABBIT_REPLY], ) else: - context = _fake_context() + context = fake_context() async with context as response_queue: r = await self._publish( @@ -301,8 +299,3 @@ async def __aexit__( """ self.lock.release() await self.queue.cancel(self.consumer_tag) - - -@asynccontextmanager -async def _fake_context() -> AsyncIterator[None]: - yield None diff --git a/faststream/rabbit/publisher.py b/faststream/rabbit/publisher.py index 79353d4337..3e5f689c7f 100644 --- a/faststream/rabbit/publisher.py +++ b/faststream/rabbit/publisher.py @@ -5,6 +5,7 @@ from aio_pika import IncomingMessage from faststream._compat import override +from faststream.exceptions import NOT_CONNECTED_YET from faststream.rabbit.producer import AioPikaFastProducer from faststream.rabbit.shared.publisher import ABCPublisher from faststream.rabbit.shared.schemas import get_routing_hash @@ -66,7 +67,7 @@ async def publish( # type: ignore[override] The above docstring is autogenerated by docstring-gen library 
(https://docstring-gen.airt.ai) """ - assert self._producer, "Please, setup `_producer` first" # nosec B101 + assert self._producer, NOT_CONNECTED_YET # nosec B101 return await self._producer.publish( message=message, exchange=self.exchange, diff --git a/faststream/rabbit/publisher.pyi b/faststream/rabbit/publisher.pyi index 676b3f3cb2..63fb903a9f 100644 --- a/faststream/rabbit/publisher.pyi +++ b/faststream/rabbit/publisher.pyi @@ -17,10 +17,8 @@ class LogicPublisher(ABCPublisher[aio_pika.IncomingMessage]): _producer: Optional[AioPikaFastProducer] = field(default=None, init=False) @property - def routing(self) -> Optional[str]: - pass - def _get_routing_hash(self) -> int: - pass + def routing(self) -> Optional[str]: ... + def _get_routing_hash(self) -> int: ... @abstractproperty def name(self) -> str: raise NotImplementedError() diff --git a/faststream/rabbit/router.py b/faststream/rabbit/router.py index ab6a0ab3df..2ba75a5a46 100644 --- a/faststream/rabbit/router.py +++ b/faststream/rabbit/router.py @@ -80,6 +80,7 @@ def publisher( # type: ignore[override] title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, priority: Optional[int] = None, **message_kwargs: Any, ) -> Publisher: @@ -120,6 +121,11 @@ def publisher( # type: ignore[override] title=title, _description=description, _schema=schema, + include_in_schema=( + include_in_schema + if self.include_in_schema is None + else self.include_in_schema + ), ), ) key = self._get_publisher_key(new_publisher) diff --git a/faststream/rabbit/router.pyi b/faststream/rabbit/router.pyi index 1d2cdd2ce4..c41512706d 100644 --- a/faststream/rabbit/router.pyi +++ b/faststream/rabbit/router.pyi @@ -34,6 +34,7 @@ class RabbitRouter(BrokerRouter[int, aio_pika.IncomingMessage]): ] = None, parser: Optional[CustomParser[aio_pika.IncomingMessage, RabbitMessage]] = None, decoder: Optional[CustomDecoder[RabbitMessage]] = None, + include_in_schema: bool = True, ): 
... @staticmethod @override @@ -68,6 +69,7 @@ class RabbitRouter(BrokerRouter[int, aio_pika.IncomingMessage]): # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> Callable[ [Callable[P_HandlerParams, T_HandlerReturn]], @@ -89,6 +91,7 @@ class RabbitRouter(BrokerRouter[int, aio_pika.IncomingMessage]): title: Optional[str] = None, description: Optional[str] = None, schema: Optional[Any] = None, + include_in_schema: bool = True, # message args headers: Optional[aio_pika.abc.HeadersType] = None, content_type: Optional[str] = None, diff --git a/faststream/rabbit/shared/logging.py b/faststream/rabbit/shared/logging.py index c308861a25..1aa171a887 100644 --- a/faststream/rabbit/shared/logging.py +++ b/faststream/rabbit/shared/logging.py @@ -98,7 +98,7 @@ def fmt(self) -> str: "%(asctime)s %(levelname)s - " f"%(exchange)-{self._max_exchange_len}s | " f"%(queue)-{self._max_queue_len}s | " - f"%(message_id)-10s " + f"%(message_id)-{self._message_id_ln}s " "- %(message)s" ) diff --git a/faststream/rabbit/shared/router.pyi b/faststream/rabbit/shared/router.pyi index ccf2bd8e87..2dbad67246 100644 --- a/faststream/rabbit/shared/router.pyi +++ b/faststream/rabbit/shared/router.pyi @@ -42,5 +42,6 @@ class RabbitRoute: # AsyncAPI information title: Optional[str] = None, description: Optional[str] = None, + include_in_schema: bool = True, **__service_kwargs: Any, ) -> None: ... 
diff --git a/faststream/rabbit/types.py b/faststream/rabbit/types.py index 251535b886..d25d93e801 100644 --- a/faststream/rabbit/types.py +++ b/faststream/rabbit/types.py @@ -2,6 +2,7 @@ import aio_pika +from faststream._compat import TypeAlias from faststream.rabbit.shared.types import TimeoutType from faststream.types import SendableMessage @@ -10,4 +11,4 @@ "AioPikaSendableMessage", ) -AioPikaSendableMessage = Union[aio_pika.Message, SendableMessage] +AioPikaSendableMessage: TypeAlias = Union[aio_pika.Message, SendableMessage] diff --git a/faststream/redis/__init__.py b/faststream/redis/__init__.py new file mode 100644 index 0000000000..5de9c487aa --- /dev/null +++ b/faststream/redis/__init__.py @@ -0,0 +1,20 @@ +from faststream.broker.test import TestApp +from faststream.redis.annotations import Redis, RedisMessage +from faststream.redis.broker import RedisBroker +from faststream.redis.router import RedisRouter +from faststream.redis.schemas import ListSub, PubSub, StreamSub +from faststream.redis.shared.router import RedisRoute +from faststream.redis.test import TestRedisBroker + +__all__ = ( + "Redis", + "RedisBroker", + "RedisMessage", + "RedisRoute", + "RedisRouter", + "TestRedisBroker", + "TestApp", + "PubSub", + "ListSub", + "StreamSub", +) diff --git a/faststream/redis/annotations.py b/faststream/redis/annotations.py new file mode 100644 index 0000000000..efa8937749 --- /dev/null +++ b/faststream/redis/annotations.py @@ -0,0 +1,20 @@ +from redis.asyncio.client import Redis as RedisClient + +from faststream._compat import Annotated +from faststream.annotations import ContextRepo, Logger, NoCast +from faststream.redis.broker import RedisBroker as RB +from faststream.redis.message import RedisMessage as RM +from faststream.utils.context import Context + +__all__ = ( + "Logger", + "ContextRepo", + "NoCast", + "RedisMessage", + "RedisBroker", + "Redis", +) + +RedisMessage = Annotated[RM, Context("message")] +RedisBroker = Annotated[RB, Context("broker")] 
+Redis = Annotated[RedisClient, Context("broker._connection")] diff --git a/faststream/redis/asyncapi.py b/faststream/redis/asyncapi.py new file mode 100644 index 0000000000..684e0d5679 --- /dev/null +++ b/faststream/redis/asyncapi.py @@ -0,0 +1,105 @@ +from typing import Dict + +from faststream.asyncapi.schema import ( + Channel, + ChannelBinding, + CorrelationId, + Message, + Operation, +) +from faststream.asyncapi.schema.bindings import redis +from faststream.asyncapi.utils import resolve_payloads +from faststream.redis.handler import LogicRedisHandler +from faststream.redis.publisher import LogicPublisher + + +class Handler(LogicRedisHandler): + @property + def name(self) -> str: + return self._title or f"{self.channel_name}:{self.call_name}" + + def schema(self) -> Dict[str, Channel]: + if not self.include_in_schema: + return {} + + payloads = self.get_payloads() + + method = None + if self.list_sub is not None: + method = "lpop" + + elif (ch := self.channel) is not None: + if ch.pattern: + method = "psubscribe" + else: + method = "subscribe" + + elif (stream := self.stream_sub) is not None: + if stream.group: + method = "xreadgroup" + else: + method = "xread" + + return { + self.name: Channel( + description=self.description, + subscribe=Operation( + message=Message( + title=f"{self.name}:Message", + payload=resolve_payloads(payloads), + correlationId=CorrelationId( + location="$message.header#/correlation_id" + ), + ), + ), + bindings=ChannelBinding( + redis=redis.ChannelBinding( + channel=self.channel_name, + group_name=getattr(self.stream_sub, "group", None), + consumer_name=getattr(self.stream_sub, "consumer", None), + method=method, + ) + ), + ) + } + + +class Publisher(LogicPublisher): + def schema(self) -> Dict[str, Channel]: + if not self.include_in_schema: + return {} + + payloads = self.get_payloads() + + method = None + if self.list is not None: + method = "rpush" + elif self.channel is not None: + method = "publish" + elif self.stream is not None: 
+ method = "xadd" + + return { + self.name: Channel( + description=self.description, + publish=Operation( + message=Message( + title=f"{self.name}:Message", + payload=resolve_payloads(payloads, "Publisher"), + correlationId=CorrelationId( + location="$message.header#/correlation_id" + ), + ), + ), + bindings=ChannelBinding( + redis=redis.ChannelBinding( + channel=self.channel_name, + method=method, + ) + ), + ) + } + + @property + def name(self) -> str: + return self.title or f"{self.channel_name}:Publisher" diff --git a/faststream/redis/broker.py b/faststream/redis/broker.py new file mode 100644 index 0000000000..a778125405 --- /dev/null +++ b/faststream/redis/broker.py @@ -0,0 +1,315 @@ +from functools import partial, wraps +from types import TracebackType +from typing import ( + Any, + Awaitable, + Callable, + Dict, + Optional, + Sequence, + Type, + Union, +) +from urllib.parse import urlparse + +from fast_depends.dependencies import Depends +from redis.asyncio.client import Redis +from redis.asyncio.connection import ConnectionPool, parse_url +from redis.exceptions import ResponseError + +from faststream._compat import TypeAlias, override +from faststream.broker.core.asyncronous import BrokerAsyncUsecase, default_filter +from faststream.broker.message import StreamMessage +from faststream.broker.middlewares import BaseMiddleware +from faststream.broker.push_back_watcher import BaseWatcher, WatcherContext +from faststream.broker.types import ( + AsyncPublisherProtocol, + CustomDecoder, + CustomParser, + Filter, + P_HandlerParams, + T_HandlerReturn, + WrappedReturn, +) +from faststream.broker.wrapper import FakePublisher, HandlerCallWrapper +from faststream.exceptions import NOT_CONNECTED_YET +from faststream.redis.asyncapi import Handler, Publisher +from faststream.redis.message import AnyRedisDict, RedisMessage +from faststream.redis.producer import RedisFastProducer +from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub +from 
faststream.redis.shared.logging import RedisLoggingMixin +from faststream.types import AnyDict, DecodedMessage +from faststream.utils.context.main import context + +Channel: TypeAlias = str + + +class RedisBroker( + RedisLoggingMixin, + BrokerAsyncUsecase[AnyRedisDict, "Redis[bytes]"], +): + url: str + handlers: Dict[int, Handler] + _publishers: Dict[int, Publisher] + + _producer: Optional[RedisFastProducer] + + def __init__( + self, + url: str = "redis://localhost:6379", + polling_interval: Optional[float] = None, + *, + protocol: Optional[str] = None, + protocol_version: Optional[str] = "custom", + **kwargs: Any, + ) -> None: + self.global_polling_interval = polling_interval + self._producer = None + + super().__init__( + url=url, + protocol_version=protocol_version, + **kwargs, + ) + + url_kwargs = urlparse(self.url) + self.protocol = protocol or url_kwargs.scheme + + async def connect( + self, + *args: Any, + **kwargs: Any, + ) -> "Redis[bytes]": + connection = await super().connect(*args, **kwargs) + for p in self._publishers.values(): + p._producer = self._producer + return connection + + @override + async def _connect( # type: ignore[override] + self, + url: str, + **kwargs: Any, + ) -> "Redis[bytes]": + url_options: AnyDict = parse_url(url) + url_options.update(kwargs) + pool = ConnectionPool(**url_options) + + client = Redis(connection_pool=pool) + self._producer = RedisFastProducer( + connection=client, + parser=self._global_parser, # type: ignore[arg-type] + decoder=self._global_parser, # type: ignore[arg-type] + ) + return client + + async def _close( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_val: Optional[BaseException] = None, + exec_tb: Optional[TracebackType] = None, + ) -> None: + if self._connection is not None: + await self._connection.aclose() # type: ignore[attr-defined] + + await super()._close(exc_type, exc_val, exec_tb) + + async def start(self) -> None: + context.set_global( + "default_log_context", + 
self._get_log_context(None, ""), + ) + + await super().start() + assert self._connection, NOT_CONNECTED_YET # nosec B101 + + for handler in self.handlers.values(): + if (stream := handler.stream_sub) is not None and stream.group: + try: + await self._connection.xgroup_create( + name=stream.name, + groupname=stream.group, + mkstream=True, + ) + except ResponseError as e: + if "already exists" not in str(e): + raise e + + c = self._get_log_context(None, handler.channel_name) + self._log(f"`{handler.call_name}` waiting for messages", extra=c) + await handler.start(self._connection) + + def _process_message( + self, + func: Callable[ + [StreamMessage[Any]], + Awaitable[T_HandlerReturn], + ], + watcher: BaseWatcher, + ) -> Callable[[StreamMessage[Any]], Awaitable[WrappedReturn[T_HandlerReturn]],]: + @wraps(func) + async def process_wrapper( + message: StreamMessage[Any], + ) -> WrappedReturn[T_HandlerReturn]: + async with WatcherContext( + watcher, + message, + redis=self._connection, + ): + r = await func(message) + + pub_response: Optional[AsyncPublisherProtocol] + if message.reply_to: + pub_response = FakePublisher( + partial(self.publish, channel=message.reply_to) + ) + else: + pub_response = None + + return r, pub_response + + return process_wrapper + + @override + def subscriber( # type: ignore[override] + self, + channel: Union[Channel, PubSub, None] = None, + *, + list: Union[Channel, ListSub, None] = None, + stream: Union[Channel, StreamSub, None] = None, + # broker arguments + dependencies: Sequence[Depends] = (), + parser: Optional[CustomParser[AnyRedisDict, RedisMessage]] = None, + decoder: Optional[CustomDecoder[RedisMessage]] = None, + middlewares: Optional[ + Sequence[Callable[[AnyRedisDict], BaseMiddleware]] + ] = None, + filter: Filter[RedisMessage] = default_filter, + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + include_in_schema: bool = True, + **original_kwargs: Any, + ) -> Callable[ + 
[Callable[P_HandlerParams, T_HandlerReturn]], + HandlerCallWrapper[Any, P_HandlerParams, T_HandlerReturn], + ]: + channel = PubSub.validate(channel) + list = ListSub.validate(list) + stream = StreamSub.validate(stream) + + if (any_of := channel or list or stream) is None: + raise ValueError( + "You should specify `channel`, `list`, `stream` subscriber type" + ) + + if all((channel, list)): + raise ValueError("You can't use `PubSub` and `ListSub` both") + elif all((channel, stream)): + raise ValueError("You can't use `PubSub` and `StreamSub` both") + elif all((list, stream)): + raise ValueError("You can't use `ListSub` and `StreamSub` both") + + self._setup_log_context(channel=any_of.name) + super().subscriber() + + key = Handler.get_routing_hash(any_of) + handler = self.handlers[key] = self.handlers.get( + key, + Handler( # type: ignore[abstract] + log_context_builder=partial( + self._get_log_context, + channel=any_of.name, + ), + # Redis + channel=channel, + list=list, + stream=stream, + # AsyncAPI + title=title, + description=description, + include_in_schema=include_in_schema, + ), + ) + + def consumer_wrapper( + func: Callable[P_HandlerParams, T_HandlerReturn], + ) -> HandlerCallWrapper[AnyRedisDict, P_HandlerParams, T_HandlerReturn,]: + handler_call, dependant = self._wrap_handler( + func, + extra_dependencies=dependencies, + **original_kwargs, + ) + + handler.add_call( + handler=handler_call, + filter=filter, + middlewares=middlewares, + parser=parser or self._global_parser, # type: ignore[arg-type] + decoder=decoder or self._global_decoder, # type: ignore[arg-type] + dependant=dependant, + ) + + return handler_call + + return consumer_wrapper + + @override + def publisher( # type: ignore[override] + self, + channel: Union[Channel, PubSub, None] = None, + list: Union[Channel, ListSub, None] = None, + stream: Union[Channel, StreamSub, None] = None, + headers: Optional[AnyDict] = None, + reply_to: str = "", + # AsyncAPI information + title: Optional[str] = None, 
+ description: Optional[str] = None, + schema: Optional[Any] = None, + include_in_schema: bool = True, + ) -> Publisher: + channel = PubSub.validate(channel) + list = ListSub.validate(list) + stream = StreamSub.validate(stream) + + any_of = channel or list or stream + if any_of is None: + raise ValueError(INCORRECT_SETUP_MSG) + + key = Handler.get_routing_hash(any_of) + publisher = self._publishers.get( + key, + Publisher( + channel=channel, + list=list, + stream=stream, + headers=headers, + reply_to=reply_to, + # AsyncAPI + title=title, + _description=description, + _schema=schema, + include_in_schema=include_in_schema, + ), + ) + super().publisher(key, publisher) + if self._producer is not None: + publisher._producer = self._producer + return publisher + + @override + async def publish( # type: ignore[override] + self, + *args: Any, + **kwargs: Any, + ) -> Optional[DecodedMessage]: + assert self._producer, NOT_CONNECTED_YET # nosec B101 + return await self._producer.publish(*args, **kwargs) + + async def publish_batch( + self, + *args: Any, + **kwargs: Any, + ) -> None: + assert self._producer, NOT_CONNECTED_YET # nosec B101 + return await self._producer.publish_batch(*args, **kwargs) diff --git a/faststream/redis/broker.pyi b/faststream/redis/broker.pyi new file mode 100644 index 0000000000..b0bcfcc14e --- /dev/null +++ b/faststream/redis/broker.pyi @@ -0,0 +1,214 @@ +import logging +from types import TracebackType +from typing import ( + Any, + Awaitable, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Type, + Union, +) + +from fast_depends.dependencies import Depends +from redis.asyncio.client import Redis +from redis.asyncio.connection import BaseParser, Connection, DefaultParser, Encoder + +from faststream._compat import TypeAlias, override +from faststream.asyncapi import schema as asyncapi +from faststream.broker.core.asyncronous import BrokerAsyncUsecase, default_filter +from faststream.broker.message import StreamMessage +from 
faststream.broker.middlewares import BaseMiddleware +from faststream.broker.push_back_watcher import BaseWatcher +from faststream.broker.types import ( + CustomDecoder, + CustomParser, + Filter, + P_HandlerParams, + T_HandlerReturn, + WrappedReturn, +) +from faststream.broker.wrapper import HandlerCallWrapper +from faststream.log import access_logger +from faststream.redis.asyncapi import Handler, Publisher +from faststream.redis.message import AnyRedisDict, RedisMessage +from faststream.redis.producer import RedisFastProducer +from faststream.redis.schemas import ListSub, PubSub, StreamSub +from faststream.redis.shared.logging import RedisLoggingMixin +from faststream.types import AnyDict, DecodedMessage, SendableMessage + +Channel: TypeAlias = str + +class RedisBroker( + RedisLoggingMixin, + BrokerAsyncUsecase[AnyRedisDict, "Redis[bytes]"], +): + url: str + handlers: Dict[int, Handler] + _publishers: Dict[int, Publisher] + + _producer: Optional[RedisFastProducer] + + def __init__( + self, + url: str = "redis://localhost:6379", + polling_interval: Optional[float] = None, + *, + host: str = "localhost", + port: Union[str, int] = 6379, + username: Optional[str] = None, + password: Optional[str] = None, + db: Union[str, int] = 0, + client_name: Optional[str] = None, + health_check_interval: float = 0, + max_connections: Optional[int] = None, + socket_timeout: Optional[float] = None, + socket_connect_timeout: Optional[float] = None, + socket_read_size: int = 65536, + socket_keepalive: bool = False, + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, + socket_type: int = 0, + retry_on_timeout: bool = False, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: Type[BaseParser] = DefaultParser, + connection_class: Type[Connection] = Connection, + encoder_class: Type[Encoder] = Encoder, + # AsyncAPI args + asyncapi_url: Optional[str] = None, + protocol: Optional[str] = None, + 
protocol_version: Optional[str] = "custom", + description: Optional[str] = None, + tags: Optional[Sequence[asyncapi.Tag]] = None, + # logging args + logger: Optional[logging.Logger] = access_logger, + log_level: int = logging.INFO, + log_fmt: Optional[str] = None, + ) -> None: ... + async def connect( + self, + url: str = "redis://localhost:6379", + host: str = "localhost", + port: Union[str, int] = 6379, + username: Optional[str] = None, + password: Optional[str] = None, + db: Union[str, int] = 0, + client_name: Optional[str] = None, + health_check_interval: float = 0, + max_connections: Optional[int] = None, + socket_timeout: Optional[float] = None, + socket_connect_timeout: Optional[float] = None, + socket_read_size: int = 65536, + socket_keepalive: bool = False, + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, + socket_type: int = 0, + retry_on_timeout: bool = False, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: Type[BaseParser] = DefaultParser, + connection_class: Type[Connection] = Connection, + encoder_class: Type[Encoder] = Encoder, + ) -> "Redis[bytes]": ... 
+ @override + async def _connect( # type: ignore[override] + self, + url: str, + host: str = "localhost", + port: Union[str, int] = 6379, + username: Optional[str] = None, + password: Optional[str] = None, + db: Union[str, int] = 0, + client_name: Optional[str] = None, + health_check_interval: float = 0, + max_connections: Optional[int] = None, + socket_timeout: Optional[float] = None, + socket_connect_timeout: Optional[float] = None, + socket_read_size: int = 65536, + socket_keepalive: bool = False, + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, + socket_type: int = 0, + retry_on_timeout: bool = False, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: Type[BaseParser] = DefaultParser, + connection_class: Type[Connection] = Connection, + encoder_class: Type[Encoder] = Encoder, + ) -> "Redis[bytes]": ... + async def _close( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_val: Optional[BaseException] = None, + exec_tb: Optional[TracebackType] = None, + ) -> None: ... + async def start(self) -> None: ... + def _process_message( + self, + func: Callable[ + [StreamMessage[Any]], + Awaitable[T_HandlerReturn], + ], + watcher: BaseWatcher, + ) -> Callable[[StreamMessage[Any]], Awaitable[WrappedReturn[T_HandlerReturn]],]: ... 
+ @override + def subscriber( # type: ignore[override] + self, + channel: Union[Channel, PubSub, None] = None, + *, + list: Union[Channel, ListSub, None] = None, + stream: Union[Channel, StreamSub, None] = None, + # broker arguments + dependencies: Sequence[Depends] = (), + parser: Optional[CustomParser[AnyRedisDict, RedisMessage]] = None, + decoder: Optional[CustomDecoder[RedisMessage]] = None, + middlewares: Optional[ + Sequence[Callable[[AnyRedisDict], BaseMiddleware]] + ] = None, + filter: Filter[RedisMessage] = default_filter, + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + include_in_schema: bool = True, + **__service_kwargs: Any, + ) -> Callable[ + [Callable[P_HandlerParams, T_HandlerReturn]], + HandlerCallWrapper[Any, P_HandlerParams, T_HandlerReturn], + ]: ... + @override + def publisher( # type: ignore[override] + self, + channel: Union[Channel, PubSub, None] = None, + list: Union[Channel, ListSub, None] = None, + stream: Union[Channel, StreamSub, None] = None, + headers: Optional[AnyDict] = None, + reply_to: str = "", + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + schema: Optional[Any] = None, + include_in_schema: bool = True, + ) -> Publisher: ... + @override + async def publish( # type: ignore[override] + self, + message: SendableMessage, + channel: Optional[str] = None, + reply_to: str = "", + headers: Optional[AnyDict] = None, + correlation_id: Optional[str] = None, + *, + list: Optional[str] = None, + stream: Optional[str] = None, + rpc: bool = False, + rpc_timeout: Optional[float] = 30.0, + raise_timeout: bool = False, + ) -> Optional[DecodedMessage]: ... + async def publish_batch( + self, + *msgs: SendableMessage, + list: str, + ) -> None: ... 
diff --git a/faststream/redis/fastapi.py b/faststream/redis/fastapi.py new file mode 100644 index 0000000000..b4902860a9 --- /dev/null +++ b/faststream/redis/fastapi.py @@ -0,0 +1,17 @@ +from faststream._compat import override +from faststream.broker.fastapi.router import StreamRouter +from faststream.redis.broker import RedisBroker +from faststream.redis.message import AnyRedisDict + + +class RedisRouter(StreamRouter[AnyRedisDict]): + broker_class = RedisBroker + + @override + @staticmethod + def _setup_log_context( # type: ignore[override] + main_broker: RedisBroker, + including_broker: RedisBroker, + ) -> None: + for h in including_broker.handlers.values(): + main_broker._setup_log_context(h.channel_name) diff --git a/faststream/redis/fastapi.pyi b/faststream/redis/fastapi.pyi new file mode 100644 index 0000000000..d1db3b967f --- /dev/null +++ b/faststream/redis/fastapi.pyi @@ -0,0 +1,152 @@ +import logging +from enum import Enum +from typing import ( + Any, + Callable, + Dict, + List, + Mapping, + Optional, + Sequence, + Type, + Union, +) + +from fast_depends.dependencies import Depends +from fastapi import params +from fastapi.datastructures import Default +from fastapi.routing import APIRoute +from fastapi.utils import generate_unique_id +from redis.asyncio.connection import BaseParser, Connection, DefaultParser, Encoder +from starlette import routing +from starlette.responses import JSONResponse, Response +from starlette.types import ASGIApp, Lifespan + +from faststream._compat import TypeAlias, override +from faststream.asyncapi import schema as asyncapi +from faststream.broker.core.asyncronous import default_filter +from faststream.broker.fastapi.router import StreamRouter +from faststream.broker.middlewares import BaseMiddleware +from faststream.broker.types import ( + CustomDecoder, + CustomParser, + Filter, + P_HandlerParams, + T_HandlerReturn, +) +from faststream.broker.wrapper import HandlerCallWrapper +from faststream.log import access_logger +from 
faststream.redis.asyncapi import Publisher +from faststream.redis.broker import RedisBroker +from faststream.redis.message import AnyRedisDict, RedisMessage +from faststream.redis.schemas import ListSub, PubSub, StreamSub +from faststream.types import AnyDict + +Channel: TypeAlias = str + +class RedisRouter(StreamRouter[AnyRedisDict]): + broker_class = RedisBroker + + def __init__( + self, + url: str = "redis://localhost:6379", + polling_interval: Optional[float] = None, + *, + host: str = "localhost", + port: Union[str, int] = 6379, + username: Optional[str] = None, + password: Optional[str] = None, + db: Union[str, int] = 0, + client_name: Optional[str] = None, + health_check_interval: float = 0, + max_connections: Optional[int] = None, + socket_timeout: Optional[float] = None, + socket_connect_timeout: Optional[float] = None, + socket_read_size: int = 65536, + socket_keepalive: bool = False, + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, + socket_type: int = 0, + retry_on_timeout: bool = False, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: Type[BaseParser] = DefaultParser, + connection_class: Type[Connection] = Connection, + encoder_class: Type[Encoder] = Encoder, + # AsyncAPI args + asyncapi_url: Optional[str] = None, + protocol: Optional[str] = None, + protocol_version: Optional[str] = "custom", + description: Optional[str] = None, + asyncapi_tags: Optional[Sequence[asyncapi.Tag]] = None, + schema_url: Optional[str] = "/asyncapi", + setup_state: bool = True, + # logging args + logger: Optional[logging.Logger] = access_logger, + log_level: int = logging.INFO, + log_fmt: Optional[str] = None, + # FastAPI kwargs + prefix: str = "", + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + default_response_class: Type[Response] = Default(JSONResponse), + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = 
None, + callbacks: Optional[List[routing.BaseRoute]] = None, + routes: Optional[List[routing.BaseRoute]] = None, + redirect_slashes: bool = True, + default: Optional[ASGIApp] = None, + dependency_overrides_provider: Optional[Any] = None, + route_class: Type[APIRoute] = APIRoute, + on_startup: Optional[Sequence[Callable[[], Any]]] = None, + on_shutdown: Optional[Sequence[Callable[[], Any]]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + lifespan: Optional[Lifespan[Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: ... + @override + @staticmethod + def _setup_log_context( # type: ignore[override] + main_broker: RedisBroker, + including_broker: RedisBroker, + ) -> None: ... + @override + def subscriber( # type: ignore[override] + self, + channel: Union[Channel, PubSub, None] = None, + *, + list: Union[Channel, ListSub, None] = None, + stream: Union[Channel, StreamSub, None] = None, + # broker arguments + dependencies: Sequence[Depends] = (), + parser: Optional[CustomParser[AnyRedisDict, RedisMessage]] = None, + decoder: Optional[CustomDecoder[RedisMessage]] = None, + middlewares: Optional[ + Sequence[Callable[[AnyRedisDict], BaseMiddleware]] + ] = None, + filter: Filter[RedisMessage] = default_filter, + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + include_in_schema: bool = True, + **__service_kwargs: Any, + ) -> Callable[ + [Callable[P_HandlerParams, T_HandlerReturn]], + HandlerCallWrapper[Any, P_HandlerParams, T_HandlerReturn], + ]: ... 
+ @override + def publisher( # type: ignore[override] + self, + channel: Union[Channel, PubSub, None] = None, + list: Union[Channel, ListSub, None] = None, + stream: Union[Channel, StreamSub, None] = None, + headers: Optional[AnyDict] = None, + reply_to: str = "", + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + schema: Optional[Any] = None, + include_in_schema: bool = True, + ) -> Publisher: ... diff --git a/faststream/redis/handler.py b/faststream/redis/handler.py new file mode 100644 index 0000000000..14f5aa47ca --- /dev/null +++ b/faststream/redis/handler.py @@ -0,0 +1,313 @@ +import asyncio +import json +from contextlib import suppress +from functools import partial +from typing import ( + Any, + Awaitable, + Callable, + Dict, + Generator, + Hashable, + List, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +import anyio +from fast_depends.core import CallModel +from redis.asyncio.client import PubSub as RPubSub +from redis.asyncio.client import Redis + +from faststream._compat import override +from faststream.broker.handler import AsyncHandler +from faststream.broker.message import StreamMessage +from faststream.broker.middlewares import BaseMiddleware +from faststream.broker.parsers import resolve_custom_func +from faststream.broker.types import ( + CustomDecoder, + CustomParser, + Filter, + P_HandlerParams, + T_HandlerReturn, +) +from faststream.broker.wrapper import HandlerCallWrapper +from faststream.redis.message import ( + AnyRedisDict, + RedisMessage, +) +from faststream.redis.parser import RawMessage, RedisParser, bDATA_KEY +from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub +from faststream.types import AnyDict + + +class LogicRedisHandler(AsyncHandler[AnyRedisDict]): + subscription: Optional[RPubSub] + task: Optional["asyncio.Task[Any]"] + + def __init__( + self, + *, + log_context_builder: Callable[[StreamMessage[Any]], Dict[str, str]], + # Redis info + channel: 
Optional[PubSub] = None, + list: Optional[ListSub] = None, + stream: Optional[StreamSub] = None, + last_id: str = "$", + # AsyncAPI information + description: Optional[str] = None, + title: Optional[str] = None, + include_in_schema: bool = True, + ): + self.channel = channel + self.list_sub = list + self.stream_sub = stream + + self.subscription = None + self.task = None + + self.last_id = last_id + + super().__init__( + log_context_builder=log_context_builder, + description=description, + title=title, + include_in_schema=include_in_schema, + ) + + @property + def channel_name(self) -> str: + any_of = self.channel or self.list_sub or self.stream_sub + assert any_of, INCORRECT_SETUP_MSG # nosec B101 + return any_of.name + + def add_call( + self, + *, + handler: HandlerCallWrapper[AnyDict, P_HandlerParams, T_HandlerReturn], + dependant: CallModel[P_HandlerParams, T_HandlerReturn], + parser: Optional[CustomParser[AnyDict, RedisMessage]], + decoder: Optional[CustomDecoder[RedisMessage]], + filter: Filter[RedisMessage], + middlewares: Optional[Sequence[Callable[[AnyDict], BaseMiddleware]]], + ) -> None: + super().add_call( + handler=handler, + parser=resolve_custom_func(parser, RedisParser.parse_message), + decoder=resolve_custom_func(decoder, RedisParser.decode_message), + filter=filter, # type: ignore[arg-type] + dependant=dependant, + middlewares=middlewares, + ) + + @override + async def start(self, client: "Redis[bytes]") -> None: # type: ignore[override] + self.started = anyio.Event() + + consume: Union[ + Callable[[], Awaitable[Optional[AnyRedisDict]]], + Callable[[], Awaitable[Optional[Sequence[AnyRedisDict]]]], + ] + sleep: float + + if (list_sub := self.list_sub) is not None: + sleep = list_sub.polling_interval + consume = partial( + self._consume_list_msg, + client=client, + ) + self.started.set() + + elif (channel := self.channel) is not None: + self.subscription = psub = client.pubsub() + + if channel.pattern: + await psub.psubscribe(channel.name) + else: + 
await psub.subscribe(channel.name) + + consume = partial( + psub.get_message, + ignore_subscribe_messages=True, + timeout=channel.polling_interval, + ) + sleep = 0.01 + self.started.set() + + elif self.stream_sub is not None: + consume = partial( # type: ignore[assignment] + self._consume_stream_msg, + client=client, + ) + sleep = 0.01 + + else: + raise AssertionError("unreachable") + + await super().start() + self.task = asyncio.create_task(self._consume(consume, sleep)) + # wait until Stream starts to consume + await anyio.sleep(0.01) + await self.started.wait() + + async def close(self) -> None: + await super().close() + + if self.task is not None: + if not self.task.done(): + self.task.cancel() + self.task = None + + if self.subscription is not None: + await self.subscription.unsubscribe() + await self.subscription.aclose() # type: ignore[attr-defined] + self.subscription = None + + @staticmethod + def get_routing_hash(channel: Hashable) -> int: + return hash(channel) + + async def _consume( + self, + consume: Union[ + Callable[[], Awaitable[Optional[AnyRedisDict]]], + Callable[[], Awaitable[Optional[Sequence[AnyRedisDict]]]], + ], + sleep: float, + ) -> None: + connected = True + + while self.running: + with suppress(Exception): + try: + m = await consume() + + except Exception: + if connected is True: + connected = False + await anyio.sleep(5) + + else: + if connected is False: + connected = True + + if msgs := ( + (m,) if isinstance(m, dict) else m + ): # pragma: no branch + for i in msgs: + await self.consume(i) + + finally: + await anyio.sleep(sleep) + + async def _consume_stream_msg( + self, + client: "Redis[bytes]", + ) -> Union[None, AnyRedisDict, Generator[AnyRedisDict, None, None]]: + stream = self.stream_sub + assert stream # nosec B101 + + if stream.group and stream.consumer: + read = client.xreadgroup( + groupname=stream.group, + consumername=stream.consumer, + streams={stream.name: ">"}, + block=stream.polling_interval, + noack=stream.no_ack, + ) 
+ + else: + read = client.xread( + {stream.name: self.last_id}, + block=stream.polling_interval, + ) + + self.started.set() + + for stream_name, msgs in cast( + Tuple[Tuple[bytes, Tuple[Tuple[bytes, AnyDict], ...]], ...], + await read, + ): + if msgs: + self.last_id = msgs[-1][0].decode() + + if stream.batch: + parsed: List[Any] = [] + ids = [] + for message_id, msg in msgs: + ids.append(message_id.decode()) + + m = msg.get(bDATA_KEY, msg) + try: + data, _ = RedisParser.parse_one_msg(m) + data = json.loads(data) + except Exception: + data = m + parsed.append(data) + + return AnyRedisDict( + type="batch", + channel=stream_name, + data=parsed, + message_id=ids[0], + message_ids=ids, + ) + + else: + return ( + AnyRedisDict( + type="stream", + channel=stream_name, + data=msg.get( + bDATA_KEY, + RawMessage.encode(message=msg).encode(), + ), + message_id=message_id.decode(), + message_ids=[message_id.decode()], + ) + for message_id, msg in msgs + ) + + return None + + async def _consume_list_msg( + self, + client: "Redis[bytes]", + ) -> Optional[AnyRedisDict]: + list_sub = self.list_sub + assert list_sub # nosec B101 + + count = list_sub.records + + msg = await client.lpop(name=list_sub.name, count=count) + + if msg: + if count is not None: + parsed: List[Any] = [] + for m in msg: + try: + data, _ = RedisParser.parse_one_msg(m) + data = json.loads(data) + except Exception: + data = m + parsed.append(data) + msg = parsed + + if count is None: + return AnyRedisDict( + type="list", + channel=list_sub.name.encode(), + data=msg, + ) + + else: + return AnyRedisDict( + type="batch", + channel=list_sub.name.encode(), + data=msg, + ) + + return None diff --git a/faststream/redis/message.py b/faststream/redis/message.py new file mode 100644 index 0000000000..8601ae8df4 --- /dev/null +++ b/faststream/redis/message.py @@ -0,0 +1,65 @@ +from typing import Any, List, Literal, Optional, TypeVar, Union + +from redis.asyncio import Redis + +from faststream._compat import NotRequired, 
TypedDict, override +from faststream.broker.message import StreamMessage +from faststream.utils.context.main import context + + +class PubSubMessage(TypedDict): + channel: bytes + data: Union[bytes, List[bytes]] + type: str + message_id: NotRequired[str] + message_ids: NotRequired[List[str]] + + +class OneMessage(PubSubMessage): + type: Literal["stream", "list", "message"] # type: ignore[misc] + data: bytes # type: ignore[misc] + pattern: NotRequired[Optional[bytes]] + + +class BatchMessage(PubSubMessage): + type: Literal["batch"] # type: ignore[misc] + data: List[bytes] # type: ignore[misc] + + +class AnyRedisDict(PubSubMessage): + type: Literal["stream", "list", "message", "batch"] # type: ignore[misc] + data: Union[bytes, List[bytes]] # type: ignore[misc] + pattern: NotRequired[Optional[bytes]] + + +MsgType = TypeVar("MsgType", OneMessage, BatchMessage, AnyRedisDict) + + +class RedisAckMixin(StreamMessage[MsgType]): + @override + async def ack( # type: ignore[override] + self, + redis: "Redis[bytes]", + **kwargs: Any, + ) -> None: + if ( + not self.commited + and (ids := self.raw_message.get("message_ids")) + and (handler := context.get_local("handler_")) + and (stream := handler.stream_sub) + and (group := stream.group) + ): + await redis.xack(self.raw_message["channel"], group, *ids) # type: ignore[no-untyped-call] + await super().ack() + + +class RedisMessage(RedisAckMixin[AnyRedisDict]): + pass + + +class OneRedisMessage(RedisAckMixin[OneMessage]): + pass + + +class BatchRedisMessage(RedisAckMixin[BatchMessage]): + pass diff --git a/faststream/redis/parser.py b/faststream/redis/parser.py new file mode 100644 index 0000000000..4226d39dfe --- /dev/null +++ b/faststream/redis/parser.py @@ -0,0 +1,156 @@ +from typing import Optional, Pattern, Tuple, Union, overload +from uuid import uuid4 + +from pydantic import BaseModel, Field + +from faststream._compat import dump_json, model_parse, model_to_json +from faststream.broker.parsers import decode_message, 
encode_message +from faststream.redis.message import ( + BatchMessage, + BatchRedisMessage, + OneMessage, + OneRedisMessage, +) +from faststream.types import AnyDict, DecodedMessage, SendableMessage +from faststream.utils.context.main import context + +DATA_KEY = "__data__" +bDATA_KEY = DATA_KEY.encode() + + +class RawMessage(BaseModel): + data: bytes + headers: AnyDict = Field(default_factory=dict) + + @classmethod + def build( + cls, + message: SendableMessage, + reply_to: str = "", + headers: Optional[AnyDict] = None, + correlation_id: Optional[str] = None, + ) -> "RawMessage": + payload, content_type = encode_message(message) + + headers_to_send = { + "correlation_id": correlation_id or str(uuid4()), + } + + if content_type: + headers_to_send["content-type"] = content_type + + if reply_to: + headers_to_send["reply_to"] = reply_to + + if headers is not None: + headers_to_send.update(headers) + + return cls( + data=payload, + headers=headers_to_send, + ) + + @classmethod + def encode( + cls, + message: SendableMessage, + reply_to: str = "", + headers: Optional[AnyDict] = None, + correlation_id: Optional[str] = None, + ) -> str: + return model_to_json( + cls.build( + message=message, + reply_to=reply_to, + headers=headers, + correlation_id=correlation_id, + ) + ) + + +class RedisParser: + @classmethod + @overload + async def parse_message( + cls, + message: OneMessage, + ) -> OneRedisMessage: + pass + + @classmethod + @overload + async def parse_message( + cls, + message: BatchMessage, + ) -> BatchRedisMessage: + pass + + @classmethod + async def parse_message( + cls, + message: Union[OneMessage, BatchMessage], + ) -> Union[OneRedisMessage, BatchRedisMessage]: + id_ = str(uuid4()) + + if message["type"] == "batch": + data = dump_json( + [cls.parse_one_msg(x)[0] for x in message["data"]] + ).encode() + + return BatchRedisMessage( + raw_message=message, + body=data, + content_type="application/json", + message_id=id_, + correlation_id=id_, + ) + + else: + data, 
headers = cls.parse_one_msg(message["data"]) + + channel = message.get("channel", b"").decode() + + handler = context.get_local("handler_") + path_re: Optional[Pattern[str]] + path: AnyDict = {} + if ( + handler + and handler.channel is not None + and (path_re := handler.channel.path_regex) is not None + ): + if path_re is not None: + match = path_re.match(channel) + if match: + path = match.groupdict() + + return OneRedisMessage( + raw_message=message, + body=data, + path=path, + headers=headers, + reply_to=headers.get("reply_to", ""), + content_type=headers.get("content-type", ""), + message_id=message.get("message_id", id_), + correlation_id=headers.get("correlation_id", id_), + ) + + @staticmethod + def parse_one_msg(raw_data: bytes) -> Tuple[bytes, AnyDict]: + try: + obj = model_parse(RawMessage, raw_data) + except Exception: + # Raw Redis message format + data = raw_data + headers: AnyDict = {} + else: + # FastStream message format + data = obj.data + headers = obj.headers + + return data, headers + + @staticmethod + async def decode_message( + msg: OneRedisMessage, + ) -> DecodedMessage: + return decode_message(msg) diff --git a/faststream/redis/producer.py b/faststream/redis/producer.py new file mode 100644 index 0000000000..b6db441fae --- /dev/null +++ b/faststream/redis/producer.py @@ -0,0 +1,148 @@ +from typing import Any, Optional, Union, overload +from uuid import uuid4 + +from redis.asyncio.client import PubSub, Redis + +from faststream.broker.parsers import encode_message, resolve_custom_func +from faststream.broker.types import ( + AsyncCustomDecoder, + AsyncCustomParser, + AsyncDecoder, + AsyncParser, +) +from faststream.exceptions import WRONG_PUBLISH_ARGS +from faststream.redis.message import ( + AnyRedisDict, + BatchMessage, + BatchRedisMessage, + OneMessage, + OneRedisMessage, +) +from faststream.redis.parser import DATA_KEY, RawMessage, RedisParser +from faststream.redis.schemas import INCORRECT_SETUP_MSG +from faststream.types import AnyDict, 
DecodedMessage, SendableMessage +from faststream.utils.functions import timeout_scope + + +class RedisFastProducer: + _connection: "Redis[bytes]" + _decoder: AsyncDecoder[Any] + _parser: AsyncParser[AnyRedisDict, Any] + + @overload + def __init__( + self, + connection: "Redis[bytes]", + parser: Optional[AsyncCustomParser[OneMessage, OneRedisMessage]], + decoder: Optional[AsyncCustomDecoder[OneRedisMessage]], + ) -> None: + pass + + @overload + def __init__( + self, + connection: "Redis[bytes]", + parser: Optional[AsyncCustomParser[BatchMessage, BatchRedisMessage]], + decoder: Optional[AsyncCustomDecoder[BatchRedisMessage]], + ) -> None: + pass + + def __init__( + self, + connection: "Redis[bytes]", + parser: Union[ + None, + AsyncCustomParser[OneMessage, OneRedisMessage], + AsyncCustomParser[BatchMessage, BatchRedisMessage], + ], + decoder: Union[ + None, + AsyncCustomDecoder[OneRedisMessage], + AsyncCustomDecoder[BatchRedisMessage], + ], + ) -> None: + self._connection = connection + self._parser = resolve_custom_func( + parser, # type: ignore[arg-type,assignment] + RedisParser.parse_message, + ) + self._decoder = resolve_custom_func(decoder, RedisParser.decode_message) + + async def publish( + self, + message: SendableMessage, + channel: Optional[str] = None, + reply_to: str = "", + headers: Optional[AnyDict] = None, + correlation_id: Optional[str] = None, + *, + list: Optional[str] = None, + stream: Optional[str] = None, + rpc: bool = False, + rpc_timeout: Optional[float] = 30.0, + raise_timeout: bool = False, + ) -> Optional[DecodedMessage]: + if not any((channel, list, stream)): + raise ValueError(INCORRECT_SETUP_MSG) + + psub: Optional[PubSub] = None + if rpc is True: + if reply_to: + raise WRONG_PUBLISH_ARGS + + reply_to = str(uuid4()) + psub = self._connection.pubsub() + await psub.subscribe(reply_to) + + msg = RawMessage.encode( + message=message, + reply_to=reply_to, + headers=headers, + correlation_id=correlation_id, + ) + + if channel is not None: + 
await self._connection.publish(channel, msg) + elif list is not None: + await self._connection.rpush(list, msg) + elif stream is not None: + await self._connection.xadd(stream, {DATA_KEY: msg}) + else: + raise AssertionError("unreachable") + + if psub is None: + return None + + else: + m = None + with timeout_scope(rpc_timeout, raise_timeout): + # skip subscribe message + await psub.get_message( + ignore_subscribe_messages=True, + timeout=rpc_timeout or 0.0, + ) + + # get real response + m = await psub.get_message( + ignore_subscribe_messages=True, + timeout=rpc_timeout or 0.0, + ) + + await psub.unsubscribe() + await psub.aclose() # type: ignore[attr-defined] + + if m is None: + if raise_timeout: + raise TimeoutError() + else: + return None + else: + return await self._decoder(await self._parser(m)) + + async def publish_batch( + self, + *msgs: SendableMessage, + list: str, + ) -> None: + batch = (encode_message(msg)[0] for msg in msgs) + await self._connection.rpush(list, *batch) diff --git a/faststream/redis/publisher.py b/faststream/redis/publisher.py new file mode 100644 index 0000000000..857374ed21 --- /dev/null +++ b/faststream/redis/publisher.py @@ -0,0 +1,77 @@ +from dataclasses import dataclass, field +from typing import Optional, Union + +from faststream._compat import override +from faststream.broker.publisher import BasePublisher +from faststream.exceptions import NOT_CONNECTED_YET +from faststream.redis.message import AnyRedisDict +from faststream.redis.producer import RedisFastProducer +from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub +from faststream.types import AnyDict, DecodedMessage, SendableMessage + + +@dataclass +class LogicPublisher(BasePublisher[AnyRedisDict]): + channel: Optional[PubSub] = field(default=None) + list: Optional[ListSub] = field(default=None) + stream: Optional[StreamSub] = field(default=None) + reply_to: str = field(default="") + headers: Optional[AnyDict] = field(default=None) + + 
_producer: Optional[RedisFastProducer] = field(default=None, init=False) + + @override + async def publish( # type: ignore[override] + self, + message: SendableMessage, + channel: Union[str, PubSub, None] = None, + reply_to: str = "", + headers: Optional[AnyDict] = None, + correlation_id: Optional[str] = None, + *, + list: Union[str, ListSub, None] = None, + stream: Union[str, StreamSub, None] = None, + rpc: bool = False, + rpc_timeout: Optional[float] = 30.0, + raise_timeout: bool = False, + ) -> Optional[DecodedMessage]: + assert self._producer, NOT_CONNECTED_YET # nosec B101 + + channel = PubSub.validate(channel or self.channel) + list = ListSub.validate(list or self.list) + stream = StreamSub.validate(stream or self.stream) + + assert any( + (channel, list, stream) + ), "You have to specify outgoing channel" # nosec B101 + + headers_to_send = (self.headers or {}).copy() + if headers is not None: + headers_to_send.update(headers) + + if getattr(list, "batch", False): + await self._producer.publish_batch( + *message, + list=list.name, # type: ignore[union-attr] + ) + return None + + else: + return await self._producer.publish( + message=message, + channel=getattr(channel, "name", None), + list=getattr(list, "name", None), + stream=getattr(stream, "name", None), + reply_to=reply_to or self.reply_to, + correlation_id=correlation_id, + headers=headers_to_send, + rpc=rpc, + rpc_timeout=rpc_timeout, + raise_timeout=raise_timeout, + ) + + @property + def channel_name(self) -> str: + any_of = self.channel or self.list or self.stream + assert any_of, INCORRECT_SETUP_MSG # nosec B101 + return any_of.name diff --git a/faststream/redis/router.py b/faststream/redis/router.py new file mode 100644 index 0000000000..bd76fbac64 --- /dev/null +++ b/faststream/redis/router.py @@ -0,0 +1,82 @@ +from typing import Any, Dict, Optional, Union + +from faststream._compat import model_copy, override +from faststream.redis.asyncapi import Handler, Publisher +from faststream.redis.schemas 
import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub +from faststream.redis.shared.router import RedisRouter as BaseRouter +from faststream.types import AnyDict + + +class RedisRouter(BaseRouter): + _publishers: Dict[int, Publisher] # type: ignore[assignment] + + @override + @staticmethod + def _get_publisher_key(publisher: Publisher) -> int: # type: ignore[override] + any_of = publisher.channel or publisher.list or publisher.stream + if any_of is None: + raise ValueError(INCORRECT_SETUP_MSG) + return Handler.get_routing_hash(any_of) + + @override + @staticmethod + def _update_publisher_prefix( # type: ignore[override] + prefix: str, + publisher: Publisher, + ) -> Publisher: + if publisher.channel is not None: + publisher.channel = model_copy( + publisher.channel, update={"name": prefix + publisher.channel.name} + ) + elif publisher.list is not None: + publisher.list = model_copy( + publisher.list, update={"name": prefix + publisher.list.name} + ) + elif publisher.stream is not None: + publisher.stream = model_copy( + publisher.stream, update={"name": prefix + publisher.stream.name} + ) + else: + raise AssertionError("unreachable") + return publisher + + @override + def publisher( # type: ignore[override] + self, + channel: Union[str, PubSub, None] = None, + list: Union[str, ListSub, None] = None, + stream: Union[str, StreamSub, None] = None, + headers: Optional[AnyDict] = None, + reply_to: str = "", + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + schema: Optional[Any] = None, + include_in_schema: bool = True, + ) -> Publisher: + if not any((stream, list, channel)): + raise ValueError(INCORRECT_SETUP_MSG) + + new_publisher = self._update_publisher_prefix( + self.prefix, + Publisher( + channel=PubSub.validate(channel), + list=ListSub.validate(list), + stream=StreamSub.validate(stream), + reply_to=reply_to, + headers=headers, + title=title, + _description=description, + _schema=schema, + include_in_schema=( + 
include_in_schema + if self.include_in_schema is None + else self.include_in_schema + ), + ), + ) + publisher_key = self._get_publisher_key(new_publisher) + publisher = self._publishers[publisher_key] = self._publishers.get( + publisher_key, new_publisher + ) + return publisher diff --git a/faststream/redis/router.pyi b/faststream/redis/router.pyi new file mode 100644 index 0000000000..3cd71e3243 --- /dev/null +++ b/faststream/redis/router.pyi @@ -0,0 +1,93 @@ +from typing import ( + Any, + Callable, + Dict, + Optional, + Sequence, + Union, +) + +from fast_depends.dependencies import Depends + +from faststream._compat import override +from faststream.broker.core.asyncronous import default_filter +from faststream.broker.middlewares import BaseMiddleware +from faststream.broker.router import BrokerRouter +from faststream.broker.types import ( + CustomDecoder, + CustomParser, + Filter, + P_HandlerParams, + T_HandlerReturn, +) +from faststream.broker.wrapper import HandlerCallWrapper +from faststream.redis.asyncapi import Publisher +from faststream.redis.message import AnyRedisDict, RedisMessage +from faststream.redis.schemas import ListSub, PubSub, StreamSub +from faststream.redis.shared.router import RedisRoute +from faststream.types import AnyDict + +class RedisRouter(BrokerRouter[int, AnyRedisDict]): + _publishers: Dict[int, Publisher] # type: ignore[assignment] + + def __init__( + self, + prefix: str = "", + handlers: Sequence[RedisRoute] = (), + *, + dependencies: Sequence[Depends] = (), + parser: Optional[CustomParser[AnyRedisDict, RedisMessage]] = None, + decoder: Optional[CustomDecoder[RedisMessage]] = None, + middlewares: Optional[ + Sequence[Callable[[AnyRedisDict], BaseMiddleware]] + ] = None, + include_in_schema: bool = True, + ): ... + @override + @staticmethod + def _get_publisher_key(publisher: Publisher) -> int: # type: ignore[override] + ... 
+ @override + @staticmethod + def _update_publisher_prefix( # type: ignore[override] + prefix: str, + publisher: Publisher, + ) -> Publisher: ... + @override + def subscriber( # type: ignore[override] + self, + channel: Union[str, PubSub, None] = None, + *, + list: Union[str, ListSub, None] = None, + stream: Union[str, StreamSub, None] = None, + # broker arguments + dependencies: Sequence[Depends] = (), + parser: Optional[CustomParser[AnyRedisDict, RedisMessage]] = None, + decoder: Optional[CustomDecoder[RedisMessage]] = None, + middlewares: Optional[ + Sequence[Callable[[AnyRedisDict], BaseMiddleware]] + ] = None, + filter: Filter[RedisMessage] = default_filter, + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + include_in_schema: bool = True, + **__service_kwargs: Any, + ) -> Callable[ + [Callable[P_HandlerParams, T_HandlerReturn]], + HandlerCallWrapper[Any, P_HandlerParams, T_HandlerReturn], + ]: ... + @override + def publisher( # type: ignore[override] + self, + channel: Union[str, PubSub, None] = None, + list: Union[str, ListSub, None] = None, + stream: Union[str, StreamSub, None] = None, + headers: Optional[AnyDict] = None, + reply_to: str = "", + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + schema: Optional[Any] = None, + include_in_schema: bool = True, + ) -> Publisher: ... 
diff --git a/faststream/redis/schemas.py b/faststream/redis/schemas.py new file mode 100644 index 0000000000..1245ddf5cf --- /dev/null +++ b/faststream/redis/schemas.py @@ -0,0 +1,113 @@ +from typing import Optional, Pattern + +from pydantic import Field, PositiveFloat, PositiveInt + +from faststream._compat import PYDANTIC_V2 +from faststream.broker.schemas import NameRequired +from faststream.utils.context.path import compile_path + + +class PubSub(NameRequired): + polling_interval: PositiveFloat = 1.0 + path_regex: Optional[Pattern[str]] = None + pattern: bool = False + + def __init__( + self, + channel: str, + pattern: bool = False, + polling_interval: PositiveFloat = 1.0, + ) -> None: + reg, path = compile_path(channel, replace_symbol="*") + + if reg is not None: + pattern = True + + super().__init__( + name=path, + path_regex=reg, + pattern=pattern, + polling_interval=polling_interval, + ) + + if PYDANTIC_V2: + model_config = {"arbitrary_types_allowed": True} + else: + + class Config: + arbitrary_types_allowed = True + + def __hash__(self) -> int: + return hash("pubsub" + self.name) + + +class ListSub(NameRequired): + polling_interval: PositiveFloat = 0.1 + batch: bool = False + max_records: PositiveInt = 10 + + def __init__( + self, + channel: str, + batch: bool = False, + max_records: PositiveInt = 10, + polling_interval: PositiveFloat = 0.1, + ) -> None: + super().__init__( + name=channel, + batch=batch, + max_records=max_records, + polling_interval=polling_interval, + ) + + @property + def records(self) -> Optional[PositiveInt]: + return self.max_records if self.batch else None + + def __hash__(self) -> int: + return hash("list" + self.name) + + +class StreamSub(NameRequired): + polling_interval: Optional[PositiveInt] = Field(default=100, description="ms") + group: Optional[str] = None + consumer: Optional[str] = None + batch: bool = False + no_ack: bool = False + + def __init__( + self, + stream: str, + polling_interval: Optional[PositiveInt] = 100, + 
group: Optional[str] = None, + consumer: Optional[str] = None, + batch: bool = False, + no_ack: bool = False, + ) -> None: + """ + Redis Stream subscriber parameters + + Args: + stream: (str): Redis Stream name. + polling_interval: (int:ms | None): wait message block. + group: (str | None): consumer group name. + consumer: (str | None): consumer name. + batch: (bool): consume messages in batches. + no_ack: (bool): do not add message to PEL. + """ + if (group and not consumer) or (not group and consumer): + raise ValueError("You should specify `group` and `consumer` both") + + super().__init__( + name=stream, + group=group, + consumer=consumer, + polling_interval=polling_interval, + batch=batch, + ) + + def __hash__(self) -> int: + return hash("stream" + self.name) + + +INCORRECT_SETUP_MSG = "You have to specify `channel`, `list` or `stream`" diff --git a/faststream/redis/shared/__init__.py b/faststream/redis/shared/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/faststream/redis/shared/logging.py b/faststream/redis/shared/logging.py new file mode 100644 index 0000000000..2c013b9439 --- /dev/null +++ b/faststream/redis/shared/logging.py @@ -0,0 +1,56 @@ +import logging +from typing import Any, Optional + +from faststream._compat import override +from faststream.broker.core.mixins import LoggingMixin +from faststream.broker.message import StreamMessage +from faststream.log import access_logger +from faststream.types import AnyDict + + +class RedisLoggingMixin(LoggingMixin): + _max_channel_name: int + + def __init__( + self, + *args: Any, + logger: Optional[logging.Logger] = access_logger, + log_level: int = logging.INFO, + log_fmt: Optional[str] = None, + **kwargs: Any, + ) -> None: + super().__init__( + *args, + logger=logger, + log_level=log_level, + log_fmt=log_fmt, + **kwargs, + ) + self._message_id_ln = 15 + self._max_channel_name = 4 + + @override + def _get_log_context( # type: ignore[override] + self, + message: 
Optional[StreamMessage[Any]], + channel: str, + ) -> AnyDict: + return { + "channel": channel, + **super()._get_log_context(message), + } + + @property + def fmt(self) -> str: + return self._fmt or ( + "%(asctime)s %(levelname)s - " + f"%(channel)-{self._max_channel_name}s | " + f"%(message_id)-{self._message_id_ln}s - %(message)s" + ) + + def _setup_log_context( + self, + channel: Optional[str] = None, + ) -> None: + if channel is not None: + self._max_channel_name = max((self._max_channel_name, len(channel))) diff --git a/faststream/redis/shared/router.py b/faststream/redis/shared/router.py new file mode 100644 index 0000000000..7315eeca1e --- /dev/null +++ b/faststream/redis/shared/router.py @@ -0,0 +1,71 @@ +from typing import Any, Callable, Sequence, Union + +from faststream._compat import TypeAlias, model_copy, override +from faststream.broker.router import BrokerRoute as RedisRoute +from faststream.broker.router import BrokerRouter +from faststream.broker.types import P_HandlerParams, T_HandlerReturn +from faststream.broker.wrapper import HandlerCallWrapper +from faststream.redis.message import AnyRedisDict +from faststream.redis.schemas import ListSub, PubSub, StreamSub +from faststream.types import SendableMessage + +__all__ = ( + "RedisRouter", + "RedisRoute", +) + + +Channel: TypeAlias = str + + +class RedisRouter(BrokerRouter[int, AnyRedisDict]): + def __init__( + self, + prefix: str = "", + handlers: Sequence[RedisRoute[AnyRedisDict, SendableMessage]] = (), + **kwargs: Any, + ): + for h in handlers: + if not (channel := h.kwargs.pop("channel", None)): + if list := h.kwargs.pop("list", None): + h.kwargs["list"] = prefix + list + continue + + elif stream := h.kwargs.pop("stream", None): + h.kwargs["stream"] = prefix + stream + continue + + channel, h.args = h.args[0], h.args[1:] + + h.args = (prefix + channel, *h.args) + + super().__init__(prefix, handlers, **kwargs) + + @override + def subscriber( # type: ignore[override] + self, + channel: 
Union[Channel, PubSub, None] = None, + *, + list: Union[Channel, ListSub, None] = None, + stream: Union[Channel, StreamSub, None] = None, + **broker_kwargs: Any, + ) -> Callable[ + [Callable[P_HandlerParams, T_HandlerReturn]], + HandlerCallWrapper[AnyRedisDict, P_HandlerParams, T_HandlerReturn], + ]: + channel = PubSub.validate(channel) + list = ListSub.validate(list) + stream = StreamSub.validate(stream) + + return self._wrap_subscriber( + channel=model_copy(channel, update={"name": self.prefix + channel.name}) + if channel + else None, + list=model_copy(list, update={"name": self.prefix + list.name}) + if list + else None, + stream=model_copy(stream, update={"name": self.prefix + stream.name}) + if stream + else None, + **broker_kwargs, + ) diff --git a/faststream/redis/shared/router.pyi b/faststream/redis/shared/router.pyi new file mode 100644 index 0000000000..9248364be7 --- /dev/null +++ b/faststream/redis/shared/router.pyi @@ -0,0 +1,48 @@ +from typing import ( + Any, + Callable, + Optional, + Sequence, + Union, +) + +from fast_depends.dependencies import Depends + +from faststream._compat import TypeAlias +from faststream.broker.core.asyncronous import default_filter +from faststream.broker.middlewares import BaseMiddleware +from faststream.broker.types import ( + CustomDecoder, + CustomParser, + Filter, + T_HandlerReturn, +) +from faststream.redis.message import AnyRedisDict, RedisMessage +from faststream.redis.schemas import ListSub, PubSub, StreamSub + +Channel: TypeAlias = str + +class RedisRoute: + """Delayed `RedisBroker.subscriber()` registration object""" + + def __init__( + self, + call: Callable[..., T_HandlerReturn], + channel: Union[Channel, PubSub, None] = None, + *, + list: Union[Channel, ListSub, None] = None, + stream: Union[Channel, StreamSub, None] = None, + # broker arguments + dependencies: Sequence[Depends] = (), + parser: Optional[CustomParser[AnyRedisDict, RedisMessage]] = None, + decoder: Optional[CustomDecoder[RedisMessage]] = None, 
+ middlewares: Optional[ + Sequence[Callable[[AnyRedisDict], BaseMiddleware]] + ] = None, + filter: Filter[RedisMessage] = default_filter, + # AsyncAPI information + title: Optional[str] = None, + description: Optional[str] = None, + include_in_schema: bool = True, + **__service_kwargs: Any, + ) -> None: ... diff --git a/faststream/redis/test.py b/faststream/redis/test.py new file mode 100644 index 0000000000..02b95927c8 --- /dev/null +++ b/faststream/redis/test.py @@ -0,0 +1,163 @@ +import re +from typing import Any, Optional + +from faststream._compat import override +from faststream.broker.test import TestBroker, call_handler +from faststream.broker.wrapper import HandlerCallWrapper +from faststream.redis.asyncapi import Handler, Publisher +from faststream.redis.broker import RedisBroker +from faststream.redis.message import AnyRedisDict +from faststream.redis.parser import RawMessage +from faststream.redis.producer import RedisFastProducer +from faststream.redis.schemas import INCORRECT_SETUP_MSG +from faststream.types import AnyDict, DecodedMessage, SendableMessage + +__all__ = ("TestRedisBroker",) + + +class TestRedisBroker(TestBroker[RedisBroker]): + @staticmethod + def patch_publisher( + broker: RedisBroker, + publisher: Any, + ) -> None: + publisher._producer = broker._producer + + @staticmethod + def create_publisher_fake_subscriber( + broker: RedisBroker, + publisher: Publisher, + ) -> HandlerCallWrapper[Any, Any, Any]: + @broker.subscriber( + channel=publisher.channel, + list=publisher.list, + stream=publisher.stream, + _raw=True, + ) + def f(msg: Any) -> None: + pass + + return f + + @staticmethod + async def _fake_connect( + broker: RedisBroker, + *args: Any, + **kwargs: Any, + ) -> None: + broker._producer = FakeProducer(broker) # type: ignore[assignment] + + @staticmethod + def remove_publisher_fake_subscriber( + broker: RedisBroker, + publisher: Publisher, + ) -> None: + any_of = publisher.channel or publisher.list or publisher.stream + assert 
any_of # nosec B101 + broker.handlers.pop(Handler.get_routing_hash(any_of), None) + + +class FakeProducer(RedisFastProducer): + def __init__(self, broker: RedisBroker): + self.broker = broker + + @override + async def publish( + self, + message: SendableMessage, + channel: Optional[str] = None, + reply_to: str = "", + headers: Optional[AnyDict] = None, + correlation_id: Optional[str] = None, + *, + list: Optional[str] = None, + stream: Optional[str] = None, + rpc: bool = False, + rpc_timeout: Optional[float] = 30.0, + raise_timeout: bool = False, + ) -> Optional[DecodedMessage]: + any_of = channel or list or stream + if any_of is None: + raise ValueError(INCORRECT_SETUP_MSG) + + for handler in self.broker.handlers.values(): # pragma: no branch + call = False + batch = False + + if channel and (ch := handler.channel) is not None: + call = bool( + (not ch.pattern and ch.name == channel) + or ( + ch.pattern + and re.match( + ch.name.replace(".", "\\.").replace("*", ".*"), + channel, + ) + ) + ) + + if list and (ls := handler.list_sub) is not None: + batch = ls.batch + call = list == ls.name + + if stream and (st := handler.stream_sub) is not None: + batch = st.batch + call = stream == st.name + + if call: + r = await call_handler( + handler=handler, + message=build_message( + message=[message] if batch else message, + channel=any_of, + headers=headers, + correlation_id=correlation_id, + reply_to=reply_to, + ), + rpc=rpc, + rpc_timeout=rpc_timeout, + raise_timeout=raise_timeout, + ) + + if rpc: # pragma: no branch + return r + + return None + + async def publish_batch( + self, + *msgs: SendableMessage, + list: str, + ) -> None: + for handler in self.broker.handlers.values(): # pragma: no branch + if handler.list_sub and handler.list_sub.name == list: + await call_handler( + handler=handler, + message=build_message( + message=msgs, + channel=list, + ), + ) + + return None + + +def build_message( + message: SendableMessage, + channel: str, + *, + reply_to: str = "", + 
correlation_id: Optional[str] = None, + headers: Optional[AnyDict] = None, +) -> AnyRedisDict: + data = RawMessage.encode( + message=message, + reply_to=reply_to, + headers=headers, + correlation_id=correlation_id, + ) + return AnyRedisDict( + channel=channel.encode(), + data=data.encode(), + type="message", + ) diff --git a/faststream/types.py b/faststream/types.py index bdaf952b38..f3df07241f 100644 --- a/faststream/types.py +++ b/faststream/types.py @@ -1,21 +1,31 @@ from datetime import datetime -from typing import Any, Awaitable, Callable, Dict, List, Sequence, TypeVar, Union +from typing import ( + Any, + AsyncContextManager, + Awaitable, + Callable, + Dict, + List, + Sequence, + TypeVar, + Union, +) from pydantic import BaseModel -from faststream._compat import ParamSpec +from faststream._compat import ParamSpec, TypeAlias -AnyDict = Dict[str, Any] +AnyDict: TypeAlias = Dict[str, Any] F_Return = TypeVar("F_Return") F_Spec = ParamSpec("F_Spec") -AnyCallable = Callable[..., Any] -NoneCallable = Callable[..., None] -AsyncFunc = Callable[..., Awaitable[Any]] +AnyCallable: TypeAlias = Callable[..., Any] +NoneCallable: TypeAlias = Callable[..., None] +AsyncFunc: TypeAlias = Callable[..., Awaitable[Any]] -DecoratedCallable = AnyCallable -DecoratedCallableNone = NoneCallable +DecoratedCallable: TypeAlias = AnyCallable +DecoratedCallableNone: TypeAlias = NoneCallable JsonDecodable = Union[ float, @@ -24,12 +34,16 @@ str, bytes, ] -DecodedMessage = Union[Dict[str, JsonDecodable], Sequence[JsonDecodable], JsonDecodable] -SendableMessage = Union[ +DecodedMessage: TypeAlias = Union[ + Dict[str, JsonDecodable], Sequence[JsonDecodable], JsonDecodable +] +SendableMessage: TypeAlias = Union[ datetime, DecodedMessage, BaseModel, None, ] -SettingField = Union[bool, str, List[str]] +SettingField: TypeAlias = Union[bool, str, List[str]] + +Lifespan: TypeAlias = Callable[..., AsyncContextManager[None]] diff --git a/faststream/utils/ast.py b/faststream/utils/ast.py new file mode 
100644 index 0000000000..fe6d2e6f07 --- /dev/null +++ b/faststream/utils/ast.py @@ -0,0 +1,53 @@ +import ast +import traceback +from functools import lru_cache +from pathlib import Path +from typing import Iterator, List, Optional, Union, cast + + +def is_contains_context_name(scip_name: str, name: str) -> bool: + stack = traceback.extract_stack()[-3] + tree = read_source_ast(stack.filename) + node = cast(Union[ast.With, ast.AsyncWith], find_ast_node(tree, stack.lineno)) + context_calls = get_withitem_calls(node) + + try: + pos = context_calls.index(scip_name) + except ValueError: + pos = 1 + + return name in context_calls[pos:] + + +@lru_cache +def read_source_ast(filename: str) -> ast.Module: + return ast.parse(Path(filename).read_text()) + + +def find_ast_node(module: ast.Module, lineno: Optional[int]) -> Optional[ast.AST]: + if lineno is not None: + for i in getattr(module, "body", ()): + if i.lineno == lineno: + return cast(ast.AST, i) + + r = find_ast_node(i, lineno) + if r is not None: + return r + + return None + + +def find_withitems(node: Union[ast.With, ast.AsyncWith]) -> Iterator[ast.withitem]: + if isinstance(node, (ast.With, ast.AsyncWith)): + yield from node.items + + for i in getattr(node, "body", ()): + yield from find_withitems(i) + + +def get_withitem_calls(node: Union[ast.With, ast.AsyncWith]) -> List[str]: + return [ + id + for i in find_withitems(node) + if (id := getattr(i.context_expr.func, "id", None)) # type: ignore[attr-defined] + ] diff --git a/faststream/utils/functions.py b/faststream/utils/functions.py index 86b90aa0ef..e93a457647 100644 --- a/faststream/utils/functions.py +++ b/faststream/utils/functions.py @@ -1,8 +1,20 @@ import inspect +from contextlib import asynccontextmanager from functools import wraps -from typing import Awaitable, Callable, ContextManager, List, Optional, Union, overload +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + ContextManager, + List, + Optional, + Union, + overload, +) import 
anyio +from fast_depends.core import CallModel from fast_depends.utils import run_async as call_or_await from faststream.types import AnyCallable, F_Return, F_Spec @@ -127,3 +139,15 @@ def timeout_scope( scope = anyio.move_on_after return scope(timeout) + + +@asynccontextmanager +async def fake_context(*args: Any, **kwargs: Any) -> AsyncIterator[None]: + yield None + + +def drop_response_type( + model: CallModel[F_Spec, F_Return] +) -> CallModel[F_Spec, F_Return]: + model.response_model = None + return model diff --git a/pyproject.toml b/pyproject.toml index 0ae09da4a1..2224506988 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ authors = [ { name = "lancetnik", email = "diementros@gmail.com" }, ] -keywords = ["rabbitmq", "kafka", "framework", "nats", "message brokers"] +keywords = ["rabbitmq", "kafka", "framework", "nats", "redis", "message brokers"] requires-python = ">=3.8" @@ -46,10 +46,9 @@ classifiers = [ dynamic = ["version"] dependencies = [ - "fast-depends>=2.2.0,<3.0.0", - "watchfiles>=0.20.0,<0.30.0", - "typer>=0.9.0,<1.0.0", - "uvloop>=0.14.0,!=0.15.0,!=0.15.1; sys_platform != 'win32' and (sys_platform != 'cygwin' and platform_python_implementation != 'PyPy')", + "fast-depends>=2.2.0,<3", + "typer>=0.9,<1", + "uvloop>=0.18.0; sys_platform != 'win32' and (sys_platform != 'cygwin' and platform_python_implementation != 'PyPy')", ] [project.optional-dependencies] @@ -63,12 +62,16 @@ kafka = [ ] nats = [ - "nats-py>=2.3.1,<3" + "nats-py>=2.3.1,<3.0.0" +] + +redis = [ + "redis>=5.0.0,<6.0.0" ] docs = [ - "fastapi>=0.100,<1", - "uvicorn>=0.17", + "fastapi>=0.100.0,<1.0.0", + "uvicorn>=0.17.0,<1.0.0", ] # dev dependencies @@ -91,6 +94,7 @@ lint = [ "types-PyYAML", "types-setuptools", "types-ujson", + "types-redis", "types-Pygments", "types-docutils", "mypy==1.7.1", @@ -115,10 +119,12 @@ testing = [ "pydantic-settings==2.1.0", "httpx==0.25.2", "PyYAML==6.0.1", + "watchfiles==0.21.0", + "email-validator==2.1.0.post1", ] dev = [ - 
"faststream[rabbit,kafka,nats,docs,lint,testing,devdocs]", + "faststream[rabbit,kafka,nats,docs,redis,lint,testing,devdocs]", "pre-commit==3.5.0", "detect-secrets==1.4.0", ] @@ -219,6 +225,7 @@ markers = [ "rabbit", "kafka", "nats", + "redis", "slow", "all", ] diff --git a/tests/asyncapi/base/publisher.py b/tests/asyncapi/base/publisher.py index e77b8c9985..03c8652b1d 100644 --- a/tests/asyncapi/base/publisher.py +++ b/tests/asyncapi/base/publisher.py @@ -120,3 +120,15 @@ def test_with_schema(self): payload = schema["components"]["schemas"] for v in payload.values(): assert v["type"] == "integer" + + def test_not_include(self): + broker = self.broker_class() + + @broker.publisher("test", include_in_schema=False) + @broker.subscriber("in-test", include_in_schema=False) + async def handler(msg: str): + pass + + schema = get_app_schema(self.build_app(broker)) + + assert schema.channels == {} diff --git a/tests/asyncapi/base/router.py b/tests/asyncapi/base/router.py index a07ae57246..5364160e79 100644 --- a/tests/asyncapi/base/router.py +++ b/tests/asyncapi/base/router.py @@ -28,3 +28,18 @@ async def handle(msg): payload = schema["components"]["schemas"] key = list(payload.keys())[0] assert payload[key]["title"] == key == "Handle:Message:Payload" + + def test_not_include(self): + broker = self.broker_class() + router = self.router_class(include_in_schema=False) + + @router.subscriber("test") + @router.publisher("test") + async def handle(msg): + ... 
+ + broker.include_router(router) + + schema = get_app_schema(FastStream(broker)) + + assert schema.channels == {} diff --git a/tests/asyncapi/nats/test_fastapi.py b/tests/asyncapi/nats/test_fastapi.py index 081d2f3192..40e6838957 100644 --- a/tests/asyncapi/nats/test_fastapi.py +++ b/tests/asyncapi/nats/test_fastapi.py @@ -1,7 +1,7 @@ from typing import Type -from faststream.kafka.test import TestKafkaBroker from faststream.nats.fastapi import NatsRouter +from faststream.nats.test import TestNatsBroker from tests.asyncapi.base.arguments import FastAPICompatible from tests.asyncapi.base.fastapi import FastAPITestCase from tests.asyncapi.base.publisher import PublisherTestcase @@ -9,7 +9,7 @@ class TestRouterArguments(FastAPITestCase, FastAPICompatible): broker_class: Type[NatsRouter] = NatsRouter - broker_wrapper = staticmethod(TestKafkaBroker) + broker_wrapper = staticmethod(TestNatsBroker) def build_app(self, router): return router diff --git a/tests/asyncapi/redis/__init__.py b/tests/asyncapi/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/asyncapi/redis/test_arguments.py b/tests/asyncapi/redis/test_arguments.py new file mode 100644 index 0000000000..c6b7a24ba2 --- /dev/null +++ b/tests/asyncapi/redis/test_arguments.py @@ -0,0 +1,91 @@ +from faststream.asyncapi.generate import get_app_schema +from faststream.redis import RedisBroker, StreamSub +from tests.asyncapi.base.arguments import ArgumentsTestcase + + +class TestArguments(ArgumentsTestcase): + broker_class = RedisBroker + + def test_channel_subscriber(self): + broker = self.broker_class() + + @broker.subscriber("test") + async def handle(msg): + ... 
+ + schema = get_app_schema(self.build_app(broker)).to_jsonable() + key = tuple(schema["channels"].keys())[0] + + assert schema["channels"][key]["bindings"] == { + "redis": { + "bindingVersion": "custom", + "channel": "test", + "method": "subscribe", + } + } + + def test_channel_pattern_subscriber(self): + broker = self.broker_class() + + @broker.subscriber("test.{path}") + async def handle(msg): + ... + + schema = get_app_schema(self.build_app(broker)).to_jsonable() + key = tuple(schema["channels"].keys())[0] + + assert schema["channels"][key]["bindings"] == { + "redis": { + "bindingVersion": "custom", + "channel": "test.*", + "method": "psubscribe", + } + } + + def test_list_subscriber(self): + broker = self.broker_class() + + @broker.subscriber(list="test") + async def handle(msg): + ... + + schema = get_app_schema(self.build_app(broker)).to_jsonable() + key = tuple(schema["channels"].keys())[0] + + assert schema["channels"][key]["bindings"] == { + "redis": {"bindingVersion": "custom", "channel": "test", "method": "lpop"} + } + + def test_stream_subscriber(self): + broker = self.broker_class() + + @broker.subscriber(stream="test") + async def handle(msg): + ... + + schema = get_app_schema(self.build_app(broker)).to_jsonable() + key = tuple(schema["channels"].keys())[0] + + assert schema["channels"][key]["bindings"] == { + "redis": {"bindingVersion": "custom", "channel": "test", "method": "xread"} + } + + def test_stream_group_subscriber(self): + broker = self.broker_class() + + @broker.subscriber(stream=StreamSub("test", group="group", consumer="consumer")) + async def handle(msg): + ... 
+ + schema = get_app_schema(self.build_app(broker)).to_jsonable() + key = tuple(schema["channels"].keys())[0] + + assert schema["channels"][key]["bindings"] == { + "redis": { + "bindingVersion": "custom", + "channel": "test", + "consumer_name": "consumer", + "group_name": "group", + "method": "xreadgroup", + } + } diff --git a/tests/asyncapi/redis/test_connection.py b/tests/asyncapi/redis/test_connection.py new file mode 100644 index 0000000000..a5719d4a77 --- /dev/null +++ b/tests/asyncapi/redis/test_connection.py @@ -0,0 +1,60 @@ +from faststream import FastStream +from faststream.asyncapi.generate import get_app_schema +from faststream.asyncapi.schema import Tag +from faststream.redis import RedisBroker + + +def test_base(): + schema = get_app_schema( + FastStream( + RedisBroker( + "redis://localhost:6379", + protocol="plaintext", + protocol_version="0.9.0", + description="Test description", + tags=(Tag(name="some-tag", description="experimental"),), + ) + ) + ).to_jsonable() + + assert schema == { + "asyncapi": "2.6.0", + "channels": {}, + "components": {"messages": {}, "schemas": {}}, + "defaultContentType": "application/json", + "info": {"description": "", "title": "FastStream", "version": "0.1.0"}, + "servers": { + "development": { + "description": "Test description", + "protocol": "plaintext", + "protocolVersion": "0.9.0", + "tags": [{"description": "experimental", "name": "some-tag"}], + "url": "redis://localhost:6379", + } + }, + }, schema + + +def test_custom(): + schema = get_app_schema( + FastStream( + RedisBroker( + "redis://localhost:6379", asyncapi_url="rediss://127.0.0.1:8000" + ) + ) + ).to_jsonable() + + assert schema == { + "asyncapi": "2.6.0", + "channels": {}, + "components": {"messages": {}, "schemas": {}}, + "defaultContentType": "application/json", + "info": {"description": "", "title": "FastStream", "version": "0.1.0"}, + "servers": { + "development": { + "protocol": "rediss", + "protocolVersion": "custom", + "url": 
"rediss://127.0.0.1:8000", + } + }, + } diff --git a/tests/asyncapi/redis/test_fastapi.py b/tests/asyncapi/redis/test_fastapi.py new file mode 100644 index 0000000000..661935a850 --- /dev/null +++ b/tests/asyncapi/redis/test_fastapi.py @@ -0,0 +1,22 @@ +from typing import Type + +from faststream.redis.fastapi import RedisRouter +from faststream.redis.test import TestRedisBroker +from tests.asyncapi.base.arguments import FastAPICompatible +from tests.asyncapi.base.fastapi import FastAPITestCase +from tests.asyncapi.base.publisher import PublisherTestcase + + +class TestRouterArguments(FastAPITestCase, FastAPICompatible): + broker_class: Type[RedisRouter] = RedisRouter + broker_wrapper = staticmethod(TestRedisBroker) + + def build_app(self, router): + return router + + +class TestRouterPublisher(PublisherTestcase): + broker_class = RedisRouter + + def build_app(self, router): + return router diff --git a/tests/asyncapi/redis/test_naming.py b/tests/asyncapi/redis/test_naming.py new file mode 100644 index 0000000000..dc6640d050 --- /dev/null +++ b/tests/asyncapi/redis/test_naming.py @@ -0,0 +1,95 @@ +import pytest + +from faststream import FastStream +from faststream.asyncapi.generate import get_app_schema +from faststream.redis import RedisBroker +from tests.asyncapi.base.naming import NamingTestCase + + +class TestNaming(NamingTestCase): + broker_class = RedisBroker + + def test_base(self): + broker = self.broker_class() + + @broker.subscriber("test") + async def handle(): + ... 
+ + schema = get_app_schema(FastStream(broker)).to_jsonable() + + assert schema == { + "asyncapi": "2.6.0", + "channels": { + "test:Handle": { + "bindings": { + "redis": { + "bindingVersion": "custom", + "channel": "test", + "method": "subscribe", + } + }, + "servers": ["development"], + "subscribe": { + "message": {"$ref": "#/components/messages/test:Handle:Message"} + }, + } + }, + "components": { + "messages": { + "test:Handle:Message": { + "correlationId": { + "location": "$message.header#/correlation_id" + }, + "payload": {"$ref": "#/components/schemas/EmptyPayload"}, + "title": "test:Handle:Message", + } + }, + "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}}, + }, + "defaultContentType": "application/json", + "info": {"description": "", "title": "FastStream", "version": "0.1.0"}, + "servers": { + "development": { + "protocol": "redis", + "protocolVersion": "custom", + "url": "redis://localhost:6379", + } + }, + }, schema + + @pytest.mark.parametrize( + "args", + ( + pytest.param({"channel": "test"}, id="channel"), + pytest.param({"list": "test"}, id="list"), + pytest.param({"stream": "test"}, id="stream"), + ), + ) + def test_subscribers_variations(self, args): + broker = self.broker_class() + + @broker.subscriber(**args) + async def handle(): + ... + + schema = get_app_schema(FastStream(broker)) + assert list(schema.channels.keys()) == ["test:Handle"] + + @pytest.mark.parametrize( + "args", + ( + pytest.param({"channel": "test"}, id="channel"), + pytest.param({"list": "test"}, id="list"), + pytest.param({"stream": "test"}, id="stream"), + ), + ) + def test_publisher_variations(self, args): + broker = self.broker_class() + + @broker.publisher(**args) + async def handle(): + ... 
+ + schema = get_app_schema(FastStream(broker)) + assert list(schema.channels.keys()) == ["test:Publisher"] diff --git a/tests/asyncapi/redis/test_publisher.py b/tests/asyncapi/redis/test_publisher.py new file mode 100644 index 0000000000..ce80a9da6c --- /dev/null +++ b/tests/asyncapi/redis/test_publisher.py @@ -0,0 +1,53 @@ +from faststream.asyncapi.generate import get_app_schema +from faststream.redis import RedisBroker +from tests.asyncapi.base.publisher import PublisherTestcase + + +class TestArguments(PublisherTestcase): + broker_class = RedisBroker + + def test_channel_publisher(self): + broker = self.broker_class() + + @broker.publisher("test") + async def handle(msg): + ... + + schema = get_app_schema(self.build_app(broker)).to_jsonable() + key = tuple(schema["channels"].keys())[0] + + assert schema["channels"][key]["bindings"] == { + "redis": { + "bindingVersion": "custom", + "channel": "test", + "method": "publish", + } + } + + def test_list_publisher(self): + broker = self.broker_class() + + @broker.publisher(list="test") + async def handle(msg): + ... + + schema = get_app_schema(self.build_app(broker)).to_jsonable() + key = tuple(schema["channels"].keys())[0] + + assert schema["channels"][key]["bindings"] == { + "redis": {"bindingVersion": "custom", "channel": "test", "method": "rpush"} + } + + def test_stream_publisher(self): + broker = self.broker_class() + + @broker.publisher(stream="test") + async def handle(msg): + ... 
+ + schema = get_app_schema(self.build_app(broker)).to_jsonable() + key = tuple(schema["channels"].keys())[0] + + assert schema["channels"][key]["bindings"] == { + "redis": {"bindingVersion": "custom", "channel": "test", "method": "xadd"} + } diff --git a/tests/asyncapi/redis/test_router.py b/tests/asyncapi/redis/test_router.py new file mode 100644 index 0000000000..9dccc6d970 --- /dev/null +++ b/tests/asyncapi/redis/test_router.py @@ -0,0 +1,89 @@ +from faststream import FastStream +from faststream.asyncapi.generate import get_app_schema +from faststream.redis import RedisBroker, RedisRoute, RedisRouter +from tests.asyncapi.base.arguments import ArgumentsTestcase +from tests.asyncapi.base.publisher import PublisherTestcase +from tests.asyncapi.base.router import RouterTestcase + + +class TestRouter(RouterTestcase): + broker_class = RedisBroker + router_class = RedisRouter + route_class = RedisRoute + + def test_prefix(self): + broker = self.broker_class() + + router = self.router_class(prefix="test_") + + @router.subscriber("test") + async def handle(msg): + ... 
+ + broker.include_router(router) + + schema = get_app_schema(FastStream(broker)).to_jsonable() + + assert schema == { + "asyncapi": "2.6.0", + "channels": { + "test_test:Handle": { + "bindings": { + "redis": { + "bindingVersion": "custom", + "channel": "test_test", + "method": "subscribe", + } + }, + "servers": ["development"], + "subscribe": { + "message": { + "$ref": "#/components/messages/test_test:Handle:Message" + } + }, + } + }, + "components": { + "messages": { + "test_test:Handle:Message": { + "correlationId": { + "location": "$message.header#/correlation_id" + }, + "payload": { + "$ref": "#/components/schemas/Handle:Message:Payload" + }, + "title": "test_test:Handle:Message", + } + }, + "schemas": { + "Handle:Message:Payload": {"title": "Handle:Message:Payload"} + }, + }, + "defaultContentType": "application/json", + "info": {"description": "", "title": "FastStream", "version": "0.1.0"}, + "servers": { + "development": { + "protocol": "redis", + "protocolVersion": "custom", + "url": "redis://localhost:6379", + } + }, + } + + +class TestRouterArguments(ArgumentsTestcase): + broker_class = RedisRouter + + def build_app(self, router): + broker = RedisBroker() + broker.include_router(router) + return FastStream(broker) + + +class TestRouterPublisher(PublisherTestcase): + broker_class = RedisRouter + + def build_app(self, router): + broker = RedisBroker() + broker.include_router(router) + return FastStream(broker) diff --git a/tests/brokers/base/consume.py b/tests/brokers/base/consume.py index ab59e86ae2..c41301b22f 100644 --- a/tests/brokers/base/consume.py +++ b/tests/brokers/base/consume.py @@ -23,14 +23,15 @@ async def test_consume( def subscriber(m): event.set() - await consume_broker.start() - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + 
asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) assert event.is_set() @@ -52,16 +53,17 @@ def subscriber(m): else: consume2.set() - await consume_broker.start() - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(consume_broker.publish("hello", queue + "1")), - asyncio.create_task(consume.wait()), - asyncio.create_task(consume2.wait()), - ), - timeout=3, - ) + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(consume_broker.publish("hello", queue + "1")), + asyncio.create_task(consume.wait()), + asyncio.create_task(consume2.wait()), + ), + timeout=3, + ) assert consume2.is_set() assert consume.is_set() @@ -84,17 +86,17 @@ async def handler(m): else: consume2.set() - await consume_broker.start() - - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(consume.wait()), - asyncio.create_task(consume2.wait()), - ), - timeout=3, - ) + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(consume.wait()), + asyncio.create_task(consume2.wait()), + ), + timeout=3, + ) assert consume2.is_set() assert consume.is_set() @@ -121,17 +123,17 @@ def handler2(m): mock.handler2() consume2.set() - await consume_broker.start() - - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(consume_broker.publish("hello", another_topic)), - asyncio.create_task(consume.wait()), - asyncio.create_task(consume2.wait()), - ), - timeout=3, - ) + async with consume_broker: + await consume_broker.start() + 
await asyncio.wait( + ( + asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(consume_broker.publish("hello", another_topic)), + asyncio.create_task(consume.wait()), + asyncio.create_task(consume2.wait()), + ), + timeout=3, + ) assert consume.is_set() assert consume2.is_set() @@ -159,17 +161,19 @@ async def handler2(m): mock.handler2(m) consume2.set() - await consume_broker.start() - - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish({"msg": "hello"}, queue)), - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(consume.wait()), - asyncio.create_task(consume2.wait()), - ), - timeout=3, - ) + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + asyncio.create_task( + consume_broker.publish({"msg": "hello"}, queue) + ), + asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(consume.wait()), + asyncio.create_task(consume2.wait()), + ), + timeout=3, + ) assert consume.is_set() assert consume2.is_set() @@ -193,17 +197,18 @@ def subscriber(m): event.set() raise StopConsume() - await consume_broker.start() - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - await asyncio.sleep(0.5) - await consume_broker.publish("hello", queue) - await asyncio.sleep(0.5) + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + await asyncio.sleep(0.5) + await consume_broker.publish("hello", queue) + await asyncio.sleep(0.5) assert event.is_set() mock.assert_called_once() diff --git a/tests/brokers/base/publish.py b/tests/brokers/base/publish.py index e513877c6f..dd0bcffcde 100644 --- a/tests/brokers/base/publish.py +++ b/tests/brokers/base/publish.py @@ -28,21 +28,78 @@ def pub_broker(self, 
full_broker): @pytest.mark.parametrize( ("message", "message_type", "expected_message"), ( - ("hello", str, "hello"), - (b"hello", bytes, b"hello"), - (1, int, 1), - (1.0, float, 1.0), - (False, bool, False), - ({"m": 1}, Dict[str, int], {"m": 1}), - ([1, 2, 3], List[int], [1, 2, 3]), - (now, datetime, now), - ( + pytest.param( + "hello", + str, + "hello", + id="str->str", + ), + pytest.param( + b"hello", + bytes, + b"hello", + id="bytes->bytes", + ), + pytest.param( + 1, + int, + 1, + id="int->int", + ), + pytest.param( + 1.0, + float, + 1.0, + id="float->float", + ), + pytest.param( + False, + bool, + False, + id="bool->bool", + ), + pytest.param( + {"m": 1}, + Dict[str, int], + {"m": 1}, + id="dict->dict", + ), + pytest.param( + [1, 2, 3], + List[int], + [1, 2, 3], + id="list->list", + ), + pytest.param( + now, + datetime, + now, + id="datetime->datetime", + ), + pytest.param( model_to_json(SimpleModel(r="hello!")).encode(), SimpleModel, SimpleModel(r="hello!"), + id="bytes->model", + ), + pytest.param( + SimpleModel(r="hello!"), + SimpleModel, + SimpleModel(r="hello!"), + id="model->model", + ), + pytest.param( + SimpleModel(r="hello!"), + dict, + {"r": "hello!"}, + id="model->dict", + ), + pytest.param( + {"r": "hello!"}, + SimpleModel, + SimpleModel(r="hello!"), + id="dict->model", ), - (SimpleModel(r="hello!"), SimpleModel, SimpleModel(r="hello!")), - (SimpleModel(r="hello!"), dict, {"r": "hello!"}), ), ) async def test_serialize( @@ -333,3 +390,32 @@ async def handler(m): assert event.is_set() mock.assert_called_with("Hello!") + + @pytest.mark.asyncio + async def test_publisher_after_start( + self, + pub_broker: BrokerUsecase, + queue: str, + event, + mock, + ): + @pub_broker.subscriber(queue) + async def handler(m): + event.set() + mock(m) + + async with pub_broker: + await pub_broker.start() + + pub = pub_broker.publisher(queue) + + await asyncio.wait( + ( + asyncio.create_task(pub.publish("Hello!")), + asyncio.create_task(event.wait()), + ), + timeout=3, 
+ ) + + assert event.is_set() + mock.assert_called_with("Hello!") diff --git a/tests/brokers/base/router.py b/tests/brokers/base/router.py index 96d6367d97..ff908f5beb 100644 --- a/tests/brokers/base/router.py +++ b/tests/brokers/base/router.py @@ -44,17 +44,18 @@ def subscriber(m): pub_broker.include_router(router) - await pub_broker.start() - - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + async with pub_broker: + await pub_broker.start() + + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() + assert event.is_set() async def test_not_empty_prefix( self, @@ -71,17 +72,18 @@ def subscriber(m): pub_broker.include_router(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() + assert event.is_set() async def test_empty_prefix_publisher( self, @@ -101,17 +103,18 @@ def response(m): pub_broker.include_router(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() + assert event.is_set() async def test_not_empty_prefix_publisher( self, @@ -133,17 +136,18 @@ def response(m): pub_broker.include_router(router) - await pub_broker.start() + async with pub_broker: + await 
pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() + assert event.is_set() async def test_manual_publisher( self, @@ -166,17 +170,18 @@ def response(m): pub_broker.include_router(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() + assert event.is_set() async def test_delayed_handlers( self, @@ -192,17 +197,18 @@ def response(m): pub_broker.include_router(r) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() + assert event.is_set() async def test_nested_routers_sub( self, @@ -224,20 +230,21 @@ def subscriber(m): core_router.include_routers(router) pub_broker.include_routers(core_router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task( - pub_broker.publish("hello", f"test1_test2_{queue}") + await asyncio.wait( + ( + asyncio.create_task( + pub_broker.publish("hello", f"test1_test2_{queue}") + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - 
timeout=3, - ) + timeout=3, + ) - assert event.is_set() - mock.assert_called_with("hello") + assert event.is_set() + mock.assert_called_with("hello") async def test_nested_routers_pub( self, @@ -261,19 +268,20 @@ def response(m): core_router.include_routers(router) pub_broker.include_routers(core_router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task( - pub_broker.publish("hello", f"test1_test2_{queue}") + await asyncio.wait( + ( + asyncio.create_task( + pub_broker.publish("hello", f"test1_test2_{queue}") + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + timeout=3, + ) - assert event.is_set() + assert event.is_set() async def test_router_dependencies( self, @@ -300,19 +308,20 @@ def subscriber(s): pub_broker.include_routers(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() - mock.dep1.assert_called_once() - mock.dep2.assert_called_once() + assert event.is_set() + mock.dep1.assert_called_once() + mock.dep2.assert_called_once() async def test_router_middlewares( self, @@ -339,19 +348,20 @@ def subscriber(s): pub_broker.include_routers(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() - mock.mid1.assert_called_once() - mock.mid2.assert_called_once() 
+ assert event.is_set() + mock.mid1.assert_called_once() + mock.mid2.assert_called_once() async def test_router_parser( self, @@ -380,19 +390,20 @@ def subscriber(s): pub_broker.include_routers(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() - mock.parser.assert_called_once() - mock.decoder.assert_called_once() + assert event.is_set() + mock.parser.assert_called_once() + mock.decoder.assert_called_once() async def test_router_parser_override( self, @@ -429,20 +440,21 @@ def subscriber(s): pub_broker.include_routers(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() - assert not mock.called - mock.parser.assert_called_once() - mock.decoder.assert_called_once() + assert event.is_set() + assert not mock.called + mock.parser.assert_called_once() + mock.decoder.assert_called_once() @pytest.mark.asyncio @@ -468,18 +480,19 @@ def subscriber(m): pub_broker.include_router(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() - 
pub.mock.assert_called_with("hi") + assert event.is_set() + pub.mock.assert_called_with("hi") async def test_subscriber_mock( self, @@ -495,18 +508,19 @@ def subscriber(m): pub_broker.include_router(router) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task(pub_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) - assert event.is_set() - subscriber.mock.assert_called_with("hello") + assert event.is_set() + subscriber.mock.assert_called_with("hello") async def test_manual_publisher_mock( self, router: BrokerRouter, queue: str, pub_broker: BrokerAsyncUsecase @@ -518,6 +532,7 @@ async def m(m): await publisher.publish("response") pub_broker.include_router(router) - await pub_broker.start() - await pub_broker.publish("hello", queue) - publisher.mock.assert_called_with("response") + async with pub_broker: + await pub_broker.start() + await pub_broker.publish("hello", queue) + publisher.mock.assert_called_with("response") diff --git a/tests/brokers/base/rpc.py b/tests/brokers/base/rpc.py index 8c0012abcd..71974e12e0 100644 --- a/tests/brokers/base/rpc.py +++ b/tests/brokers/base/rpc.py @@ -19,9 +19,9 @@ async def test_rpc(self, queue: str, rpc_broker: BrokerUsecase): async def m(m): # pragma: no cover return "1" - await rpc_broker.start() - - r = await rpc_broker.publish("hello", queue, rpc_timeout=3, rpc=True) + async with rpc_broker: + await rpc_broker.start() + r = await rpc_broker.publish("hello", queue, rpc_timeout=3, rpc=True) assert r == "1" @pytest.mark.asyncio @@ -30,16 +30,17 @@ async def test_rpc_timeout_raises(self, queue: str, rpc_broker: BrokerUsecase): async def m(m): # pragma: no cover await anyio.sleep(1) - await rpc_broker.start() + async with rpc_broker: + await rpc_broker.start() - with 
pytest.raises(TimeoutError): # pragma: no branch - await rpc_broker.publish( - "hello", - queue, - rpc=True, - rpc_timeout=0, - raise_timeout=True, - ) + with pytest.raises(TimeoutError): # pragma: no branch + await rpc_broker.publish( + "hello", + queue, + rpc=True, + rpc_timeout=0, + raise_timeout=True, + ) @pytest.mark.asyncio async def test_rpc_timeout_none(self, queue: str, rpc_broker: BrokerUsecase): @@ -47,14 +48,15 @@ async def test_rpc_timeout_none(self, queue: str, rpc_broker: BrokerUsecase): async def m(m): # pragma: no cover await anyio.sleep(1) - await rpc_broker.start() + async with rpc_broker: + await rpc_broker.start() - r = await rpc_broker.publish( - "hello", - queue, - rpc=True, - rpc_timeout=0, - ) + r = await rpc_broker.publish( + "hello", + queue, + rpc=True, + rpc_timeout=0, + ) assert r is None @@ -77,12 +79,13 @@ async def response_hanler(m: str): async def m(m): # pragma: no cover return "1" - await rpc_broker.start() + async with rpc_broker: + await rpc_broker.start() - await rpc_broker.publish("hello", queue, reply_to=reply_queue) + await rpc_broker.publish("hello", queue, reply_to=reply_queue) - with timeout_scope(3, True): - await event.wait() + with timeout_scope(3, True): + await event.wait() mock.assert_called_with("1") diff --git a/tests/brokers/kafka/test_consume.py b/tests/brokers/kafka/test_consume.py index 88cc0dc103..1a06faa451 100644 --- a/tests/brokers/kafka/test_consume.py +++ b/tests/brokers/kafka/test_consume.py @@ -45,23 +45,25 @@ async def test_consume_ack( async def handler(msg: KafkaMessage): event.set() - await full_broker.start() - with patch.object( - AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with full_broker: + await full_broker.start() + + with patch.object( + AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) + ) as m: + await asyncio.wait( + ( + 
asyncio.create_task( + full_broker.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=10, - ) - m.mock.assert_called_once() + timeout=10, + ) + m.mock.assert_called_once() assert event.is_set() @@ -78,23 +80,25 @@ async def handler(msg: KafkaMessage): await msg.ack() event.set() - await full_broker.start() - with patch.object( - AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with full_broker: + await full_broker.start() + + with patch.object( + AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=10, - ) - m.mock.assert_called_once() + timeout=10, + ) + m.mock.assert_called_once() assert event.is_set() @@ -111,23 +115,25 @@ async def handler(msg: KafkaMessage): event.set() raise AckMessage() - await full_broker.start() - with patch.object( - AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with full_broker: + await full_broker.start() + + with patch.object( + AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=10, - ) - m.mock.assert_called_once() + timeout=10, + ) + m.mock.assert_called_once() assert event.is_set() @@ -144,22 +150,24 @@ async def handler(msg: KafkaMessage): await msg.nack() event.set() - await full_broker.start() - with patch.object( - AIOKafkaConsumer, "commit", 
spy_decorator(AIOKafkaConsumer.commit) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with full_broker: + await full_broker.start() + + with patch.object( + AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=10, - ) - assert not m.mock.called + timeout=10, + ) + assert not m.mock.called assert event.is_set() diff --git a/tests/brokers/nats/conftest.py b/tests/brokers/nats/conftest.py index f8ac628779..a42eef1fdc 100644 --- a/tests/brokers/nats/conftest.py +++ b/tests/brokers/nats/conftest.py @@ -32,7 +32,7 @@ def router(): @pytest_asyncio.fixture -@pytest.mark.rabbit +@pytest.mark.nats async def broker(settings): broker = NatsBroker([settings.url], apply_types=False) async with broker: @@ -40,7 +40,7 @@ async def broker(settings): @pytest_asyncio.fixture -@pytest.mark.rabbit +@pytest.mark.nats async def full_broker(settings): broker = NatsBroker([settings.url]) async with broker: diff --git a/tests/brokers/nats/test_consume.py b/tests/brokers/nats/test_consume.py index 268303f3ca..62584ca815 100644 --- a/tests/brokers/nats/test_consume.py +++ b/tests/brokers/nats/test_consume.py @@ -24,16 +24,17 @@ async def test_consume_js( def subscriber(m): event.set() - await consume_broker.start() - await asyncio.wait( - ( - asyncio.create_task( - consume_broker.publish("hello", queue, stream=stream.name) + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + asyncio.create_task( + consume_broker.publish("hello", queue, stream=stream.name) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + timeout=3, + ) assert event.is_set() @@ -76,21 +77,22 @@ async def test_consume_ack( async def handler(msg: NatsMessage): 
event.set() - await full_broker.start() - with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with full_broker: + await full_broker.start() + with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @@ -107,21 +109,22 @@ async def handler(msg: NatsMessage): await msg.ack() event.set() - await full_broker.start() - with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with full_broker: + await full_broker.start() + with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @@ -138,21 +141,22 @@ async def handler(msg: NatsMessage): event.set() raise AckMessage() - await full_broker.start() - with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with full_broker: + await full_broker.start() + with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @@ 
-169,20 +173,21 @@ async def handler(msg: NatsMessage): await msg.nack() event.set() - await full_broker.start() - with patch.object(Msg, "nak", spy_decorator(Msg.nak)) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with full_broker: + await full_broker.start() + with patch.object(Msg, "nak", spy_decorator(Msg.nak)) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() diff --git a/tests/brokers/nats/test_rpc.py b/tests/brokers/nats/test_rpc.py index e0c8b26857..17d897b6f6 100644 --- a/tests/brokers/nats/test_rpc.py +++ b/tests/brokers/nats/test_rpc.py @@ -12,9 +12,10 @@ async def test_rpc_js(self, queue: str, rpc_broker: NatsBroker, stream: JStream) async def m(m): # pragma: no cover return "1" - await rpc_broker.start() + async with rpc_broker: + await rpc_broker.start() - r = await rpc_broker.publish( - "hello", queue, rpc_timeout=3, stream=stream.name, rpc=True - ) - assert r == "1" + r = await rpc_broker.publish( + "hello", queue, rpc_timeout=3, stream=stream.name, rpc=True + ) + assert r == "1" diff --git a/tests/brokers/rabbit/test_consume.py b/tests/brokers/rabbit/test_consume.py index b612399ec1..7d0fe0d94d 100644 --- a/tests/brokers/rabbit/test_consume.py +++ b/tests/brokers/rabbit/test_consume.py @@ -25,16 +25,17 @@ async def test_consume_from_exchange( def h(m): event.set() - await broker.start() - await asyncio.wait( - ( - asyncio.create_task( - broker.publish("hello", queue=queue, exchange=exchange) + async with broker: + await broker.start() + await asyncio.wait( + ( + asyncio.create_task( + broker.publish("hello", queue=queue, exchange=exchange) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - 
timeout=3, - ) + timeout=3, + ) assert event.is_set() @@ -57,18 +58,19 @@ async def test_consume_with_get_old( def h(m): event.set() - await broker.start() - await asyncio.wait( - ( - asyncio.create_task( - broker.publish( - Message(b"hello"), queue=queue, exchange=exchange.name - ) + async with broker: + await broker.start() + await asyncio.wait( + ( + asyncio.create_task( + broker.publish( + Message(b"hello"), queue=queue, exchange=exchange.name + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + timeout=3, + ) assert event.is_set() @@ -84,20 +86,21 @@ async def test_consume_ack( async def handler(msg: RabbitMessage): event.set() - await full_broker.start() - with patch.object( - IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + async with full_broker: + await full_broker.start() + with patch.object( + IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("hello", queue=queue, exchange=exchange) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @@ -114,20 +117,21 @@ async def handler(msg: RabbitMessage): await msg.ack() event.set() - await full_broker.start() - with patch.object( - IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + async with full_broker: + await full_broker.start() + with patch.object( + IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("hello", queue=queue, exchange=exchange) + ), + 
asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @pytest.mark.asyncio @@ -145,20 +149,21 @@ async def handler(msg: RabbitMessage): finally: event.set() - await full_broker.start() - with patch.object( - IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + async with full_broker: + await full_broker.start() + with patch.object( + IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("hello", queue=queue, exchange=exchange) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @pytest.mark.asyncio @@ -175,20 +180,21 @@ async def handler(msg: RabbitMessage): event.set() raise ValueError() - await full_broker.start() - with patch.object( - IncomingMessage, "nack", spy_decorator(IncomingMessage.nack) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + async with full_broker: + await full_broker.start() + with patch.object( + IncomingMessage, "nack", spy_decorator(IncomingMessage.nack) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("hello", queue=queue, exchange=exchange) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @pytest.mark.asyncio @@ -206,20 +212,21 @@ async def handler(msg: RabbitMessage): finally: event.set() - await full_broker.start() - with patch.object( - IncomingMessage, "nack", 
spy_decorator(IncomingMessage.nack) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + async with full_broker: + await full_broker.start() + with patch.object( + IncomingMessage, "nack", spy_decorator(IncomingMessage.nack) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("hello", queue=queue, exchange=exchange) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @pytest.mark.asyncio @@ -236,20 +243,21 @@ async def handler(msg: RabbitMessage): event.set() raise ValueError() - await full_broker.start() - with patch.object( - IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + async with full_broker: + await full_broker.start() + with patch.object( + IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("hello", queue=queue, exchange=exchange) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @pytest.mark.asyncio @@ -267,20 +275,21 @@ async def handler(msg: RabbitMessage): finally: event.set() - await full_broker.start() - with patch.object( - IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + async with full_broker: + await full_broker.start() + with patch.object( + IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + 
full_broker.publish("hello", queue=queue, exchange=exchange) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_called_once() + timeout=3, + ) + m.mock.assert_called_once() assert event.is_set() @pytest.mark.asyncio @@ -297,25 +306,28 @@ async def handler(msg: RabbitMessage): finally: event.set() - await full_broker.start() - with patch.object( - IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) - ) as m: + async with full_broker: + await full_broker.start() with patch.object( IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) - ) as m1: + ) as m: with patch.object( IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) - ) as m2: - await asyncio.wait( - ( - asyncio.create_task(full_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - assert not m.mock.called - assert not m1.mock.called - assert not m2.mock.called + ) as m1: + with patch.object( + IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) + ) as m2: + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("hello", queue) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + assert not m.mock.called + assert not m1.mock.called + assert not m2.mock.called assert event.is_set() diff --git a/tests/brokers/rabbit/test_router.py b/tests/brokers/rabbit/test_router.py index 965db3242a..4cc277940b 100644 --- a/tests/brokers/rabbit/test_router.py +++ b/tests/brokers/rabbit/test_router.py @@ -28,17 +28,20 @@ def subscriber(m): broker.include_router(router) - await broker.start() - - await asyncio.wait( - ( - asyncio.create_task(broker.publish("hello", f"test/{r_queue.name}")), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + async with broker: + await broker.start() + + await asyncio.wait( + ( + asyncio.create_task( + broker.publish("hello", f"test/{r_queue.name}") + ), + asyncio.create_task(event.wait()), + ), + 
timeout=3, + ) - assert event.is_set() + assert event.is_set() async def test_delayed_handlers_with_queue( self, @@ -58,19 +61,20 @@ def response(m): pub_broker.include_router(r) - await pub_broker.start() + async with pub_broker: + await pub_broker.start() - await asyncio.wait( - ( - asyncio.create_task( - pub_broker.publish("hello", f"test/{r_queue.name}") + await asyncio.wait( + ( + asyncio.create_task( + pub_broker.publish("hello", f"test/{r_queue.name}") + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + timeout=3, + ) - assert event.is_set() + assert event.is_set() class TestRouterLocal(RouterLocalTestcase): diff --git a/tests/brokers/redis/__init__.py b/tests/brokers/redis/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/brokers/redis/conftest.py b/tests/brokers/redis/conftest.py new file mode 100644 index 0000000000..fa9ff65f0f --- /dev/null +++ b/tests/brokers/redis/conftest.py @@ -0,0 +1,50 @@ +from dataclasses import dataclass + +import pytest +import pytest_asyncio + +from faststream.redis import ( + RedisBroker, + RedisRouter, + TestRedisBroker, +) + + +@dataclass +class Settings: + url = "redis://localhost:6379" # pragma: allowlist secret + host = "localhost" + port = 6379 + + +@pytest.fixture(scope="session") +def settings(): + return Settings() + + +@pytest.fixture +def router(): + return RedisRouter() + + +@pytest_asyncio.fixture +@pytest.mark.redis +async def broker(settings): + broker = RedisBroker(settings.url, apply_types=False) + async with broker: + yield broker + + +@pytest_asyncio.fixture +@pytest.mark.redis +async def full_broker(settings): + broker = RedisBroker(settings.url) + async with broker: + yield broker + + +@pytest_asyncio.fixture +async def test_broker(): + broker = RedisBroker() + async with TestRedisBroker(broker) as br: + yield br diff --git a/tests/brokers/redis/test_connect.py b/tests/brokers/redis/test_connect.py new file mode 100644 
index 0000000000..8028d577b7 --- /dev/null +++ b/tests/brokers/redis/test_connect.py @@ -0,0 +1,37 @@ +import pytest + +from faststream.redis import RedisBroker +from tests.brokers.base.connection import BrokerConnectionTestcase + + +@pytest.mark.redis +class TestConnection(BrokerConnectionTestcase): + broker = RedisBroker + + async def ping(self, broker: RedisBroker) -> bool: + await broker._connection.ping() + return True + + @pytest.mark.asyncio + async def test_init_connect_by_raw_data(self, settings): + async with RedisBroker( + "redis://localhost:6378", # will be ignored + host=settings.host, + port=settings.port, + ) as broker: + assert await self.ping(broker) + + @pytest.mark.asyncio + async def test_connect_merge_kwargs_with_priority(self, settings): + broker = self.broker(host="fake-host", port=6377) # kwargs will be ignored + assert await broker.connect( + host=settings.host, + port=settings.port, + ) + await broker.close() + + @pytest.mark.asyncio + async def test_connect_merge_args_and_kwargs_native(self, settings): + broker = self.broker("fake-url") # will be ignored + assert await broker.connect(url=settings.url) + await broker.close() diff --git a/tests/brokers/redis/test_consume.py b/tests/brokers/redis/test_consume.py new file mode 100644 index 0000000000..5b961d236b --- /dev/null +++ b/tests/brokers/redis/test_consume.py @@ -0,0 +1,305 @@ +import asyncio +from unittest.mock import MagicMock + +import pytest + +from faststream.redis import ListSub, PubSub, RedisBroker, StreamSub +from tests.brokers.base.consume import BrokerRealConsumeTestcase + + +@pytest.mark.redis +@pytest.mark.asyncio +class TestConsume(BrokerRealConsumeTestcase): + async def test_consume_native( + self, + consume_broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + queue: str, + ): + @consume_broker.subscriber(queue) + async def handler(msg): + mock(msg) + event.set() + + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + 
asyncio.create_task( + consume_broker._connection.publish(queue, "hello") + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with(b"hello") + + async def test_pattern_with_path( + self, + consume_broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + ): + @consume_broker.subscriber("test.{name}") + async def handler(msg): + mock(msg) + event.set() + + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + asyncio.create_task(consume_broker.publish("hello", "test.name")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with("hello") + + async def test_pattern_without_path( + self, + consume_broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + ): + @consume_broker.subscriber(PubSub("test.*", pattern=True)) + async def handler(msg): + mock(msg) + event.set() + + async with consume_broker: + await consume_broker.start() + await asyncio.wait( + ( + asyncio.create_task(consume_broker.publish("hello", "test.name")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with("hello") + + +@pytest.mark.redis +@pytest.mark.asyncio +class TestConsumeList: + async def test_consume_list( + self, + broker: RedisBroker, + event: asyncio.Event, + queue: str, + mock: MagicMock, + ): + @broker.subscriber(list=queue) + async def handler(msg): + mock(msg) + event.set() + + async with broker: + await broker.start() + await asyncio.wait( + ( + asyncio.create_task(broker.publish("hello", list=queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with("hello") + + async def test_consume_list_native( + self, + broker: RedisBroker, + event: asyncio.Event, + queue: str, + mock: MagicMock, + ): + @broker.subscriber(list=queue) + async def handler(msg): + mock(msg) + event.set() + + async with broker: + await broker.start() + await asyncio.wait( + ( + 
asyncio.create_task(broker._connection.rpush(queue, "hello")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with(b"hello") + + async def test_consume_list_batch_with_one(self, queue: str, broker: RedisBroker): + msgs_queue = asyncio.Queue(maxsize=1) + + @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=1)) + async def handler(msg): + await msgs_queue.put(msg) + + async with broker: + await broker.start() + + await broker.publish("hi", list=queue) + + result, _ = await asyncio.wait( + (asyncio.create_task(msgs_queue.get()),), + timeout=3, + ) + + assert ["hi"] == [r.result()[0] for r in result] + + async def test_consume_list_batch(self, queue: str, broker: RedisBroker): + msgs_queue = asyncio.Queue(maxsize=1) + + @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=1)) + async def handler(msg): + await msgs_queue.put(msg) + + async with broker: + await broker.start() + + await broker.publish_batch(1, "hi", list=queue) + + result, _ = await asyncio.wait( + (asyncio.create_task(msgs_queue.get()),), + timeout=3, + ) + + assert [{1, "hi"}] == [set(r.result()) for r in result] + + async def test_consume_list_batch_native(self, queue: str, broker: RedisBroker): + msgs_queue = asyncio.Queue(maxsize=1) + + @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=1)) + async def handler(msg): + await msgs_queue.put(msg) + + async with broker: + await broker.start() + + await broker._connection.rpush(queue, 1, "hi") + + result, _ = await asyncio.wait( + (asyncio.create_task(msgs_queue.get()),), + timeout=3, + ) + + assert [{1, "hi"}] == [set(r.result()) for r in result] + + +@pytest.mark.redis +@pytest.mark.asyncio +@pytest.mark.slow +class TestConsumeStream: + async def test_consume_stream( + self, + broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + queue, + ): + @broker.subscriber(stream=StreamSub(queue, polling_interval=3000)) + async def handler(msg): + mock(msg) + 
event.set() + + async with broker: + await broker.start() + await asyncio.sleep(1) + + await asyncio.wait( + ( + asyncio.create_task(broker.publish("hello", stream=queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with("hello") + + async def test_consume_stream_native( + self, + broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + queue, + ): + @broker.subscriber(stream=StreamSub(queue, polling_interval=3000)) + async def handler(msg): + mock(msg) + event.set() + + async with broker: + await broker.start() + await asyncio.sleep(1) + + await asyncio.wait( + ( + asyncio.create_task( + broker._connection.xadd(queue, {"message": "hello"}) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with({"message": "hello"}) + + async def test_consume_stream_batch( + self, + broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + queue, + ): + @broker.subscriber(stream=StreamSub(queue, polling_interval=3000, batch=True)) + async def handler(msg): + mock(msg) + event.set() + + async with broker: + await broker.start() + await asyncio.sleep(1) + + await asyncio.wait( + ( + asyncio.create_task(broker.publish("hello", stream=queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with(["hello"]) + + async def test_consume_stream_batch_native( + self, + broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + queue, + ): + @broker.subscriber(stream=StreamSub(queue, polling_interval=3000, batch=True)) + async def handler(msg): + mock(msg) + event.set() + + async with broker: + await broker.start() + await asyncio.sleep(1) + + await asyncio.wait( + ( + asyncio.create_task( + broker._connection.xadd(queue, {"message": "hello"}) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + mock.assert_called_once_with([{"message": "hello"}]) diff --git a/tests/brokers/redis/test_fastapi.py 
b/tests/brokers/redis/test_fastapi.py new file mode 100644 index 0000000000..3c739b2210 --- /dev/null +++ b/tests/brokers/redis/test_fastapi.py @@ -0,0 +1,16 @@ +import pytest + +from faststream.redis.fastapi import RedisRouter +from faststream.redis.test import TestRedisBroker, build_message +from tests.brokers.base.fastapi import FastAPILocalTestcase, FastAPITestcase + + +@pytest.mark.redis +class TestRouter(FastAPITestcase): + router_class = RedisRouter + + +class TestRouterLocal(FastAPILocalTestcase): + router_class = RedisRouter + broker_test = staticmethod(TestRedisBroker) + build_message = staticmethod(build_message) diff --git a/tests/brokers/redis/test_middlewares.py b/tests/brokers/redis/test_middlewares.py new file mode 100644 index 0000000000..76c0b9d609 --- /dev/null +++ b/tests/brokers/redis/test_middlewares.py @@ -0,0 +1,9 @@ +import pytest + +from faststream.redis import RedisBroker +from tests.brokers.base.middlewares import MiddlewareTestcase + + +@pytest.mark.redis +class TestMiddlewares(MiddlewareTestcase): + broker_class = RedisBroker diff --git a/tests/brokers/redis/test_parser.py b/tests/brokers/redis/test_parser.py new file mode 100644 index 0000000000..c40306adc2 --- /dev/null +++ b/tests/brokers/redis/test_parser.py @@ -0,0 +1,9 @@ +import pytest + +from faststream.redis import RedisBroker +from tests.brokers.base.parser import CustomParserTestcase + + +@pytest.mark.redis +class TestCustomParser(CustomParserTestcase): + broker_class = RedisBroker diff --git a/tests/brokers/redis/test_publish.py b/tests/brokers/redis/test_publish.py new file mode 100644 index 0000000000..cfc9754d08 --- /dev/null +++ b/tests/brokers/redis/test_publish.py @@ -0,0 +1,95 @@ +import asyncio +from unittest.mock import MagicMock + +import pytest + +from faststream.redis import ListSub, RedisBroker +from tests.brokers.base.publish import BrokerPublishTestcase + + +@pytest.mark.redis +@pytest.mark.asyncio +class TestPublish(BrokerPublishTestcase): + async def 
test_list_publisher( + self, + queue: str, + pub_broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + ): + @pub_broker.subscriber(list=queue) + @pub_broker.publisher(list=queue + "resp") + async def m(): + return "" + + @pub_broker.subscriber(list=queue + "resp") + async def resp(msg): + event.set() + mock(msg) + + async with pub_broker: + await pub_broker.start() + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("", list=queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with("") + + async def test_list_publish_batch(self, queue: str, broker: RedisBroker): + msgs_queue = asyncio.Queue(maxsize=2) + + @broker.subscriber(list=queue) + async def handler(msg): + await msgs_queue.put(msg) + + async with broker: + await broker.start() + + await broker.publish_batch(1, "hi", list=queue) + + result, _ = await asyncio.wait( + ( + asyncio.create_task(msgs_queue.get()), + asyncio.create_task(msgs_queue.get()), + ), + timeout=3, + ) + + assert {1, b"hi"} == {r.result() for r in result} + + async def test_batch_list_publisher( + self, + queue: str, + pub_broker: RedisBroker, + event: asyncio.Event, + mock: MagicMock, + ): + batch_list = ListSub(queue + "resp", batch=True) + + @pub_broker.subscriber(list=queue) + @pub_broker.publisher(list=batch_list) + async def m(): + return 1, 2, 3 + + @pub_broker.subscriber(list=batch_list) + async def resp(msg): + event.set() + mock(msg) + + async with pub_broker: + await pub_broker.start() + await asyncio.wait( + ( + asyncio.create_task(pub_broker.publish("", list=queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with([1, 2, 3]) diff --git a/tests/brokers/redis/test_router.py b/tests/brokers/redis/test_router.py new file mode 100644 index 0000000000..14c7c8bb8e --- /dev/null +++ b/tests/brokers/redis/test_router.py @@ -0,0 +1,73 @@ +import asyncio + +import pytest + +from 
faststream.redis import RedisBroker, RedisRoute, RedisRouter +from tests.brokers.base.router import RouterLocalTestcase, RouterTestcase + + +@pytest.mark.redis +class TestRouter(RouterTestcase): + broker_class = RedisRouter + route_class = RedisRoute + + +class TestRouterLocal(RouterLocalTestcase): + broker_class = RedisRouter + route_class = RedisRoute + + async def test_delayed_list_handlers( + self, + event: asyncio.Event, + queue: str, + pub_broker: RedisBroker, + ): + def response(m): + event.set() + + r = RedisRouter(prefix="test_", handlers=(RedisRoute(response, list=queue),)) + + pub_broker.include_router(r) + + async with pub_broker: + await pub_broker.start() + + await asyncio.wait( + ( + asyncio.create_task( + pub_broker.publish("hello", list=f"test_{queue}") + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + + async def test_delayed_stream_handlers( + self, + event: asyncio.Event, + queue: str, + pub_broker: RedisBroker, + ): + def response(m): + event.set() + + r = RedisRouter(prefix="test_", handlers=(RedisRoute(response, stream=queue),)) + + pub_broker.include_router(r) + + async with pub_broker: + await pub_broker.start() + + await asyncio.wait( + ( + asyncio.create_task( + pub_broker.publish("hello", stream=f"test_{queue}") + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() diff --git a/tests/brokers/redis/test_rpc.py b/tests/brokers/redis/test_rpc.py new file mode 100644 index 0000000000..f076ce3b96 --- /dev/null +++ b/tests/brokers/redis/test_rpc.py @@ -0,0 +1,19 @@ +import pytest + +from faststream.redis import RedisBroker +from tests.brokers.base.rpc import BrokerRPCTestcase, ReplyAndConsumeForbidden + + +@pytest.mark.redis +class TestRPC(BrokerRPCTestcase, ReplyAndConsumeForbidden): + @pytest.mark.asyncio + async def test_list_rpc(self, queue: str, rpc_broker: RedisBroker): + @rpc_broker.subscriber(list=queue) + async def m(m): # pragma: no cover + return "1" + + 
async with rpc_broker: + await rpc_broker.start() + r = await rpc_broker.publish("hello", list=queue, rpc_timeout=3, rpc=True) + + assert r == "1" diff --git a/tests/brokers/redis/test_schemas.py b/tests/brokers/redis/test_schemas.py new file mode 100644 index 0000000000..4125ea9d2d --- /dev/null +++ b/tests/brokers/redis/test_schemas.py @@ -0,0 +1,13 @@ +import pytest + +from faststream.redis import StreamSub + + +def test_stream_group(): + with pytest.raises(ValueError): + StreamSub("test", group="group") + + with pytest.raises(ValueError): + StreamSub("test", consumer="consumer") + + StreamSub("test", group="group", consumer="consumer") diff --git a/tests/brokers/redis/test_test_client.py b/tests/brokers/redis/test_test_client.py new file mode 100644 index 0000000000..6fe7a1ec2f --- /dev/null +++ b/tests/brokers/redis/test_test_client.py @@ -0,0 +1,201 @@ +import asyncio + +import pytest + +from faststream import BaseMiddleware +from faststream.redis import ListSub, RedisBroker, StreamSub, TestRedisBroker +from tests.brokers.base.testclient import BrokerTestclientTestcase + + +@pytest.mark.asyncio +class TestTestclient(BrokerTestclientTestcase): + @pytest.mark.redis + async def test_with_real_testclient( + self, + broker: RedisBroker, + queue: str, + event: asyncio.Event, + ): + @broker.subscriber(queue) + def subscriber(m): + event.set() + + async with TestRedisBroker(broker, with_real=True) as br: + await asyncio.wait( + ( + asyncio.create_task(br.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + + async def test_respect_middleware(self, queue): + routes = [] + + class Middleware(BaseMiddleware): + async def on_receive(self) -> None: + routes.append(None) + return await super().on_receive() + + broker = RedisBroker() + broker.middlewares = (Middleware,) + + @broker.subscriber(queue) + async def h1(): + ... + + @broker.subscriber(queue + "1") + async def h2(): + ... 
+ + async with TestRedisBroker(broker) as br: + await br.publish("", queue) + await br.publish("", queue + "1") + + assert len(routes) == 2 + + @pytest.mark.redis + async def test_real_respect_middleware(self, queue): + routes = [] + + class Middleware(BaseMiddleware): + async def on_receive(self) -> None: + routes.append(None) + return await super().on_receive() + + broker = RedisBroker() + broker.middlewares = (Middleware,) + + @broker.subscriber(queue) + async def h1(): + ... + + @broker.subscriber(queue + "1") + async def h2(): + ... + + async with TestRedisBroker(broker, with_real=True) as br: + await br.publish("", queue) + await br.publish("", queue + "1") + await h1.wait_call(3) + await h2.wait_call(3) + + assert len(routes) == 2 + + async def test_pub_sub_pattern( + self, + test_broker: RedisBroker, + ): + @test_broker.subscriber("test.{name}") + async def handler(msg): + return msg + + await test_broker.start() + + assert 1 == await test_broker.publish(1, "test.name.useless", rpc=True) + handler.mock.assert_called_once_with(1) + + async def test_list( + self, + test_broker: RedisBroker, + queue: str, + ): + @test_broker.subscriber(list=queue) + async def handler(msg): + return msg + + await test_broker.start() + + assert 1 == await test_broker.publish(1, list=queue, rpc=True) + handler.mock.assert_called_once_with(1) + + async def test_batch_pub_by_default_pub( + self, + test_broker: RedisBroker, + queue: str, + ): + @test_broker.subscriber(list=ListSub(queue, batch=True)) + async def m(): + pass + + await test_broker.start() + await test_broker.publish("hello", list=queue) + m.mock.assert_called_once_with(["hello"]) + + async def test_batch_pub_by_pub_batch( + self, + test_broker: RedisBroker, + queue: str, + ): + @test_broker.subscriber(list=ListSub(queue, batch=True)) + async def m(): + pass + + await test_broker.start() + await test_broker.publish_batch("hello", list=queue) + m.mock.assert_called_once_with(["hello"]) + + async def 
test_batch_publisher_mock( + self, + test_broker: RedisBroker, + queue: str, + ): + batch_list = ListSub(queue + "1", batch=True) + publisher = test_broker.publisher(list=batch_list) + + @publisher + @test_broker.subscriber(queue) + async def m(): + return 1, 2, 3 + + await test_broker.start() + await test_broker.publish("hello", queue) + m.mock.assert_called_once_with("hello") + publisher.mock.assert_called_once_with([1, 2, 3]) + + async def test_stream( + self, + test_broker: RedisBroker, + queue: str, + ): + @test_broker.subscriber(stream=queue) + async def handler(msg): + return msg + + await test_broker.start() + + assert 1 == await test_broker.publish(1, stream=queue, rpc=True) + handler.mock.assert_called_once_with(1) + + async def test_stream_batch_pub_by_default_pub( + self, + test_broker: RedisBroker, + queue: str, + ): + @test_broker.subscriber(stream=StreamSub(queue, batch=True)) + async def m(): + pass + + await test_broker.start() + await test_broker.publish("hello", stream=queue) + m.mock.assert_called_once_with(["hello"]) + + async def test_stream_publisher( + self, + test_broker: RedisBroker, + queue: str, + ): + batch_stream = StreamSub(queue + "1") + publisher = test_broker.publisher(stream=batch_stream) + + @publisher + @test_broker.subscriber(queue) + async def m(): + return 1, 2, 3 + + await test_broker.start() + await test_broker.publish("hello", queue) + m.mock.assert_called_once_with("hello") + publisher.mock.assert_called_once_with([1, 2, 3]) diff --git a/tests/cli/test_app.py b/tests/cli/test_app.py index 6497468b9f..b0195ea9b1 100644 --- a/tests/cli/test_app.py +++ b/tests/cli/test_app.py @@ -2,7 +2,8 @@ import os import signal import sys -from unittest.mock import Mock, patch +from contextlib import asynccontextmanager +from unittest.mock import AsyncMock, Mock, patch import anyio import pytest @@ -132,6 +133,30 @@ async def test_running(async_mock, app: FastStream): async_mock.broker_stopped.assert_called_once() +@pytest.mark.asyncio 
+async def test_running_lifespan_contextmanager(async_mock, mock: Mock, app: FastStream): + @asynccontextmanager + async def lifespan(env: str): + mock.on(env) + yield + mock.off() + + app = FastStream(app.broker, lifespan=lifespan) + + app._init_async_cycle() + app._stop_event.set() + + with patch.object(app.broker, "start", async_mock.broker_run): + with patch.object(app.broker, "close", async_mock.broker_stopped): + await app.run(run_extra_options={"env": "test"}) + + async_mock.broker_run.assert_called_once() + async_mock.broker_stopped.assert_called_once() + + mock.on.assert_called_once_with("test") + mock.off.assert_called_once() + + @pytest.mark.asyncio @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") async def test_stop_with_sigint(async_mock, app: FastStream): @@ -218,5 +243,46 @@ def test_sync_test_app_with_excp(mock: Mock): mock.off.assert_called_once() +@pytest.mark.asyncio +async def test_lifespan_contextmanager(async_mock: AsyncMock, app: FastStream): + @asynccontextmanager + async def lifespan(env: str): + await async_mock.on(env) + yield + await async_mock.off() + + app = FastStream(app.broker, lifespan=lifespan) + + with patch.object(app.broker, "start", async_mock.broker_run): + with patch.object(app.broker, "close", async_mock.broker_stopped): + async with TestApp(app, {"env": "test"}): + pass + + async_mock.on.assert_awaited_once_with("test") + async_mock.off.assert_awaited_once() + async_mock.broker_run.assert_called_once() + async_mock.broker_stopped.assert_called_once() + + +def test_sync_lifespan_contextmanager(async_mock: AsyncMock, app: FastStream): + @asynccontextmanager + async def lifespan(env: str): + await async_mock.on(env) + yield + await async_mock.off() + + app = FastStream(app.broker, lifespan=lifespan) + + with patch.object(app.broker, "start", async_mock.broker_run): + with patch.object(app.broker, "close", async_mock.broker_stopped): + with TestApp(app, {"env": "test"}): + pass + + 
async_mock.on.assert_awaited_once_with("test") + async_mock.off.assert_awaited_once() + async_mock.broker_run.assert_called_once() + async_mock.broker_stopped.assert_called_once() + + async def _kill(sig): os.kill(os.getpid(), sig) diff --git a/tests/cli/utils/test_imports.py b/tests/cli/utils/test_imports.py index 299d4b1118..8323fcbcac 100644 --- a/tests/cli/utils/test_imports.py +++ b/tests/cli/utils/test_imports.py @@ -14,7 +14,7 @@ def test_import_wrong(): @pytest.mark.parametrize( - "test_input,exp_module,exp_app", + ("test_input", "exp_module", "exp_app"), ( pytest.param("module:app", "module", "app"), pytest.param("module.module.module:app", "module/module/module", "app"), @@ -37,7 +37,7 @@ def test_import_from_string_import_wrong(): @pytest.mark.parametrize( - "test_input,exp_module", + ("test_input", "exp_module"), ( pytest.param("examples.kafka.testing:app", "examples/kafka/testing.py"), pytest.param("examples.nats.e01_basic:app", "examples/nats/e01_basic.py"), diff --git a/tests/cli/utils/test_logs.py b/tests/cli/utils/test_logs.py index 38e9dea1a9..253f9eedaf 100644 --- a/tests/cli/utils/test_logs.py +++ b/tests/cli/utils/test_logs.py @@ -9,7 +9,7 @@ @pytest.mark.parametrize( - "level,broker", + ("level", "broker"), tuple( zip_longest( ( @@ -30,7 +30,7 @@ def test_set_level(level, app: FastStream): @pytest.mark.parametrize( - "level,broker", + ("level", "broker"), tuple( zip_longest( [], diff --git a/tests/cli/utils/test_parser.py b/tests/cli/utils/test_parser.py index d0a34ab0ef..45319659c3 100644 --- a/tests/cli/utils/test_parser.py +++ b/tests/cli/utils/test_parser.py @@ -22,18 +22,20 @@ "1", ) ARG6 = ("--some-key",) +ARG7 = ("--k7", "1", "--k7", "2") @pytest.mark.parametrize( "args", ( - (APPLICATION, *ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6), - (*ARG1, APPLICATION, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6), - (*ARG1, *ARG2, APPLICATION, *ARG3, *ARG4, *ARG5, *ARG6), - (*ARG1, *ARG2, *ARG3, APPLICATION, *ARG4, *ARG5, *ARG6), - (*ARG1, *ARG2, *ARG3, *ARG4, 
APPLICATION, *ARG5, *ARG6), - (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, APPLICATION, *ARG6), - (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, APPLICATION), + (APPLICATION, *ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7), + (*ARG1, APPLICATION, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7), + (*ARG1, *ARG2, APPLICATION, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7), + (*ARG1, *ARG2, *ARG3, APPLICATION, *ARG4, *ARG5, *ARG6, *ARG7), + (*ARG1, *ARG2, *ARG3, *ARG4, APPLICATION, *ARG5, *ARG6, *ARG7), + (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, APPLICATION, *ARG6, *ARG7), + (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, APPLICATION, *ARG7), + (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7, APPLICATION), ), ) def test_custom_argument_parsing(args: Tuple[str]): @@ -46,4 +48,5 @@ def test_custom_argument_parsing(args: Tuple[str]): "k4": False, "k5": ["1", "1"], "some_key": True, + "k7": ["1", "2"], } diff --git a/tests/docs/getting_started/asyncapi/__init__.py b/tests/docs/getting_started/asyncapi/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/docs/getting_started/cli/__init__.py b/tests/docs/getting_started/cli/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/docs/getting_started/cli/test_kafka_context.py b/tests/docs/getting_started/cli/test_kafka_context.py index 25c0e9c986..4e6410b969 100644 --- a/tests/docs/getting_started/cli/test_kafka_context.py +++ b/tests/docs/getting_started/cli/test_kafka_context.py @@ -12,6 +12,5 @@ async def test(): with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.cli.kafka_context import app, broker - async with TestKafkaBroker(broker): - async with TestApp(app, {"env": ""}): - assert context.get("settings").host == "localhost" + async with TestKafkaBroker(broker), 
TestApp(app, {"env": ""}): + assert context.get("settings").host == "localhost" diff --git a/tests/docs/getting_started/cli/test_nats_context.py b/tests/docs/getting_started/cli/test_nats_context.py index 3aa25b7c1f..d8afed192c 100644 --- a/tests/docs/getting_started/cli/test_nats_context.py +++ b/tests/docs/getting_started/cli/test_nats_context.py @@ -12,6 +12,5 @@ async def test(): with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.cli.nats_context import app, broker - async with TestNatsBroker(broker): - async with TestApp(app, {"env": ""}): - assert context.get("settings").host == "localhost" + async with TestNatsBroker(broker), TestApp(app, {"env": ""}): + assert context.get("settings").host == "localhost" diff --git a/tests/docs/getting_started/cli/test_rabbit_context.py b/tests/docs/getting_started/cli/test_rabbit_context.py index 88d4cc5870..e4f45d78e5 100644 --- a/tests/docs/getting_started/cli/test_rabbit_context.py +++ b/tests/docs/getting_started/cli/test_rabbit_context.py @@ -14,9 +14,8 @@ async def test(): ): from docs.docs_src.getting_started.cli.rabbit_context import app, broker - async with TestRabbitBroker(broker): - async with TestApp(app, {"env": ".env"}): - assert ( - context.get("settings").host - == "amqp://guest:guest@localhost:5673/" # pragma: allowlist secret - ) + async with TestRabbitBroker(broker), TestApp(app, {"env": ".env"}): + assert ( + context.get("settings").host + == "amqp://guest:guest@localhost:5673/" # pragma: allowlist secret + ) diff --git a/tests/docs/getting_started/cli/test_redis_context.py b/tests/docs/getting_started/cli/test_redis_context.py new file mode 100644 index 0000000000..180f16df49 --- /dev/null +++ b/tests/docs/getting_started/cli/test_redis_context.py @@ -0,0 +1,16 @@ +import pytest + +from faststream import TestApp, context +from faststream.redis import TestRedisBroker +from tests.marks import pydanticV2 +from tests.mocks import mock_pydantic_settings_env + + 
+@pydanticV2 +@pytest.mark.asyncio +async def test(): + with mock_pydantic_settings_env({"host": "redis://localhost:6380"}): + from docs.docs_src.getting_started.cli.redis_context import app, broker + + async with TestRedisBroker(broker), TestApp(app, {"env": ".env"}): + assert context.get("settings").host == "redis://localhost:6380" diff --git a/tests/docs/getting_started/context/test_annotated.py b/tests/docs/getting_started/context/test_annotated.py index 63453590d5..564759871b 100644 --- a/tests/docs/getting_started/context/test_annotated.py +++ b/tests/docs/getting_started/context/test_annotated.py @@ -3,13 +3,14 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker from tests.marks import python39 @python39 @pytest.mark.asyncio async def test_annotated_kafka(): - from docs.docs_src.getting_started.context.annotated_kafka import ( + from docs.docs_src.getting_started.context.kafka.annotated import ( base_handler, broker, ) @@ -23,7 +24,7 @@ async def test_annotated_kafka(): @python39 @pytest.mark.asyncio async def test_annotated_rabbit(): - from docs.docs_src.getting_started.context.annotated_rabbit import ( + from docs.docs_src.getting_started.context.rabbit.annotated import ( base_handler, broker, ) @@ -37,7 +38,7 @@ async def test_annotated_rabbit(): @python39 @pytest.mark.asyncio async def test_annotated_nats(): - from docs.docs_src.getting_started.context.annotated_nats import ( + from docs.docs_src.getting_started.context.nats.annotated import ( base_handler, broker, ) @@ -46,3 +47,17 @@ async def test_annotated_nats(): await br.publish("Hi!", "test") base_handler.mock.assert_called_once_with("Hi!") + + +@python39 +@pytest.mark.asyncio +async def test_annotated_redis(): + from docs.docs_src.getting_started.context.redis.annotated import ( + base_handler, + broker, + ) + + async with TestRedisBroker(broker) as br: + await 
br.publish("Hi!", "test") + + base_handler.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/context/test_base.py b/tests/docs/getting_started/context/test_base.py index 61abd56118..cfe9d0b95d 100644 --- a/tests/docs/getting_started/context/test_base.py +++ b/tests/docs/getting_started/context/test_base.py @@ -3,11 +3,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_base_kafka(): - from docs.docs_src.getting_started.context.base_kafka import base_handler, broker + from docs.docs_src.getting_started.context.kafka.base import base_handler, broker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -17,7 +18,7 @@ async def test_base_kafka(): @pytest.mark.asyncio async def test_base_rabbit(): - from docs.docs_src.getting_started.context.base_rabbit import base_handler, broker + from docs.docs_src.getting_started.context.rabbit.base import base_handler, broker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test") @@ -27,9 +28,19 @@ async def test_base_rabbit(): @pytest.mark.asyncio async def test_base_nats(): - from docs.docs_src.getting_started.context.base_nats import base_handler, broker + from docs.docs_src.getting_started.context.nats.base import base_handler, broker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test") base_handler.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_base_redis(): + from docs.docs_src.getting_started.context.redis.base import base_handler, broker + + async with TestRedisBroker(broker) as br: + await br.publish("Hi!", "test") + + base_handler.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/context/test_cast.py b/tests/docs/getting_started/context/test_cast.py index 63d9bf60c7..685613b24d 100644 --- 
a/tests/docs/getting_started/context/test_cast.py +++ b/tests/docs/getting_started/context/test_cast.py @@ -3,11 +3,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_cast_kafka(): - from docs.docs_src.getting_started.context.cast_kafka import ( + from docs.docs_src.getting_started.context.kafka.cast import ( broker, handle, handle_int, @@ -25,7 +26,7 @@ async def test_cast_kafka(): @pytest.mark.asyncio async def test_cast_rabbit(): - from docs.docs_src.getting_started.context.cast_rabbit import ( + from docs.docs_src.getting_started.context.rabbit.cast import ( broker, handle, handle_int, @@ -43,7 +44,7 @@ async def test_cast_rabbit(): @pytest.mark.asyncio async def test_cast_nats(): - from docs.docs_src.getting_started.context.cast_nats import ( + from docs.docs_src.getting_started.context.nats.cast import ( broker, handle, handle_int, @@ -57,3 +58,21 @@ async def test_cast_nats(): await br.publish("Hi!", "test-subject2") handle_int.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_cast_redis(): + from docs.docs_src.getting_started.context.redis.cast import ( + broker, + handle, + handle_int, + ) + + async with TestRedisBroker(broker) as br: + await br.publish("Hi!", "test-channel") + + handle.mock.assert_called_once_with("Hi!") + + await br.publish("Hi!", "test-channel2") + + handle_int.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/context/test_custom_global.py b/tests/docs/getting_started/context/test_custom_global.py index 9727ba2933..030c705fa4 100644 --- a/tests/docs/getting_started/context/test_custom_global.py +++ b/tests/docs/getting_started/context/test_custom_global.py @@ -4,48 +4,60 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from 
faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_custom_global_context_kafka(): - from docs.docs_src.getting_started.context.custom_global_context_kafka import ( + from docs.docs_src.getting_started.context.kafka.custom_global_context import ( app, broker, handle, ) - async with TestKafkaBroker(broker) as br: - async with TestApp(app): - await br.publish("Hi!", "test-topic") + async with TestKafkaBroker(broker) as br, TestApp(app): + await br.publish("Hi!", "test-topic") - handle.mock.assert_called_once_with("Hi!") + handle.mock.assert_called_once_with("Hi!") @pytest.mark.asyncio async def test_custom_global_context_rabbit(): - from docs.docs_src.getting_started.context.custom_global_context_rabbit import ( + from docs.docs_src.getting_started.context.rabbit.custom_global_context import ( app, broker, handle, ) - async with TestRabbitBroker(broker) as br: - async with TestApp(app): - await br.publish("Hi!", "test-queue") + async with TestRabbitBroker(broker) as br, TestApp(app): + await br.publish("Hi!", "test-queue") - handle.mock.assert_called_once_with("Hi!") + handle.mock.assert_called_once_with("Hi!") @pytest.mark.asyncio async def test_custom_global_context_nats(): - from docs.docs_src.getting_started.context.custom_global_context_nats import ( + from docs.docs_src.getting_started.context.nats.custom_global_context import ( app, broker, handle, ) - async with TestNatsBroker(broker) as br: - async with TestApp(app): - await br.publish("Hi!", "test-subject") + async with TestNatsBroker(broker) as br, TestApp(app): + await br.publish("Hi!", "test-subject") - handle.mock.assert_called_once_with("Hi!") + handle.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_custom_global_context_redis(): + from docs.docs_src.getting_started.context.redis.custom_global_context import ( + app, + broker, + handle, + ) + + async with TestRedisBroker(broker) as br, TestApp(app): + await br.publish("Hi!", "test-channel") + + 
handle.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/context/test_custom_local.py b/tests/docs/getting_started/context/test_custom_local.py index 3a7251e53e..0c60e976c9 100644 --- a/tests/docs/getting_started/context/test_custom_local.py +++ b/tests/docs/getting_started/context/test_custom_local.py @@ -3,11 +3,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_custom_local_context_kafka(): - from docs.docs_src.getting_started.context.custom_local_context_kafka import ( + from docs.docs_src.getting_started.context.kafka.custom_local_context import ( broker, handle, ) @@ -20,7 +21,7 @@ async def test_custom_local_context_kafka(): @pytest.mark.asyncio async def test_custom_local_context_rabbit(): - from docs.docs_src.getting_started.context.custom_local_context_rabbit import ( + from docs.docs_src.getting_started.context.rabbit.custom_local_context import ( broker, handle, ) @@ -33,7 +34,7 @@ async def test_custom_local_context_rabbit(): @pytest.mark.asyncio async def test_custom_local_context_nats(): - from docs.docs_src.getting_started.context.custom_local_context_nats import ( + from docs.docs_src.getting_started.context.nats.custom_local_context import ( broker, handle, ) @@ -42,3 +43,16 @@ async def test_custom_local_context_nats(): await br.publish("Hi!", "test-subject") handle.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_custom_local_context_redis(): + from docs.docs_src.getting_started.context.redis.custom_local_context import ( + broker, + handle, + ) + + async with TestRedisBroker(broker) as br: + await br.publish("Hi!", "test-channel") + + handle.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/context/test_default_arguments.py b/tests/docs/getting_started/context/test_default_arguments.py index 
5b19adb05f..24596b3d9b 100644 --- a/tests/docs/getting_started/context/test_default_arguments.py +++ b/tests/docs/getting_started/context/test_default_arguments.py @@ -3,11 +3,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_default_arguments_kafka(): - from docs.docs_src.getting_started.context.default_arguments_kafka import ( + from docs.docs_src.getting_started.context.kafka.default_arguments import ( broker, handle, ) @@ -20,7 +21,7 @@ async def test_default_arguments_kafka(): @pytest.mark.asyncio async def test_default_arguments_rabbit(): - from docs.docs_src.getting_started.context.default_arguments_rabbit import ( + from docs.docs_src.getting_started.context.rabbit.default_arguments import ( broker, handle, ) @@ -33,7 +34,7 @@ async def test_default_arguments_rabbit(): @pytest.mark.asyncio async def test_default_arguments_nats(): - from docs.docs_src.getting_started.context.default_arguments_nats import ( + from docs.docs_src.getting_started.context.nats.default_arguments import ( broker, handle, ) @@ -42,3 +43,16 @@ async def test_default_arguments_nats(): await br.publish("Hi!", "test-subject") handle.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_default_arguments_redis(): + from docs.docs_src.getting_started.context.redis.default_arguments import ( + broker, + handle, + ) + + async with TestRedisBroker(broker) as br: + await br.publish("Hi!", "test-channel") + + handle.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/context/test_existed_context.py b/tests/docs/getting_started/context/test_existed_context.py index 223ab64dc9..422d175423 100644 --- a/tests/docs/getting_started/context/test_existed_context.py +++ b/tests/docs/getting_started/context/test_existed_context.py @@ -3,11 +3,12 @@ from faststream.kafka import 
TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_existed_context_kafka(): - from docs.docs_src.getting_started.context.existed_context_kafka import ( + from docs.docs_src.getting_started.context.kafka.existed_context import ( broker_object, ) @@ -24,7 +25,7 @@ async def resp(): @pytest.mark.asyncio async def test_existed_context_rabbit(): - from docs.docs_src.getting_started.context.existed_context_rabbit import ( + from docs.docs_src.getting_started.context.rabbit.existed_context import ( broker_object, ) @@ -41,7 +42,7 @@ async def resp(): @pytest.mark.asyncio async def test_existed_context_nats(): - from docs.docs_src.getting_started.context.existed_context_nats import ( + from docs.docs_src.getting_started.context.nats.existed_context import ( broker_object, ) @@ -54,3 +55,20 @@ async def resp(): await br.publish("Hi!", "response-subject") assert resp.mock.call_count == 2 + + +@pytest.mark.asyncio +async def test_existed_context_redis(): + from docs.docs_src.getting_started.context.redis.existed_context import ( + broker_object, + ) + + @broker_object.subscriber("response") + async def resp(): + ... 
+ + async with TestRedisBroker(broker_object) as br: + await br.publish("Hi!", "test-channel") + await br.publish("Hi!", "response-channel") + + assert resp.mock.call_count == 2 diff --git a/tests/docs/getting_started/context/test_fields_access.py b/tests/docs/getting_started/context/test_fields_access.py index c8b08e73a9..3c0a82b4c0 100644 --- a/tests/docs/getting_started/context/test_fields_access.py +++ b/tests/docs/getting_started/context/test_fields_access.py @@ -3,11 +3,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_fields_access_kafka(): - from docs.docs_src.getting_started.context.fields_access_kafka import ( + from docs.docs_src.getting_started.context.kafka.fields_access import ( broker, handle, ) @@ -20,7 +21,7 @@ async def test_fields_access_kafka(): @pytest.mark.asyncio async def test_fields_access_rabbit(): - from docs.docs_src.getting_started.context.fields_access_rabbit import ( + from docs.docs_src.getting_started.context.rabbit.fields_access import ( broker, handle, ) @@ -33,7 +34,7 @@ async def test_fields_access_rabbit(): @pytest.mark.asyncio async def test_fields_access_nats(): - from docs.docs_src.getting_started.context.fields_access_nats import ( + from docs.docs_src.getting_started.context.nats.fields_access import ( broker, handle, ) @@ -42,3 +43,16 @@ async def test_fields_access_nats(): await br.publish("Hi!", "test-subject", headers={"user": "John"}) handle.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_fields_access_redis(): + from docs.docs_src.getting_started.context.redis.fields_access import ( + broker, + handle, + ) + + async with TestRedisBroker(broker) as br: + await br.publish("Hi!", "test-channel", headers={"user": "John"}) + + handle.mock.assert_called_once_with("Hi!") diff --git 
a/tests/docs/getting_started/context/test_manual_local_context.py b/tests/docs/getting_started/context/test_manual_local_context.py index 5e564911ed..081bb229f3 100644 --- a/tests/docs/getting_started/context/test_manual_local_context.py +++ b/tests/docs/getting_started/context/test_manual_local_context.py @@ -3,11 +3,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_manual_local_context_kafka(): - from docs.docs_src.getting_started.context.manual_local_context_kafka import ( + from docs.docs_src.getting_started.context.kafka.manual_local_context import ( broker, handle, ) @@ -20,7 +21,7 @@ async def test_manual_local_context_kafka(): @pytest.mark.asyncio async def test_manual_local_context_rabbit(): - from docs.docs_src.getting_started.context.manual_local_context_rabbit import ( + from docs.docs_src.getting_started.context.rabbit.manual_local_context import ( broker, handle, ) @@ -33,7 +34,7 @@ async def test_manual_local_context_rabbit(): @pytest.mark.asyncio async def test_manual_local_context_nats(): - from docs.docs_src.getting_started.context.manual_local_context_nats import ( + from docs.docs_src.getting_started.context.nats.manual_local_context import ( broker, handle, ) @@ -42,3 +43,16 @@ async def test_manual_local_context_nats(): await br.publish("Hi!", "test-subject") handle.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_manual_local_context_redis(): + from docs.docs_src.getting_started.context.redis.manual_local_context import ( + broker, + handle, + ) + + async with TestRedisBroker(broker) as br: + await br.publish("Hi!", "test-channel") + + handle.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/dependencies/basic/test_nested_depends.py b/tests/docs/getting_started/dependencies/basic/test_nested_depends.py index 
f1e7c1b737..bc775ed4e5 100644 --- a/tests/docs/getting_started/dependencies/basic/test_nested_depends.py +++ b/tests/docs/getting_started/dependencies/basic/test_nested_depends.py @@ -3,6 +3,7 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio @@ -39,3 +40,15 @@ async def test_nested_depends_nats(): async with TestNatsBroker(broker): await broker.publish({}, "test") handler.mock.assert_called_once_with({}) + + +@pytest.mark.asyncio +async def test_nested_depends_redis(): + from docs.docs_src.getting_started.dependencies.basic.redis.nested_depends import ( + broker, + handler, + ) + + async with TestRedisBroker(broker): + await broker.publish({}, "test") + handler.mock.assert_called_once_with({}) diff --git a/tests/docs/getting_started/dependencies/test_basic.py b/tests/docs/getting_started/dependencies/test_basic.py index 63f578d852..97a14cf1c9 100644 --- a/tests/docs/getting_started/dependencies/test_basic.py +++ b/tests/docs/getting_started/dependencies/test_basic.py @@ -12,11 +12,10 @@ async def test_basic_kafka(): handle, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with( - { - "name": "John", - "user_id": 1, - } - ) + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with( + { + "name": "John", + "user_id": 1, + } + ) diff --git a/tests/docs/getting_started/dependencies/test_class.py b/tests/docs/getting_started/dependencies/test_class.py index 48e4a67e48..5aa8f134d2 100644 --- a/tests/docs/getting_started/dependencies/test_class.py +++ b/tests/docs/getting_started/dependencies/test_class.py @@ -12,11 +12,10 @@ async def test_basic_kafka(): handle, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with( - { - "name": "John", - 
"user_id": 1, - } - ) + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with( + { + "name": "John", + "user_id": 1, + } + ) diff --git a/tests/docs/getting_started/dependencies/test_global.py b/tests/docs/getting_started/dependencies/test_global.py index b5ded42fe7..23d0a9be96 100644 --- a/tests/docs/getting_started/dependencies/test_global.py +++ b/tests/docs/getting_started/dependencies/test_global.py @@ -12,14 +12,13 @@ async def test_global_kafka(): handle, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with( - { - "name": "John", - "user_id": 1, - } - ) + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with( + { + "name": "John", + "user_id": 1, + } + ) - with pytest.raises(ValueError): - await broker.publish({"name": "Ted", "user_id": 1}, "test-topic") + with pytest.raises(ValueError): + await broker.publish({"name": "Ted", "user_id": 1}, "test-topic") diff --git a/tests/docs/getting_started/dependencies/test_global_broker.py b/tests/docs/getting_started/dependencies/test_global_broker.py index afe4134462..cf28402530 100644 --- a/tests/docs/getting_started/dependencies/test_global_broker.py +++ b/tests/docs/getting_started/dependencies/test_global_broker.py @@ -12,14 +12,13 @@ async def test_global_broker_kafka(): handle, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with( - { - "name": "John", - "user_id": 1, - } - ) + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with( + { + "name": "John", + "user_id": 1, + } + ) - with pytest.raises(ValueError): - await broker.publish({"name": "Ted", "user_id": 1}, "test-topic") + with pytest.raises(ValueError): + await broker.publish({"name": "Ted", "user_id": 1}, "test-topic") diff --git a/tests/docs/getting_started/dependencies/test_sub_dep.py 
b/tests/docs/getting_started/dependencies/test_sub_dep.py index bac5e0951b..9e016076b0 100644 --- a/tests/docs/getting_started/dependencies/test_sub_dep.py +++ b/tests/docs/getting_started/dependencies/test_sub_dep.py @@ -12,14 +12,13 @@ async def test_sub_dep_kafka(): handle, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with( - { - "name": "John", - "user_id": 1, - } - ) + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with( + { + "name": "John", + "user_id": 1, + } + ) - with pytest.raises(AssertionError): - await broker.publish({"name": "Ted", "user_id": 1}, "test-topic") + with pytest.raises(AssertionError): + await broker.publish({"name": "Ted", "user_id": 1}, "test-topic") diff --git a/tests/docs/getting_started/dependencies/test_yield.py b/tests/docs/getting_started/dependencies/test_yield.py index 0fc8d3b44a..93df041d25 100644 --- a/tests/docs/getting_started/dependencies/test_yield.py +++ b/tests/docs/getting_started/dependencies/test_yield.py @@ -12,6 +12,5 @@ async def test_yield_kafka(): handle, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/index/test_basic.py b/tests/docs/getting_started/index/test_basic.py index e543aed58f..3a24edb783 100644 --- a/tests/docs/getting_started/index/test_basic.py +++ b/tests/docs/getting_started/index/test_basic.py @@ -3,6 +3,7 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio @@ -33,3 +34,13 @@ async def test_quickstart_index_nats(): await br.publish("", "test") base_handler.mock.assert_called_once_with("") + + +@pytest.mark.asyncio +async def 
test_quickstart_index_redis(): + from docs.docs_src.getting_started.index.base_redis import base_handler, broker + + async with TestRedisBroker(broker) as br: + await br.publish("", "test") + + base_handler.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/lifespan/test_basic.py b/tests/docs/getting_started/lifespan/test_basic.py index a5c31c70d2..7f0262fe91 100644 --- a/tests/docs/getting_started/lifespan/test_basic.py +++ b/tests/docs/getting_started/lifespan/test_basic.py @@ -4,6 +4,7 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker from tests.marks import pydanticV2 from tests.mocks import mock_pydantic_settings_env @@ -14,9 +15,8 @@ async def test_rabbit_basic_lifespan(): with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.rabbit.basic import app, broker - async with TestRabbitBroker(broker): - async with TestApp(app): - assert context.get("settings").host == "localhost" + async with TestRabbitBroker(broker), TestApp(app): + assert context.get("settings").host == "localhost" @pydanticV2 @@ -25,9 +25,8 @@ async def test_kafka_basic_lifespan(): with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.kafka.basic import app, broker - async with TestKafkaBroker(broker): - async with TestApp(app): - assert context.get("settings").host == "localhost" + async with TestKafkaBroker(broker), TestApp(app): + assert context.get("settings").host == "localhost" @pydanticV2 @@ -36,6 +35,15 @@ async def test_nats_basic_lifespan(): with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.nats.basic import app, broker - async with TestNatsBroker(broker): - async with TestApp(app): - assert context.get("settings").host == "localhost" + async with TestNatsBroker(broker), TestApp(app): + assert 
context.get("settings").host == "localhost" + + +@pydanticV2 +@pytest.mark.asyncio +async def test_redis_basic_lifespan(): + with mock_pydantic_settings_env({"host": "localhost"}): + from docs.docs_src.getting_started.lifespan.redis.basic import app, broker + + async with TestRedisBroker(broker), TestApp(app): + assert context.get("settings").host == "localhost" diff --git a/tests/docs/getting_started/lifespan/test_ml.py b/tests/docs/getting_started/lifespan/test_ml.py index 31b53d62cd..69cb96fac1 100644 --- a/tests/docs/getting_started/lifespan/test_ml.py +++ b/tests/docs/getting_started/lifespan/test_ml.py @@ -4,36 +4,44 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_rabbit_ml_lifespan(): from docs.docs_src.getting_started.lifespan.rabbit.ml import app, broker, predict - async with TestRabbitBroker(broker): - async with TestApp(app): - assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) + async with TestRabbitBroker(broker), TestApp(app): + assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) - predict.mock.assert_called_once_with(1.0) + predict.mock.assert_called_once_with(1.0) @pytest.mark.asyncio async def test_kafka_ml_lifespan(): from docs.docs_src.getting_started.lifespan.kafka.ml import app, broker, predict - async with TestKafkaBroker(broker): - async with TestApp(app): - assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) + async with TestKafkaBroker(broker), TestApp(app): + assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) - predict.mock.assert_called_once_with(1.0) + predict.mock.assert_called_once_with(1.0) @pytest.mark.asyncio async def test_nats_ml_lifespan(): from docs.docs_src.getting_started.lifespan.nats.ml import app, broker, predict - async with TestNatsBroker(broker): - async with TestApp(app): - 
assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) + async with TestNatsBroker(broker), TestApp(app): + assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) - predict.mock.assert_called_once_with(1.0) + predict.mock.assert_called_once_with(1.0) + + +@pytest.mark.asyncio +async def test_redis_ml_lifespan(): + from docs.docs_src.getting_started.lifespan.redis.ml import app, broker, predict + + async with TestRedisBroker(broker), TestApp(app): + assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) + + predict.mock.assert_called_once_with(1.0) diff --git a/tests/docs/getting_started/lifespan/test_ml_context.py b/tests/docs/getting_started/lifespan/test_ml_context.py new file mode 100644 index 0000000000..2ca25ecf6c --- /dev/null +++ b/tests/docs/getting_started/lifespan/test_ml_context.py @@ -0,0 +1,63 @@ +import pytest + +from faststream import TestApp +from faststream.kafka import TestKafkaBroker +from faststream.nats import TestNatsBroker +from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker + + +@pytest.mark.asyncio +async def test_rabbit_ml_lifespan(): + from docs.docs_src.getting_started.lifespan.rabbit.ml_context import ( + app, + broker, + predict, + ) + + async with TestRabbitBroker(broker), TestApp(app): + assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) + + predict.mock.assert_called_once_with(1.0) + + +@pytest.mark.asyncio +async def test_kafka_ml_lifespan(): + from docs.docs_src.getting_started.lifespan.kafka.ml_context import ( + app, + broker, + predict, + ) + + async with TestKafkaBroker(broker), TestApp(app): + assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) + + predict.mock.assert_called_once_with(1.0) + + +@pytest.mark.asyncio +async def test_nats_ml_lifespan(): + from docs.docs_src.getting_started.lifespan.nats.ml_context import ( + app, + broker, + predict, + ) + + async with TestNatsBroker(broker), 
TestApp(app): + assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) + + predict.mock.assert_called_once_with(1.0) + + +@pytest.mark.asyncio +async def test_redis_ml_lifespan(): + from docs.docs_src.getting_started.lifespan.redis.ml_context import ( + app, + broker, + predict, + ) + + async with TestRedisBroker(broker), TestApp(app): + assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) + + predict.mock.assert_called_once_with(1.0) diff --git a/tests/docs/getting_started/lifespan/test_testing.py b/tests/docs/getting_started/lifespan/test_testing.py index 4a9e27b93f..c9845f1583 100644 --- a/tests/docs/getting_started/lifespan/test_testing.py +++ b/tests/docs/getting_started/lifespan/test_testing.py @@ -1,15 +1,18 @@ from docs.docs_src.getting_started.lifespan.kafka.testing import ( - test_lifespan as test_lifespan_k, + test_lifespan as _test_lifespan_k, ) from docs.docs_src.getting_started.lifespan.nats.testing import ( - test_lifespan as test_lifespan_n, + test_lifespan as _test_lifespan_n, ) from docs.docs_src.getting_started.lifespan.rabbit.testing import ( - test_lifespan as test_lifespan_r, + test_lifespan as _test_lifespan_r, ) - -__all__ = ( - "test_lifespan_k", - "test_lifespan_r", - "test_lifespan_n", +from docs.docs_src.getting_started.lifespan.redis.testing import ( + test_lifespan as _test_lifespan_red, ) +from tests.marks import python39 + +test_lifespan_red = python39(_test_lifespan_red) +test_lifespan_r = python39(_test_lifespan_r) +test_lifespan_n = python39(_test_lifespan_n) +test_lifespan_k = python39(_test_lifespan_k) diff --git a/tests/docs/getting_started/publishing/test_broker.py b/tests/docs/getting_started/publishing/test_broker.py index c2928c1ce2..640a1f36f8 100644 --- a/tests/docs/getting_started/publishing/test_broker.py +++ b/tests/docs/getting_started/publishing/test_broker.py @@ -4,48 +4,60 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from
faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_broker_kafka(): - from docs.docs_src.getting_started.publishing.broker_kafka import ( + from docs.docs_src.getting_started.publishing.kafka.broker import ( app, broker, handle, handle_next, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") - handle_next.mock.assert_called_once_with("Hi!") + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") + handle_next.mock.assert_called_once_with("Hi!") @pytest.mark.asyncio async def test_broker_rabbit(): - from docs.docs_src.getting_started.publishing.broker_rabbit import ( + from docs.docs_src.getting_started.publishing.rabbit.broker import ( app, broker, handle, handle_next, ) - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") - handle_next.mock.assert_called_once_with("Hi!") + async with TestRabbitBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") + handle_next.mock.assert_called_once_with("Hi!") @pytest.mark.asyncio async def test_broker_nats(): - from docs.docs_src.getting_started.publishing.broker_nats import ( + from docs.docs_src.getting_started.publishing.nats.broker import ( app, broker, handle, handle_next, ) - async with TestNatsBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") - handle_next.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") + handle_next.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_broker_redis(): + from docs.docs_src.getting_started.publishing.redis.broker import ( + app, + broker, + handle, + handle_next, + ) + + async with TestRedisBroker(broker), TestApp(app): + 
handle.mock.assert_called_once_with("") + handle_next.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/publishing/test_broker_context.py b/tests/docs/getting_started/publishing/test_broker_context.py index 2a2556f057..210cffdb5e 100644 --- a/tests/docs/getting_started/publishing/test_broker_context.py +++ b/tests/docs/getting_started/publishing/test_broker_context.py @@ -4,48 +4,60 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio @pytest.mark.kafka async def test_broker_context_kafka(): - from docs.docs_src.getting_started.publishing.broker_context_kafka import ( + from docs.docs_src.getting_started.publishing.kafka.broker_context import ( app, broker, handle, ) - async with TestKafkaBroker(broker, connect_only=True, with_real=True): - async with TestApp(app): - await handle.wait_call(3) - handle.mock.assert_called_once_with("Hi!") + async with TestKafkaBroker(broker, with_real=True), TestApp(app): + await handle.wait_call(3) + handle.mock.assert_called_once_with("Hi!") @pytest.mark.asyncio @pytest.mark.nats async def test_broker_context_nats(): - from docs.docs_src.getting_started.publishing.broker_context_nats import ( + from docs.docs_src.getting_started.publishing.nats.broker_context import ( app, broker, handle, ) - async with TestNatsBroker(broker, connect_only=True, with_real=True): - async with TestApp(app): - await handle.wait_call(3) - handle.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker, with_real=True), TestApp(app): + await handle.wait_call(3) + handle.mock.assert_called_once_with("Hi!") @pytest.mark.asyncio @pytest.mark.rabbit async def test_broker_context_rabbit(): - from docs.docs_src.getting_started.publishing.broker_context_rabbit import ( + from docs.docs_src.getting_started.publishing.rabbit.broker_context import ( app, broker, handle, ) - 
async with TestRabbitBroker(broker, connect_only=True, with_real=True): - async with TestApp(app): - await handle.wait_call(3) - handle.mock.assert_called_once_with("Hi!") + async with TestRabbitBroker(broker, with_real=True), TestApp(app): + await handle.wait_call(3) + handle.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +@pytest.mark.redis +async def test_broker_context_redis(): + from docs.docs_src.getting_started.publishing.redis.broker_context import ( + app, + broker, + handle, + ) + + async with TestRedisBroker(broker, with_real=True), TestApp(app): + await handle.wait_call(3) + handle.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/publishing/test_decorator.py b/tests/docs/getting_started/publishing/test_decorator.py index da07d79c14..31479eb10e 100644 --- a/tests/docs/getting_started/publishing/test_decorator.py +++ b/tests/docs/getting_started/publishing/test_decorator.py @@ -4,51 +4,64 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_decorator_kafka(): - from docs.docs_src.getting_started.publishing.decorator_kafka import ( + from docs.docs_src.getting_started.publishing.kafka.decorator import ( app, broker, handle, handle_next, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") - handle_next.mock.assert_called_once_with("Hi!") - list(broker._publishers.values())[0].mock.assert_called_once_with("Hi!") + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") + handle_next.mock.assert_called_once_with("Hi!") + list(broker._publishers.values())[0].mock.assert_called_once_with("Hi!") @pytest.mark.asyncio async def test_decorator_rabbit(): - from docs.docs_src.getting_started.publishing.decorator_rabbit import ( + from 
docs.docs_src.getting_started.publishing.rabbit.decorator import ( app, broker, handle, handle_next, ) - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") - handle_next.mock.assert_called_once_with("Hi!") - list(broker._publishers.values())[0].mock.assert_called_once_with("Hi!") + async with TestRabbitBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") + handle_next.mock.assert_called_once_with("Hi!") + list(broker._publishers.values())[0].mock.assert_called_once_with("Hi!") @pytest.mark.asyncio async def test_decorator_nats(): - from docs.docs_src.getting_started.publishing.decorator_nats import ( + from docs.docs_src.getting_started.publishing.nats.decorator import ( app, broker, handle, handle_next, ) - async with TestNatsBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") - handle_next.mock.assert_called_once_with("Hi!") - list(broker._publishers.values())[0].mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") + handle_next.mock.assert_called_once_with("Hi!") + list(broker._publishers.values())[0].mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_decorator_redis(): + from docs.docs_src.getting_started.publishing.redis.decorator import ( + app, + broker, + handle, + handle_next, + ) + + async with TestRedisBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") + handle_next.mock.assert_called_once_with("Hi!") + list(broker._publishers.values())[0].mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/publishing/test_direct.py b/tests/docs/getting_started/publishing/test_direct.py index ecd9570b93..a78b232d63 100644 --- a/tests/docs/getting_started/publishing/test_direct.py +++ b/tests/docs/getting_started/publishing/test_direct.py @@ -1,15 +1,19 @@ -from 
docs.docs_src.getting_started.publishing.direct_kafka_testing import ( +from docs.docs_src.getting_started.publishing.kafka.direct_testing import ( test_handle as test_handle_k, ) -from docs.docs_src.getting_started.publishing.direct_nats_testing import ( +from docs.docs_src.getting_started.publishing.nats.direct_testing import ( test_handle as test_handle_n, ) -from docs.docs_src.getting_started.publishing.direct_rabbit_testing import ( +from docs.docs_src.getting_started.publishing.rabbit.direct_testing import ( test_handle as test_handle_r, ) +from docs.docs_src.getting_started.publishing.redis.direct_testing import ( + test_handle as test_handle_red, +) __all__ = ( "test_handle_r", "test_handle_k", "test_handle_n", + "test_handle_red", ) diff --git a/tests/docs/getting_started/publishing/test_kafka_direct.py b/tests/docs/getting_started/publishing/test_kafka_direct.py deleted file mode 100644 index ddf8951c3c..0000000000 --- a/tests/docs/getting_started/publishing/test_kafka_direct.py +++ /dev/null @@ -1,3 +0,0 @@ -from docs.docs_src.getting_started.publishing.direct_kafka_testing import test_handle - -__all__ = ("test_handle",) diff --git a/tests/docs/getting_started/publishing/test_kafka_object.py b/tests/docs/getting_started/publishing/test_kafka_object.py deleted file mode 100644 index c97675caa8..0000000000 --- a/tests/docs/getting_started/publishing/test_kafka_object.py +++ /dev/null @@ -1,3 +0,0 @@ -from docs.docs_src.getting_started.publishing.object_kafka_testing import test_handle - -__all__ = ("test_handle",) diff --git a/tests/docs/getting_started/publishing/test_object.py b/tests/docs/getting_started/publishing/test_object.py index 2a04c1ad5f..d9caa1b37f 100644 --- a/tests/docs/getting_started/publishing/test_object.py +++ b/tests/docs/getting_started/publishing/test_object.py @@ -1,15 +1,19 @@ -from docs.docs_src.getting_started.publishing.object_kafka_testing import ( +from docs.docs_src.getting_started.publishing.kafka.object_testing import ( 
test_handle as test_handle_k, ) -from docs.docs_src.getting_started.publishing.object_nats_testing import ( +from docs.docs_src.getting_started.publishing.nats.object_testing import ( test_handle as test_handle_n, ) -from docs.docs_src.getting_started.publishing.object_rabbit_testing import ( +from docs.docs_src.getting_started.publishing.rabbit.object_testing import ( test_handle as test_handle_r, ) +from docs.docs_src.getting_started.publishing.redis.object_testing import ( + test_handle as test_handle_red, +) __all__ = ( "test_handle_k", "test_handle_r", "test_handle_n", + "test_handle_red", ) diff --git a/tests/docs/getting_started/publishing/test_rabbit_direct.py b/tests/docs/getting_started/publishing/test_rabbit_direct.py deleted file mode 100644 index 23d48a2cd9..0000000000 --- a/tests/docs/getting_started/publishing/test_rabbit_direct.py +++ /dev/null @@ -1,3 +0,0 @@ -from docs.docs_src.getting_started.publishing.direct_rabbit_testing import test_handle - -__all__ = ("test_handle",) diff --git a/tests/docs/getting_started/publishing/test_rabbit_object.py b/tests/docs/getting_started/publishing/test_rabbit_object.py deleted file mode 100644 index dd65b33091..0000000000 --- a/tests/docs/getting_started/publishing/test_rabbit_object.py +++ /dev/null @@ -1,3 +0,0 @@ -from docs.docs_src.getting_started.publishing.object_rabbit_testing import test_handle - -__all__ = ("test_handle",) diff --git a/tests/docs/getting_started/routers/test_base.py b/tests/docs/getting_started/routers/test_base.py index 662b7031ee..1baa6bf630 100644 --- a/tests/docs/getting_started/routers/test_base.py +++ b/tests/docs/getting_started/routers/test_base.py @@ -4,48 +4,60 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_base_router_kafka(): - from docs.docs_src.getting_started.routers.router_kafka import ( + from 
docs.docs_src.getting_started.routers.kafka.router import ( app, broker, handle, handle_response, ) - async with TestKafkaBroker(broker): - async with TestApp(app): - handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) - handle_response.mock.assert_called_once_with("Hi!") + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + handle_response.mock.assert_called_once_with("Hi!") @pytest.mark.asyncio async def test_base_router_rabbit(): - from docs.docs_src.getting_started.routers.router_rabbit import ( + from docs.docs_src.getting_started.routers.rabbit.router import ( app, broker, handle, handle_response, ) - async with TestRabbitBroker(broker): - async with TestApp(app): - handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) - handle_response.mock.assert_called_once_with("Hi!") + async with TestRabbitBroker(broker), TestApp(app): + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + handle_response.mock.assert_called_once_with("Hi!") @pytest.mark.asyncio async def test_base_router_nats(): - from docs.docs_src.getting_started.routers.router_nats import ( + from docs.docs_src.getting_started.routers.nats.router import ( app, broker, handle, handle_response, ) - async with TestNatsBroker(broker): - async with TestApp(app): - handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) - handle_response.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker), TestApp(app): + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + handle_response.mock.assert_called_once_with("Hi!") + + +@pytest.mark.asyncio +async def test_base_router_redis(): + from docs.docs_src.getting_started.routers.redis.router import ( + app, + broker, + handle, + handle_response, + ) + + async with TestRedisBroker(broker), TestApp(app): + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + 
handle_response.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/routers/test_delay.py b/tests/docs/getting_started/routers/test_delay.py index 1138c7a987..87d09f76bf 100644 --- a/tests/docs/getting_started/routers/test_delay.py +++ b/tests/docs/getting_started/routers/test_delay.py @@ -4,45 +4,56 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_delay_router_kafka(): - from docs.docs_src.getting_started.routers.router_delay_kafka import ( + from docs.docs_src.getting_started.routers.kafka.router_delay import ( app, broker, ) - async with TestKafkaBroker(broker) as br: - async with TestApp(app): - list(br.handlers.values())[0].calls[0][0].mock.assert_called_once_with( - {"name": "John", "user_id": 1} - ) + async with TestKafkaBroker(broker) as br, TestApp(app): + list(br.handlers.values())[0].calls[0][0].mock.assert_called_once_with( + {"name": "John", "user_id": 1} + ) @pytest.mark.asyncio async def test_delay_router_rabbit(): - from docs.docs_src.getting_started.routers.router_delay_rabbit import ( + from docs.docs_src.getting_started.routers.rabbit.router_delay import ( app, broker, ) - async with TestRabbitBroker(broker) as br: - async with TestApp(app): - list(br.handlers.values())[0].calls[0][0].mock.assert_called_once_with( - {"name": "John", "user_id": 1} - ) + async with TestRabbitBroker(broker) as br, TestApp(app): + list(br.handlers.values())[0].calls[0][0].mock.assert_called_once_with( + {"name": "John", "user_id": 1} + ) @pytest.mark.asyncio async def test_delay_router_nats(): - from docs.docs_src.getting_started.routers.router_delay_nats import ( + from docs.docs_src.getting_started.routers.nats.router_delay import ( app, broker, ) - async with TestNatsBroker(broker) as br: - async with TestApp(app): - 
list(br.handlers.values())[0].calls[0][0].mock.assert_called_once_with( - {"name": "John", "user_id": 1} - ) + async with TestNatsBroker(broker) as br, TestApp(app): + list(br.handlers.values())[0].calls[0][0].mock.assert_called_once_with( + {"name": "John", "user_id": 1} + ) + + +@pytest.mark.asyncio +async def test_delay_router_redis(): + from docs.docs_src.getting_started.routers.redis.router_delay import ( + app, + broker, + ) + + async with TestRedisBroker(broker) as br, TestApp(app): + list(br.handlers.values())[0].calls[0][0].mock.assert_called_once_with( + {"name": "John", "user_id": 1} + ) diff --git a/tests/docs/getting_started/serialization/test_parser.py b/tests/docs/getting_started/serialization/test_parser.py index 6a40f16010..29f36921b8 100644 --- a/tests/docs/getting_started/serialization/test_parser.py +++ b/tests/docs/getting_started/serialization/test_parser.py @@ -4,6 +4,7 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio @@ -14,9 +15,8 @@ async def test_parser_nats(): handle, ) - async with TestNatsBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") + async with TestNatsBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") @pytest.mark.asyncio @@ -27,9 +27,8 @@ async def test_parser_kafka(): handle, ) - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") @pytest.mark.asyncio @@ -40,6 +39,17 @@ async def test_parser_rabbit(): handle, ) - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("") + async with TestRabbitBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") + + 
+@pytest.mark.asyncio +async def test_parser_redis(): + from docs.docs_src.getting_started.serialization.parser_redis import ( + app, + broker, + handle, + ) + + async with TestRedisBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/subscription/test_filter.py b/tests/docs/getting_started/subscription/test_filter.py index 5ccb2d6f3a..5e92e81996 100644 --- a/tests/docs/getting_started/subscription/test_filter.py +++ b/tests/docs/getting_started/subscription/test_filter.py @@ -4,48 +4,60 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_kafka_filtering(): - from docs.docs_src.getting_started.subscription.filter_kafka import ( + from docs.docs_src.getting_started.subscription.kafka.filter import ( app, broker, default_handler, handle, ) - async with TestKafkaBroker(broker): - async with TestApp(app): - handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) - default_handler.mock.assert_called_once_with("Hello, FastStream!") + async with TestKafkaBroker(broker), TestApp(app): + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + default_handler.mock.assert_called_once_with("Hello, FastStream!") @pytest.mark.asyncio async def test_rabbit_filtering(): - from docs.docs_src.getting_started.subscription.filter_rabbit import ( + from docs.docs_src.getting_started.subscription.rabbit.filter import ( app, broker, default_handler, handle, ) - async with TestRabbitBroker(broker): - async with TestApp(app): - handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) - default_handler.mock.assert_called_once_with("Hello, FastStream!") + async with TestRabbitBroker(broker), TestApp(app): + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + default_handler.mock.assert_called_once_with("Hello, 
FastStream!") @pytest.mark.asyncio async def test_nats_filtering(): - from docs.docs_src.getting_started.subscription.filter_nats import ( + from docs.docs_src.getting_started.subscription.nats.filter import ( app, broker, default_handler, handle, ) - async with TestNatsBroker(broker): - async with TestApp(app): - handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) - default_handler.mock.assert_called_once_with("Hello, FastStream!") + async with TestNatsBroker(broker), TestApp(app): + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + default_handler.mock.assert_called_once_with("Hello, FastStream!") + + +@pytest.mark.asyncio +async def test_redis_filtering(): + from docs.docs_src.getting_started.subscription.redis.filter import ( + app, + broker, + default_handler, + handle, + ) + + async with TestRedisBroker(broker), TestApp(app): + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + default_handler.mock.assert_called_once_with("Hello, FastStream!") diff --git a/tests/docs/getting_started/subscription/test_pydantic.py b/tests/docs/getting_started/subscription/test_pydantic.py index 391241a975..302d7f13f2 100644 --- a/tests/docs/getting_started/subscription/test_pydantic.py +++ b/tests/docs/getting_started/subscription/test_pydantic.py @@ -3,11 +3,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_pydantic_model_rabbit(): - from docs.docs_src.getting_started.subscription.pydantic_model_rabbit import ( + from docs.docs_src.getting_started.subscription.rabbit.pydantic_model import ( broker, handle, ) @@ -19,7 +20,7 @@ async def test_pydantic_model_rabbit(): @pytest.mark.asyncio async def test_pydantic_model_kafka(): - from docs.docs_src.getting_started.subscription.pydantic_model_kafka import ( + from 
docs.docs_src.getting_started.subscription.kafka.pydantic_model import ( broker, handle, ) @@ -31,7 +32,7 @@ async def test_pydantic_model_kafka(): @pytest.mark.asyncio async def test_pydantic_model_nats(): - from docs.docs_src.getting_started.subscription.pydantic_model_nats import ( + from docs.docs_src.getting_started.subscription.nats.pydantic_model import ( broker, handle, ) @@ -39,3 +40,15 @@ async def test_pydantic_model_nats(): async with TestNatsBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-subject") handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) + + +@pytest.mark.asyncio +async def test_pydantic_model_redis(): + from docs.docs_src.getting_started.subscription.redis.pydantic_model import ( + broker, + handle, + ) + + async with TestRedisBroker(broker) as br: + await br.publish({"name": "John", "user_id": 1}, "test-channel") + handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) diff --git a/tests/docs/getting_started/subscription/test_real.py b/tests/docs/getting_started/subscription/test_real.py index efe94d4d9e..4aba3ec33f 100644 --- a/tests/docs/getting_started/subscription/test_real.py +++ b/tests/docs/getting_started/subscription/test_real.py @@ -1,23 +1,29 @@ import pytest -from docs.docs_src.getting_started.subscription.real_testing_kafka import ( +from docs.docs_src.getting_started.subscription.kafka.real_testing import ( test_handle as test_handle_k, ) -from docs.docs_src.getting_started.subscription.real_testing_kafka import ( +from docs.docs_src.getting_started.subscription.kafka.real_testing import ( test_validation_error as test_validation_error_k, ) -from docs.docs_src.getting_started.subscription.real_testing_nats import ( +from docs.docs_src.getting_started.subscription.nats.real_testing import ( test_handle as test_handle_n, ) -from docs.docs_src.getting_started.subscription.real_testing_nats import ( +from docs.docs_src.getting_started.subscription.nats.real_testing import 
( test_validation_error as test_validation_error_n, ) -from docs.docs_src.getting_started.subscription.real_testing_rabbit import ( +from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( test_handle as test_handle_r, ) -from docs.docs_src.getting_started.subscription.real_testing_rabbit import ( +from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( test_validation_error as test_validation_error_r, ) +from docs.docs_src.getting_started.subscription.redis.real_testing import ( + test_handle as test_handle_red, +) +from docs.docs_src.getting_started.subscription.redis.real_testing import ( + test_validation_error as test_validation_error_red, +) pytest.mark.kafka(test_handle_k) pytest.mark.kafka(test_validation_error_k) @@ -27,3 +33,6 @@ pytest.mark.nats(test_handle_n) pytest.mark.nats(test_validation_error_n) + +pytest.mark.redis(test_handle_red) +pytest.mark.redis(test_validation_error_red) diff --git a/tests/docs/getting_started/subscription/test_testing.py b/tests/docs/getting_started/subscription/test_testing.py index a918f5f28a..f51962944c 100644 --- a/tests/docs/getting_started/subscription/test_testing.py +++ b/tests/docs/getting_started/subscription/test_testing.py @@ -1,19 +1,19 @@ -from docs.docs_src.getting_started.subscription.testing_kafka import ( +from docs.docs_src.getting_started.subscription.kafka.testing import ( test_handle as test_handle_k, ) -from docs.docs_src.getting_started.subscription.testing_kafka import ( +from docs.docs_src.getting_started.subscription.kafka.testing import ( test_validation_error as test_validation_error_k, ) -from docs.docs_src.getting_started.subscription.testing_nats import ( +from docs.docs_src.getting_started.subscription.nats.testing import ( test_handle as test_handle_n, ) -from docs.docs_src.getting_started.subscription.testing_nats import ( +from docs.docs_src.getting_started.subscription.nats.testing import ( test_validation_error as test_validation_error_n, ) 
-from docs.docs_src.getting_started.subscription.testing_rabbit import ( +from docs.docs_src.getting_started.subscription.rabbit.testing import ( test_handle as test_handle_r, ) -from docs.docs_src.getting_started.subscription.testing_rabbit import ( +from docs.docs_src.getting_started.subscription.rabbit.testing import ( test_validation_error as test_validation_error_r, ) diff --git a/tests/docs/index/test_basic.py b/tests/docs/index/test_basic.py index b6a6bf32e8..ef1f3beb0f 100644 --- a/tests/docs/index/test_basic.py +++ b/tests/docs/index/test_basic.py @@ -3,11 +3,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_index_kafka_base(): - from docs.docs_src.index.basic_kafka import broker, handle_msg + from docs.docs_src.index.kafka.basic import broker, handle_msg async with TestKafkaBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-topic") @@ -21,7 +22,7 @@ async def test_index_kafka_base(): @pytest.mark.asyncio async def test_index_rabbit_base(): - from docs.docs_src.index.basic_rabbit import broker, handle_msg + from docs.docs_src.index.rabbit.basic import broker, handle_msg async with TestRabbitBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-queue") @@ -35,7 +36,7 @@ async def test_index_rabbit_base(): @pytest.mark.asyncio async def test_index_nats_base(): - from docs.docs_src.index.basic_nats import broker, handle_msg + from docs.docs_src.index.nats.basic import broker, handle_msg async with TestNatsBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-subject") @@ -45,3 +46,17 @@ async def test_index_nats_base(): list(br._publishers.values())[0].mock.assert_called_once_with( "User: 1 - John registered" ) + + +@pytest.mark.asyncio +async def test_index_redis_base(): + from docs.docs_src.index.redis.basic import broker, 
handle_msg + + async with TestRedisBroker(broker) as br: + await br.publish({"user": "John", "user_id": 1}, "in-channel") + + handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1}) + + list(br._publishers.values())[0].mock.assert_called_once_with( + "User: 1 - John registered" + ) diff --git a/tests/docs/index/test_pydantic.py b/tests/docs/index/test_pydantic.py index d0f829e5ed..62a13ff970 100644 --- a/tests/docs/index/test_pydantic.py +++ b/tests/docs/index/test_pydantic.py @@ -1,9 +1,11 @@ -from docs.docs_src.index.test_kafka import test_correct as test_k_correct -from docs.docs_src.index.test_kafka import test_invalid as test_k_invalid -from docs.docs_src.index.test_nats import test_correct as test_n_correct -from docs.docs_src.index.test_nats import test_invalid as test_n_invalid -from docs.docs_src.index.test_rabbit import test_correct as test_r_correct -from docs.docs_src.index.test_rabbit import test_invalid as test_r_invalid +from docs.docs_src.index.kafka.test import test_correct as test_k_correct +from docs.docs_src.index.kafka.test import test_invalid as test_k_invalid +from docs.docs_src.index.nats.test import test_correct as test_n_correct +from docs.docs_src.index.nats.test import test_invalid as test_n_invalid +from docs.docs_src.index.rabbit.test import test_correct as test_r_correct +from docs.docs_src.index.rabbit.test import test_invalid as test_r_invalid +from docs.docs_src.index.redis.test import test_correct as test_red_correct +from docs.docs_src.index.redis.test import test_invalid as test_red_invalid __all__ = ( "test_k_correct", @@ -12,4 +14,6 @@ "test_r_invalid", "test_n_correct", "test_n_invalid", + "test_red_correct", + "test_red_invalid", ) diff --git a/tests/docs/integration/fastapi/test_base.py b/tests/docs/integration/fastapi/test_base.py index 366c1c3f27..f9b9ab3b59 100644 --- a/tests/docs/integration/fastapi/test_base.py +++ b/tests/docs/integration/fastapi/test_base.py @@ -4,11 +4,12 @@ from faststream.kafka 
import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_fastapi_kafka_base(): - from docs.docs_src.integrations.fastapi.base_kafka import app, hello, router + from docs.docs_src.integrations.fastapi.kafka.base import app, hello, router async with TestKafkaBroker(router.broker) as br: with TestClient(app) as client: @@ -25,7 +26,7 @@ async def test_fastapi_kafka_base(): @pytest.mark.asyncio async def test_fastapi_rabbit_base(): - from docs.docs_src.integrations.fastapi.base_rabbit import app, hello, router + from docs.docs_src.integrations.fastapi.rabbit.base import app, hello, router async with TestRabbitBroker(router.broker) as br: with TestClient(app) as client: @@ -42,7 +43,7 @@ async def test_fastapi_rabbit_base(): @pytest.mark.asyncio async def test_fastapi_nats_base(): - from docs.docs_src.integrations.fastapi.base_nats import app, hello, router + from docs.docs_src.integrations.fastapi.nats.base import app, hello, router async with TestNatsBroker(router.broker) as br: with TestClient(app) as client: @@ -55,3 +56,20 @@ async def test_fastapi_nats_base(): list(br._publishers.values())[0].mock.assert_called_with( {"response": "Hello, NATS!"} ) + + +@pytest.mark.asyncio +async def test_fastapi_redis_base(): + from docs.docs_src.integrations.fastapi.redis.base import app, hello, router + + async with TestRedisBroker(router.broker) as br: + with TestClient(app) as client: + assert client.get("/").text == '"Hello, HTTP!"' + + await br.publish({"m": {}}, "test") + + hello.mock.assert_called_once_with({"m": {}}) + + list(br._publishers.values())[0].mock.assert_called_with( + {"response": "Hello, Redis!"} + ) diff --git a/tests/docs/integration/fastapi/test_depends.py b/tests/docs/integration/fastapi/test_depends.py index e23102ed98..e3391e2633 100644 --- a/tests/docs/integration/fastapi/test_depends.py +++ 
b/tests/docs/integration/fastapi/test_depends.py @@ -4,11 +4,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_fastapi_kafka_depends(): - from docs.docs_src.integrations.fastapi.depends_kafka import app, router + from docs.docs_src.integrations.fastapi.kafka.depends import app, router @router.subscriber("test") async def handler(): @@ -23,7 +24,7 @@ async def handler(): @pytest.mark.asyncio async def test_fastapi_rabbit_depends(): - from docs.docs_src.integrations.fastapi.depends_rabbit import app, router + from docs.docs_src.integrations.fastapi.rabbit.depends import app, router @router.subscriber("test") async def handler(): @@ -38,7 +39,7 @@ async def handler(): @pytest.mark.asyncio async def test_fastapi_nats_depends(): - from docs.docs_src.integrations.fastapi.depends_nats import app, router + from docs.docs_src.integrations.fastapi.nats.depends import app, router @router.subscriber("test") async def handler(): @@ -49,3 +50,18 @@ async def handler(): assert client.get("/").text == '"Hello, HTTP!"' handler.mock.assert_called_once_with("Hello, NATS!") + + +@pytest.mark.asyncio +async def test_fastapi_redis_depends(): + from docs.docs_src.integrations.fastapi.redis.depends import app, router + + @router.subscriber("test") + async def handler(): + ... 
+ + async with TestRedisBroker(router.broker): + with TestClient(app) as client: + assert client.get("/").text == '"Hello, HTTP!"' + + handler.mock.assert_called_once_with("Hello, Redis!") diff --git a/tests/docs/integration/fastapi/test_send.py b/tests/docs/integration/fastapi/test_send.py index a1f9c5236d..0b790d5847 100644 --- a/tests/docs/integration/fastapi/test_send.py +++ b/tests/docs/integration/fastapi/test_send.py @@ -4,11 +4,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_fastapi_kafka_send(): - from docs.docs_src.integrations.fastapi.send_kafka import app, router + from docs.docs_src.integrations.fastapi.kafka.send import app, router @router.subscriber("test") async def handler(): @@ -23,7 +24,7 @@ async def handler(): @pytest.mark.asyncio async def test_fastapi_rabbit_send(): - from docs.docs_src.integrations.fastapi.send_rabbit import app, router + from docs.docs_src.integrations.fastapi.rabbit.send import app, router @router.subscriber("test") async def handler(): @@ -38,7 +39,7 @@ async def handler(): @pytest.mark.asyncio async def test_fastapi_nats_send(): - from docs.docs_src.integrations.fastapi.send_nats import app, router + from docs.docs_src.integrations.fastapi.nats.send import app, router @router.subscriber("test") async def handler(): @@ -49,3 +50,18 @@ async def handler(): assert client.get("/").text == '"Hello, HTTP!"' handler.mock.assert_called_once_with("Hello, NATS!") + + +@pytest.mark.asyncio +async def test_fastapi_redis_send(): + from docs.docs_src.integrations.fastapi.redis.send import app, router + + @router.subscriber("test") + async def handler(): + ... 
+ + async with TestRedisBroker(router.broker): + with TestClient(app) as client: + assert client.get("/").text == '"Hello, HTTP!"' + + handler.mock.assert_called_once_with("Hello, Redis!") diff --git a/tests/docs/integration/fastapi/test_startup.py b/tests/docs/integration/fastapi/test_startup.py index 9eb459c29f..46dee36b57 100644 --- a/tests/docs/integration/fastapi/test_startup.py +++ b/tests/docs/integration/fastapi/test_startup.py @@ -4,11 +4,12 @@ from faststream.kafka import TestKafkaBroker from faststream.nats import TestNatsBroker from faststream.rabbit import TestRabbitBroker +from faststream.redis import TestRedisBroker @pytest.mark.asyncio async def test_fastapi_kafka_startup(): - from docs.docs_src.integrations.fastapi.startup_kafka import app, hello, router + from docs.docs_src.integrations.fastapi.kafka.startup import app, hello, router @router.subscriber("test") async def handler(): @@ -21,7 +22,7 @@ async def handler(): @pytest.mark.asyncio async def test_fastapi_rabbit_startup(): - from docs.docs_src.integrations.fastapi.startup_rabbit import app, hello, router + from docs.docs_src.integrations.fastapi.rabbit.startup import app, hello, router @router.subscriber("test") async def handler(): @@ -34,7 +35,7 @@ async def handler(): @pytest.mark.asyncio async def test_fastapi_nats_startup(): - from docs.docs_src.integrations.fastapi.startup_nats import app, hello, router + from docs.docs_src.integrations.fastapi.nats.startup import app, hello, router @router.subscriber("test") async def handler(): @@ -43,3 +44,16 @@ async def handler(): async with TestNatsBroker(router.broker): with TestClient(app): hello.mock.assert_called_once_with("Hello!") + + +@pytest.mark.asyncio +async def test_fastapi_redis_startup(): + from docs.docs_src.integrations.fastapi.redis.startup import app, hello, router + + @router.subscriber("test") + async def handler(): + ... 
+ + async with TestRedisBroker(router.broker): + with TestClient(app): + hello.mock.assert_called_once_with("Hello!") diff --git a/tests/docs/integration/fastapi/test_test.py b/tests/docs/integration/fastapi/test_test.py index 352d739b3b..38fc07fd3f 100644 --- a/tests/docs/integration/fastapi/test_test.py +++ b/tests/docs/integration/fastapi/test_test.py @@ -1,9 +1,11 @@ -from docs.docs_src.integrations.fastapi.test_kafka import test_router as test_k -from docs.docs_src.integrations.fastapi.test_nats import test_router as test_n -from docs.docs_src.integrations.fastapi.test_rabbit import test_router as test_r +from docs.docs_src.integrations.fastapi.kafka.test import test_router as test_k +from docs.docs_src.integrations.fastapi.nats.test import test_router as test_n +from docs.docs_src.integrations.fastapi.rabbit.test import test_router as test_r +from docs.docs_src.integrations.fastapi.redis.test import test_router as test_red __all__ = ( "test_k", "test_r", "test_n", + "test_red", ) diff --git a/tests/docs/integration/http/test_fastapi.py b/tests/docs/integration/http/test_fastapi.py index 01ff4a4b3d..1ea9645e25 100644 --- a/tests/docs/integration/http/test_fastapi.py +++ b/tests/docs/integration/http/test_fastapi.py @@ -12,7 +12,7 @@ async def test_fastapi_raw_integration(): broker, ) - async with TestKafkaBroker(broker, connect_only=True): + async with TestKafkaBroker(broker): with TestClient(app): await broker.publish("", "test") diff --git a/tests/docs/kafka/ack/test_errors.py b/tests/docs/kafka/ack/test_errors.py index 5fe264f18b..a17bb1ad46 100644 --- a/tests/docs/kafka/ack/test_errors.py +++ b/tests/docs/kafka/ack/test_errors.py @@ -16,8 +16,7 @@ async def test_ack_exc(): with patch.object( AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) ) as m: - async with TestKafkaBroker(broker, with_real=True, connect_only=True): - async with TestApp(app): - await handle.wait_call(10) + async with TestKafkaBroker(broker, with_real=True), 
TestApp(app): + await handle.wait_call(10) - assert m.mock.call_count + assert m.mock.call_count diff --git a/tests/docs/nats/ack/test_errors.py b/tests/docs/nats/ack/test_errors.py index 1886658748..32e4379c15 100644 --- a/tests/docs/nats/ack/test_errors.py +++ b/tests/docs/nats/ack/test_errors.py @@ -13,8 +13,7 @@ async def test_ack_exc(): from docs.docs_src.nats.ack.errors import app, broker, handle with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: - async with TestNatsBroker(broker, with_real=True, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestNatsBroker(broker, with_real=True), TestApp(app): + await handle.wait_call(3) - assert m.mock.call_count + assert m.mock.call_count diff --git a/tests/docs/nats/js/test_kv.py b/tests/docs/nats/js/test_kv.py index e8d124a6f4..d3fe6432f1 100644 --- a/tests/docs/nats/js/test_kv.py +++ b/tests/docs/nats/js/test_kv.py @@ -9,7 +9,6 @@ async def test_basic(): from docs.docs_src.nats.js.key_value import app, broker, handler - async with TestNatsBroker(broker, with_real=True, connect_only=True): - async with TestApp(app): - await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker, with_real=True), TestApp(app): + await handler.wait_call(3.0) + handler.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/nats/js/test_object.py b/tests/docs/nats/js/test_object.py index ebbe85c992..208feccec2 100644 --- a/tests/docs/nats/js/test_object.py +++ b/tests/docs/nats/js/test_object.py @@ -9,7 +9,6 @@ async def test_basic(): from docs.docs_src.nats.js.object import app, broker, handler - async with TestNatsBroker(broker, with_real=True, connect_only=True): - async with TestApp(app): - await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker, with_real=True), TestApp(app): + await handler.wait_call(3.0) + handler.mock.assert_called_once_with("Hi!") diff --git 
a/tests/docs/rabbit/ack/test_errors.py b/tests/docs/rabbit/ack/test_errors.py index 01e3426acb..8e8f98e3c7 100644 --- a/tests/docs/rabbit/ack/test_errors.py +++ b/tests/docs/rabbit/ack/test_errors.py @@ -12,11 +12,8 @@ async def test_ack_exc(): from docs.docs_src.rabbit.ack.errors import app, broker, handle - async with TestRabbitBroker(broker, with_real=True, connect_only=True): - with patch.object( - IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) - ) as m: - async with TestApp(app): - await handle.wait_call(3) + with patch.object(IncomingMessage, "ack", spy_decorator(IncomingMessage.ack)) as m: + async with TestRabbitBroker(broker, with_real=True), TestApp(app): + await handle.wait_call(3) - m.mock.assert_called_once() + m.mock.assert_called_once() diff --git a/tests/docs/rabbit/subscription/test_direct.py b/tests/docs/rabbit/subscription/test_direct.py index 3a05d0a3ba..a1cc052af2 100644 --- a/tests/docs/rabbit/subscription/test_direct.py +++ b/tests/docs/rabbit/subscription/test_direct.py @@ -12,7 +12,6 @@ async def test_index(): broker, ) - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - base_handler1.mock.assert_called_with("") - base_handler3.mock.assert_called_once_with("") + async with TestRabbitBroker(broker), TestApp(app): + base_handler1.mock.assert_called_with("") + base_handler3.mock.assert_called_once_with("") diff --git a/tests/docs/rabbit/subscription/test_fanout.py b/tests/docs/rabbit/subscription/test_fanout.py index 2c1842ec96..b46627c77a 100644 --- a/tests/docs/rabbit/subscription/test_fanout.py +++ b/tests/docs/rabbit/subscription/test_fanout.py @@ -13,10 +13,9 @@ async def test_index(): broker, ) - async with TestRabbitBroker(broker, with_real=True, connect_only=True): - async with TestApp(app): - await base_handler1.wait_call(3) - await base_handler3.wait_call(3) + async with TestRabbitBroker(broker, with_real=True), TestApp(app): + await base_handler1.wait_call(3) + await 
base_handler3.wait_call(3) - base_handler1.mock.assert_called_with("") - base_handler3.mock.assert_called_with("") + base_handler1.mock.assert_called_with("") + base_handler3.mock.assert_called_with("") diff --git a/tests/docs/rabbit/subscription/test_header.py b/tests/docs/rabbit/subscription/test_header.py index f41b53173f..1b96b26a19 100644 --- a/tests/docs/rabbit/subscription/test_header.py +++ b/tests/docs/rabbit/subscription/test_header.py @@ -12,7 +12,6 @@ async def test_index(): broker, ) - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - assert base_handler1.mock.call_count == 3 - assert base_handler3.mock.call_count == 3 + async with TestRabbitBroker(broker), TestApp(app): + assert base_handler1.mock.call_count == 3 + assert base_handler3.mock.call_count == 3 diff --git a/tests/docs/rabbit/subscription/test_index.py b/tests/docs/rabbit/subscription/test_index.py index 960adfa735..185ab942e5 100644 --- a/tests/docs/rabbit/subscription/test_index.py +++ b/tests/docs/rabbit/subscription/test_index.py @@ -7,6 +7,5 @@ async def test_index(): from docs.docs_src.rabbit.subscription.index import app, broker, handle - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - handle.mock.assert_called_once_with("message") + async with TestRabbitBroker(broker), TestApp(app): + handle.mock.assert_called_once_with("message") diff --git a/tests/docs/rabbit/subscription/test_stream.py b/tests/docs/rabbit/subscription/test_stream.py index fc77cd1d17..80e244ca1f 100644 --- a/tests/docs/rabbit/subscription/test_stream.py +++ b/tests/docs/rabbit/subscription/test_stream.py @@ -8,8 +8,7 @@ async def test_stream(): from docs.docs_src.rabbit.subscription.stream import app, broker, handle - async with TestRabbitBroker(broker, with_real=True, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestRabbitBroker(broker, with_real=True), TestApp(app): + await handle.wait_call(3) - 
handle.mock.assert_called_with("Hi!") + handle.mock.assert_called_with("Hi!") diff --git a/tests/docs/rabbit/subscription/test_topic.py b/tests/docs/rabbit/subscription/test_topic.py index a6a05c7c6e..45e2f51c9f 100644 --- a/tests/docs/rabbit/subscription/test_topic.py +++ b/tests/docs/rabbit/subscription/test_topic.py @@ -12,7 +12,6 @@ async def test_index(): broker, ) - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - assert base_handler1.mock.call_count == 3 - assert base_handler3.mock.call_count == 1 + async with TestRabbitBroker(broker), TestApp(app): + assert base_handler1.mock.call_count == 3 + assert base_handler3.mock.call_count == 1 diff --git a/tests/examples/kafka/test_batch_consume.py b/tests/examples/kafka/test_batch_consume.py index 67411f0ce9..0f1dade28c 100644 --- a/tests/examples/kafka/test_batch_consume.py +++ b/tests/examples/kafka/test_batch_consume.py @@ -6,7 +6,6 @@ @pytest.mark.asyncio async def test_example(): - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestKafkaBroker(broker), TestApp(app): + await handle.wait_call(3) assert set(handle.mock.call_args[0][0]) == {"hi", "FastStream"} diff --git a/tests/examples/kafka/test_batch_publish_1.py b/tests/examples/kafka/test_batch_publish_1.py index bdd6afcdb7..88bee705ee 100644 --- a/tests/examples/kafka/test_batch_publish_1.py +++ b/tests/examples/kafka/test_batch_publish_1.py @@ -6,7 +6,6 @@ @pytest.mark.asyncio async def test_example(): - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestKafkaBroker(broker), TestApp(app): + await handle.wait_call(3) assert set(handle.mock.call_args[0][0]) == {"hi", "FastStream"} diff --git a/tests/examples/kafka/test_batch_publish_2.py b/tests/examples/kafka/test_batch_publish_2.py index 4250b451dd..974e279eab 100644 --- a/tests/examples/kafka/test_batch_publish_2.py +++ 
b/tests/examples/kafka/test_batch_publish_2.py @@ -6,7 +6,6 @@ @pytest.mark.asyncio async def test_example(): - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestKafkaBroker(broker), TestApp(app): + await handle.wait_call(3) assert set(handle.mock.call_args[0][0]) == {"hi", "FastStream"} diff --git a/tests/examples/kafka/test_batch_publish_3.py b/tests/examples/kafka/test_batch_publish_3.py index 86f9ea41bf..62d91d0389 100644 --- a/tests/examples/kafka/test_batch_publish_3.py +++ b/tests/examples/kafka/test_batch_publish_3.py @@ -6,10 +6,9 @@ @pytest.mark.asyncio async def test_example(): - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) - await handle_response.wait_call(3) + async with TestKafkaBroker(broker), TestApp(app): + await handle.wait_call(3) + await handle_response.wait_call(3) handle.mock.assert_called_with("hi") assert set(handle_response.mock.call_args[0][0]) == {"hi", "FastStream"} diff --git a/tests/examples/nats/test_e01_basic.py b/tests/examples/nats/test_e01_basic.py index 736e69fe7c..cd487c6184 100644 --- a/tests/examples/nats/test_e01_basic.py +++ b/tests/examples/nats/test_e01_basic.py @@ -8,6 +8,5 @@ async def test_basic(): from examples.nats.e01_basic import app, broker, handler - async with TestNatsBroker(broker): - async with TestApp(app): - handler.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker), TestApp(app): + handler.mock.assert_called_once_with("Hi!") diff --git a/tests/examples/nats/test_e02_basic_rpc.py b/tests/examples/nats/test_e02_basic_rpc.py index aa5237692e..f7a5e81100 100644 --- a/tests/examples/nats/test_e02_basic_rpc.py +++ b/tests/examples/nats/test_e02_basic_rpc.py @@ -8,6 +8,5 @@ async def test_basic(): from examples.nats.e02_basic_rpc import app, broker, handler - async with TestNatsBroker(broker): - async with TestApp(app): - 
handler.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker), TestApp(app): + handler.mock.assert_called_once_with("Hi!") diff --git a/tests/examples/nats/test_e03_publisher.py b/tests/examples/nats/test_e03_publisher.py index a9576a77f7..870552189a 100644 --- a/tests/examples/nats/test_e03_publisher.py +++ b/tests/examples/nats/test_e03_publisher.py @@ -8,7 +8,6 @@ async def test_basic(): from examples.nats.e03_publisher import app, broker, handler, response_handler - async with TestNatsBroker(broker): - async with TestApp(app): - handler.mock.assert_called_once_with("Hi!") - response_handler.mock.assert_called_once_with("Response") + async with TestNatsBroker(broker), TestApp(app): + handler.mock.assert_called_once_with("Hi!") + response_handler.mock.assert_called_once_with("Response") diff --git a/tests/examples/nats/test_e04_js_basic.py b/tests/examples/nats/test_e04_js_basic.py index b97f6a97dc..b806b5ad51 100644 --- a/tests/examples/nats/test_e04_js_basic.py +++ b/tests/examples/nats/test_e04_js_basic.py @@ -8,7 +8,6 @@ async def test_basic(): from examples.nats.e04_js_basic import app, broker, handler - async with TestNatsBroker(broker): - async with TestApp(app): - assert handler.mock.call_count == 2 - handler.mock.assert_called_with("Hi!") + async with TestNatsBroker(broker), TestApp(app): + assert handler.mock.call_count == 2 + handler.mock.assert_called_with("Hi!") diff --git a/tests/examples/nats/test_e05_basic_and_js.py b/tests/examples/nats/test_e05_basic_and_js.py index fdd45a828e..3be0c91c4b 100644 --- a/tests/examples/nats/test_e05_basic_and_js.py +++ b/tests/examples/nats/test_e05_basic_and_js.py @@ -8,7 +8,6 @@ async def test_basic(): from examples.nats.e05_basic_and_js import app, broker, core_handler, js_handler - async with TestNatsBroker(broker): - async with TestApp(app): - core_handler.mock.assert_called_once_with("Hi!") - js_handler.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker), TestApp(app): + 
core_handler.mock.assert_called_once_with("Hi!") + js_handler.mock.assert_called_once_with("Hi!") diff --git a/tests/examples/nats/test_e06_key_value.py b/tests/examples/nats/test_e06_key_value.py index e7ea7e78e3..56557b9b0e 100644 --- a/tests/examples/nats/test_e06_key_value.py +++ b/tests/examples/nats/test_e06_key_value.py @@ -9,7 +9,6 @@ async def test_basic(): from examples.nats.e06_key_value import app, broker, handler - async with TestNatsBroker(broker, with_real=True, connect_only=True): - async with TestApp(app): - await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker, with_real=True), TestApp(app): + await handler.wait_call(3.0) + handler.mock.assert_called_once_with("Hi!") diff --git a/tests/examples/nats/test_e07_object_storage.py b/tests/examples/nats/test_e07_object_storage.py index 21b1189e11..a1ef83e581 100644 --- a/tests/examples/nats/test_e07_object_storage.py +++ b/tests/examples/nats/test_e07_object_storage.py @@ -9,7 +9,6 @@ async def test_basic(): from examples.nats.e07_object_storage import app, broker, handler - async with TestNatsBroker(broker, with_real=True, connect_only=True): - async with TestApp(app): - await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + async with TestNatsBroker(broker, with_real=True), TestApp(app): + await handler.wait_call(3.0) + handler.mock.assert_called_once_with("Hi!") diff --git a/tests/examples/nats/test_e08_wildcards.py b/tests/examples/nats/test_e08_wildcards.py index 0856aaa45c..38680dc7a9 100644 --- a/tests/examples/nats/test_e08_wildcards.py +++ b/tests/examples/nats/test_e08_wildcards.py @@ -8,8 +8,7 @@ async def test_basic(): from examples.nats.e08_wildcards import app, broker, handler, handler_match - async with TestNatsBroker(broker): - async with TestApp(app): - handler.mock.assert_called_once_with("Hi!") - handler_match.mock.assert_called_with("Hi!") - assert handler_match.mock.call_count == 2 + async with 
TestNatsBroker(broker), TestApp(app): + handler.mock.assert_called_once_with("Hi!") + handler_match.mock.assert_called_with("Hi!") + assert handler_match.mock.call_count == 2 diff --git a/tests/examples/router/test_basic_consume.py b/tests/examples/router/test_basic_consume.py index 6b140cb0c7..07475f0e56 100644 --- a/tests/examples/router/test_basic_consume.py +++ b/tests/examples/router/test_basic_consume.py @@ -6,8 +6,7 @@ @pytest.mark.asyncio async def test_example(): - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestKafkaBroker(broker), TestApp(app): + await handle.wait_call(3) - handle.mock.assert_called_with("Hello!") + handle.mock.assert_called_with("Hello!") diff --git a/tests/examples/router/test_basic_publish.py b/tests/examples/router/test_basic_publish.py index f5b0778fd1..973dec7982 100644 --- a/tests/examples/router/test_basic_publish.py +++ b/tests/examples/router/test_basic_publish.py @@ -6,10 +6,9 @@ @pytest.mark.asyncio async def test_example(): - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) - await handle_response.wait_call(3) + async with TestKafkaBroker(broker), TestApp(app): + await handle.wait_call(3) + await handle_response.wait_call(3) - handle.mock.assert_called_with("Hello!") - handle_response.mock.assert_called_with("Response") + handle.mock.assert_called_with("Hello!") + handle_response.mock.assert_called_with("Response") diff --git a/tests/examples/router/test_delay_registration.py b/tests/examples/router/test_delay_registration.py index 8a49aaf096..5248f83788 100644 --- a/tests/examples/router/test_delay_registration.py +++ b/tests/examples/router/test_delay_registration.py @@ -8,8 +8,7 @@ async def test_example(): handle = broker.handlers["prefix_in"].calls[0][0] - async with TestKafkaBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with 
TestKafkaBroker(broker), TestApp(app): + await handle.wait_call(3) - handle.mock.assert_called_with("Hello!") + handle.mock.assert_called_with("Hello!") diff --git a/tests/examples/test_e01_basic_consume.py b/tests/examples/test_e01_basic_consume.py index b81b5e697b..198ce00eed 100644 --- a/tests/examples/test_e01_basic_consume.py +++ b/tests/examples/test_e01_basic_consume.py @@ -6,8 +6,7 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) - handle.mock.assert_called_with("Hello!") + handle.mock.assert_called_with("Hello!") diff --git a/tests/examples/test_e02_1_basic_publisher.py b/tests/examples/test_e02_1_basic_publisher.py index 20dba24f17..e9df36c349 100644 --- a/tests/examples/test_e02_1_basic_publisher.py +++ b/tests/examples/test_e02_1_basic_publisher.py @@ -6,10 +6,9 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) - await handle_response.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) + await handle_response.wait_call(3) - handle.mock.assert_called_with("Hello!") - handle_response.mock.assert_called_with("Response") + handle.mock.assert_called_with("Hello!") + handle_response.mock.assert_called_with("Response") diff --git a/tests/examples/test_e02_2_basic_publisher.py b/tests/examples/test_e02_2_basic_publisher.py index c44d3a84fe..66ad2b8f4d 100644 --- a/tests/examples/test_e02_2_basic_publisher.py +++ b/tests/examples/test_e02_2_basic_publisher.py @@ -6,10 +6,9 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) - await handle_response.wait_call(3) + async with TestRabbitBroker(broker), 
TestApp(app): + await handle.wait_call(3) + await handle_response.wait_call(3) - handle.mock.assert_called_with("Hello!") - handle_response.mock.assert_called_with("Response") + handle.mock.assert_called_with("Hello!") + handle_response.mock.assert_called_with("Response") diff --git a/tests/examples/test_e02_3_basic_publisher.py b/tests/examples/test_e02_3_basic_publisher.py index be7a8a3378..bddb78c66c 100644 --- a/tests/examples/test_e02_3_basic_publisher.py +++ b/tests/examples/test_e02_3_basic_publisher.py @@ -6,10 +6,9 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) - await handle_response.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) + await handle_response.wait_call(3) - handle.mock.assert_called_with("Hello!") - handle_response.mock.assert_called_with("Response") + handle.mock.assert_called_with("Hello!") + handle_response.mock.assert_called_with("Response") diff --git a/tests/examples/test_e03_miltiple_pubsub.py b/tests/examples/test_e03_miltiple_pubsub.py index e90774bf6a..cb8886d918 100644 --- a/tests/examples/test_e03_miltiple_pubsub.py +++ b/tests/examples/test_e03_miltiple_pubsub.py @@ -12,12 +12,11 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) - await handle_response_1.wait_call(3) - await handle_response_2.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) + await handle_response_1.wait_call(3) + await handle_response_2.wait_call(3) - handle.mock.assert_called_with("Hello!") - handle_response_1.mock.assert_called_with("Response") - handle_response_2.mock.assert_called_with("Response") + handle.mock.assert_called_with("Hello!") + handle_response_1.mock.assert_called_with("Response") + 
handle_response_2.mock.assert_called_with("Response") diff --git a/tests/examples/test_e04_msg_filter.py b/tests/examples/test_e04_msg_filter.py index e546ebc9f1..7eb5b326cb 100644 --- a/tests/examples/test_e04_msg_filter.py +++ b/tests/examples/test_e04_msg_filter.py @@ -6,10 +6,9 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle_json.wait_call(3) - await handle_other_messages.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle_json.wait_call(3) + await handle_other_messages.wait_call(3) - handle_json.mock.assert_called_with({"msg": "Hello!"}) - handle_other_messages.mock.assert_called_with("Hello!") + handle_json.mock.assert_called_with({"msg": "Hello!"}) + handle_other_messages.mock.assert_called_with("Hello!") diff --git a/tests/examples/test_e05_rpc_request.py b/tests/examples/test_e05_rpc_request.py index a06fdc3038..4f6e57a0ae 100644 --- a/tests/examples/test_e05_rpc_request.py +++ b/tests/examples/test_e05_rpc_request.py @@ -6,8 +6,7 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) - handle.mock.assert_called_with("ping") + handle.mock.assert_called_with("ping") diff --git a/tests/examples/test_e06_manual_ack.py b/tests/examples/test_e06_manual_ack.py index 2011653731..fdc8f4b7db 100644 --- a/tests/examples/test_e06_manual_ack.py +++ b/tests/examples/test_e06_manual_ack.py @@ -6,8 +6,7 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) - handle.mock.assert_called_with("Hello!") + handle.mock.assert_called_with("Hello!") diff --git 
a/tests/examples/test_e07_ack_immediately.py b/tests/examples/test_e07_ack_immediately.py index ebd9de033c..8cd1b97e03 100644 --- a/tests/examples/test_e07_ack_immediately.py +++ b/tests/examples/test_e07_ack_immediately.py @@ -6,8 +6,7 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) - handle.mock.assert_called_with("Hello!") + handle.mock.assert_called_with("Hello!") diff --git a/tests/examples/test_e10_middlewares.py b/tests/examples/test_e10_middlewares.py index 6b1951c777..e44155865e 100644 --- a/tests/examples/test_e10_middlewares.py +++ b/tests/examples/test_e10_middlewares.py @@ -6,8 +6,7 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) - handle.mock.assert_called_with("fake message") + handle.mock.assert_called_with("fake message") diff --git a/tests/examples/test_e11_settings.py b/tests/examples/test_e11_settings.py index ee7ea87a70..13b5ac3634 100644 --- a/tests/examples/test_e11_settings.py +++ b/tests/examples/test_e11_settings.py @@ -6,8 +6,7 @@ @pytest.mark.asyncio async def test_example(): - async with TestRabbitBroker(broker, connect_only=True): - async with TestApp(app): - await handle.wait_call(3) + async with TestRabbitBroker(broker), TestApp(app): + await handle.wait_call(3) - handle.mock.assert_called_with("Hello!") + handle.mock.assert_called_with("Hello!") diff --git a/tests/utils/context/test_path.py b/tests/utils/context/test_path.py index 1f72e4fa1c..61f5992e34 100644 --- a/tests/utils/context/test_path.py +++ b/tests/utils/context/test_path.py @@ -9,6 +9,7 @@ RabbitQueue, TestRabbitBroker, ) +from faststream.redis import RedisBroker, TestRedisBroker 
@pytest.mark.asyncio @@ -33,6 +34,28 @@ async def h( ) + + +@pytest.mark.asyncio +async def test_redis_path(): + broker = RedisBroker() + + @broker.subscriber("in.{name}.{id}") + async def h( + name: str = Path(), + id_: int = Path("id"), + ): + assert name == "john" + assert id_ == 1 + return 1 + + async with TestRedisBroker(broker) as br: + assert 1 == await br.publish( + "", + "in.john.1", + rpc=True, + rpc_timeout=1.0, + ) + + @pytest.mark.asyncio async def test_rabbit_path(): broker = RabbitBroker() diff --git a/tests/utils/test_ast.py b/tests/utils/test_ast.py new file mode 100644 index 0000000000..29c69c924c --- /dev/null +++ b/tests/utils/test_ast.py @@ -0,0 +1,85 @@ +import pytest + +from faststream.utils.ast import is_contains_context_name + + +class Context: + def __enter__(self) -> "Context": + return self + + def __exit__(self, *args): + pass + + async def __aenter__(self) -> "Context": + return self + + async def __aexit__(self, *args): + pass + + +class A(Context): + def __init__(self): + self.contains = is_contains_context_name(self.__class__.__name__, B.__name__) + + +class B(Context): + def __init__(self): + pass + + +def test_base(): + with A() as a, B(): + assert a.contains + + +@pytest.mark.asyncio +async def test_base_async(): + async with A() as a, B(): + assert a.contains + + +def test_nested(): + with A() as a: + with B(): + assert a.contains + + +@pytest.mark.asyncio +async def test_nested_async(): + async with A() as a: + async with B(): + assert a.contains + + +@pytest.mark.asyncio +async def test_async_A(): + async with A() as a: + with B(): + assert a.contains + + +@pytest.mark.asyncio +async def test_async_B(): + with A() as a: + async with B(): + assert a.contains + + +def test_base_invalid(): + with B(), B(), A() as a: + assert not a.contains + + +def test_nested_invalid(): + with B(): + with A() as a: + assert not a.contains + + +def test_not_broken(): + with A() as a, B(): + assert a.contains + + # test ast processes another context 
correctly + with pytest.raises(ValueError): + raise ValueError()