Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
clemlesne committed Nov 21, 2024
2 parents e7ea03d + 23503dc commit 5ca7ac2
Show file tree
Hide file tree
Showing 37 changed files with 4,431 additions and 3,512 deletions.
1 change: 0 additions & 1 deletion .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
"github.vscode-github-actions",
"mechatroner.rainbow-csv",
"mikestead.dotenv",
"ms-azuretools.vscode-azurefunctions",
"ms-azuretools.vscode-bicep",
"ms-python.black-formatter",
"ms-python.debugpy",
Expand Down
3 changes: 3 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
*
!app/
!requirements.txt
3 changes: 3 additions & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,6 @@ indent_size = 4
[Makefile]
indent_size = 4
indent_style = tab

[Dockerfile]
indent_size = 4
16 changes: 13 additions & 3 deletions .github/workflows/pipeline.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,8 @@ jobs:
- sast-semgrep
runs-on: ubuntu-22.04
permissions:
# Allow to write to GitHub Packages
attestations: write
id-token: write
packages: write
steps:
- name: Checkout
Expand All @@ -116,7 +117,7 @@ jobs:
with:
version: v${{ env.BUILDX_VERSION }}

- name: Login to registry - GitHub
- name: Login to registry
uses: docker/[email protected]
with:
registry: ${{ env.CONTAINER_REGISTRY_GHCR }}
Expand All @@ -141,20 +142,29 @@ jobs:
org.opencontainers.image.vendor=${{ github.actor }}
- name: Build/push container
id: build
uses: docker/[email protected]
with:
build-args: |
VERSION=${{ needs.init.outputs.VERSION_FULL }}
cache-from: type=gha
cache-to: type=gha
context: .
file: cicd/Dockerfile
labels: ${{ steps.meta.outputs.labels }}
outputs: type=registry,oci-mediatypes=true,compression=estargz,compression-level=9,force-compression=true
platforms: ${{ env.CONTAINER_PLATFORMS }}
provenance: true
push: true
sbom: true
tags: ${{ steps.meta.outputs.tags }}

- name: Generate attestations
uses: actions/[email protected]
with:
push-to-registry: true
subject-digest: ${{ steps.build.outputs.digest }}
subject-name: ${{ env.CONTAINER_REGISTRY_GHCR }}/${{ env.CONTAINER_NAME }}

create-release:
name: Create release
needs:
Expand Down
3 changes: 0 additions & 3 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -744,8 +744,5 @@ config*.yaml
# Tests reports
test-reports/

# Azure Functions
.python_packages/

# Azure dev tunnels local installation
DevTunnels/
1 change: 0 additions & 1 deletion .vscode/extensions.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
"EditorConfig.EditorConfig",
"github.vscode-github-actions",
"mikestead.dotenv",
"ms-azuretools.vscode-azurefunctions",
"ms-azuretools.vscode-bicep",
"ms-python.debugpy",
"ms-python.python",
Expand Down
17 changes: 12 additions & 5 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ version_full ?= $(shell $(MAKE) --silent version-full)
version_small ?= $(shell $(MAKE) --silent version)
# Dev tunnels configuration
tunnel_name := call-center-ai-$(shell hostname | sed 's/[^a-zA-Z0-9]//g' | tr '[:upper:]' '[:lower:]')
tunnel_url ?= $(shell res=$$(devtunnel show $(tunnel_name) | grep -o 'http[s]*://[^"]*' | xargs) && echo $${res%/})
tunnel_url ?= $(shell res=$$(devtunnel show $(tunnel_name) | grep -o 'http[s]*://[^ ]*' | xargs) && echo $${res%/})
# Container configuration
container_name := ghcr.io/clemlesne/call-center-ai
docker := docker
Expand Down Expand Up @@ -37,8 +37,11 @@ brew:
@echo "➡️ Installing Azure CLI..."
brew install azure-cli

@echo "➡️ Installing Azure Functions Core Tools..."
brew tap azure/functions && brew install azure-functions-core-tools@4
@echo "➡️ Installing pyenv..."
brew install pyenv

@echo "➡️ Installing Rust..."
brew install rust

@echo "➡️ Installing Azure Dev tunnels..."
curl -sL https://aka.ms/DevTunnelCliInstall | bash
Expand Down Expand Up @@ -66,12 +69,14 @@ upgrade:
@echo "➡️ Compiling app requirements..."
pip-compile \
--output-file requirements.txt \
--upgrade \
pyproject.toml

@echo "➡️ Compiling dev requirements..."
pip-compile \
--extra dev \
--output-file requirements-dev.txt \
--upgrade \
pyproject.toml

@echo "➡️ Upgrading Bicep CLI..."
Expand Down Expand Up @@ -118,8 +123,10 @@ dev:
--workers 2

build:
$(docker) build \
DOCKER_BUILDKIT=1 $(docker) build \
--build-arg VERSION=$(version_full) \
--file cicd/Dockerfile \
--platform linux/amd64,linux/arm64 \
--tag $(container_name):$(version_small) \
--tag $(container_name):latest \
.
Expand Down Expand Up @@ -195,7 +202,7 @@ watch-call:
@echo "👀 Watching status of $(phone_number)..."
while true; do \
clear; \
curl -s "$(endpoint)/call?phone_number=%2B$(phone_number)" | yq --prettyPrint '.[0] | {"phone_number": .phone_number, "claim": .claim, "reminders": .reminders}'; \
curl -s "$(endpoint)/call?phone_number=%2B$(phone_number)" | yq --prettyPrint '.[0] | {"phone_number": .initiate.phone_number, "claim": .claim, "reminders": .reminders}'; \
sleep 3; \
done

Expand Down
42 changes: 27 additions & 15 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -283,12 +283,11 @@ In macOS, with [Homebrew](https://brew.sh), simply type `make brew`.

For other systems, make sure you have the following installed:

- Bash compatible shell, like `bash` or `zsh`
- [yq](https://github.com/mikefarah/yq?tab=readme-ov-file#install)
- Make, `apt install make` (Ubuntu), `yum install make` (CentOS), `brew install make` (macOS)
- [Azure CLI](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli)
- [Azure Functions Core Tools](https://github.com/Azure/azure-functions-core-tools?tab=readme-ov-file#installing)
- [Twilio CLI](https://www.twilio.com/docs/twilio-cli/getting-started/install) (optional)
- [yq](https://github.com/mikefarah/yq?tab=readme-ov-file#install)
- Bash compatible shell, like `bash` or `zsh`
- Make, `apt install make` (Ubuntu), `yum install make` (CentOS), `brew install make` (macOS)

Then, Azure resources are needed:

Expand Down Expand Up @@ -365,7 +364,17 @@ make logs name=my-rg-name

### Local (on your machine)

#### 1. Create the full config file
#### 1. Prerequisites

In macOS, with [Homebrew](https://brew.sh), simply type `make brew`, if not already done.

For other systems, make sure you have the following installed:

- [pyenv](https://github.com/pyenv/pyenv) (optional, with a [virtualenv](https://github.com/pyenv/pyenv-virtualenv) named `callcenterai312`)
- [Python 3.12](https://docs.python.org/3.12)
- [Rust](https://rust-lang.org)

#### 2. Create the full config file

> [!TIP]
> To use a Service Principal to authenticate to Azure, you can also add the following in a `.env` file:
Expand All @@ -379,7 +388,7 @@ make logs name=my-rg-name
> [!TIP]
> If the application is already deployed on Azure, you can run `make name=my-rg-name sync-local-config` to copy the configuration from the Azure Function App to your local machine.
Local config file is named `config.yaml`:
Configure the local config file, named `config.yaml`:

```yaml
# config.yaml
Expand All @@ -402,9 +411,11 @@ communication_services:
resource_id: xxx
sms_queue_name: sms-33612345678

# Must be of type "AI services multi-service account"
cognitive_service:
# Must be of type "AI services multi-service account"
endpoint: https://xxx.cognitiveservices.azure.com
region: swedencentral
resource_id: xxx

llm:
fast:
Expand Down Expand Up @@ -437,7 +448,7 @@ ai_translation:
endpoint: https://xxx.cognitiveservices.azure.com
```
#### 2. Run the deployment automation
#### 3. Run the deployment automation
```zsh
make deploy-bicep deploy-post name=my-rg-name
Expand All @@ -446,14 +457,14 @@ make deploy-bicep deploy-post name=my-rg-name
- This will deploy the Azure resources without the API server, allowing you to test the bot locally
- Wait for the deployment to finish

#### 3. Initialize local function config
#### 4. Initialize local function config

Copy `local.example.settings.json` to `local.settings.json`, then fill the required fields:

- `APPLICATIONINSIGHTS_CONNECTION_STRING`, as the connection string of the Application Insights resource
- `AzureWebJobsStorage`, as the connection string of the Azure Storage account

#### 4. Connect to Azure Dev tunnels
#### 5. Connect to Azure Dev tunnels

> [!IMPORTANT]
> Tunnel requires to be run in a separate terminal, because it needs to be running all the time
Expand All @@ -466,7 +477,7 @@ devtunnel login
make tunnel
```

#### 5. Iterate quickly with the code
#### 6. Iterate quickly with the code

> [!NOTE]
> To override a specific configuration value, you can use environment variables. For example, to override the `llm.fast.endpoint` value, you can use the `LLM__FAST__ENDPOINT` variable:
Expand Down Expand Up @@ -622,11 +633,12 @@ Conversation options are represented as features. They can be configured from Ap
|-|-|-|
| `answer_hard_timeout_sec` | The hard timeout for the bot answer in seconds. | `int` | 180 |
| `answer_soft_timeout_sec` | The soft timeout for the bot answer in seconds. | `int` | 30 |
| `callback_timeout_hour` | The timeout for a callback in hours. | `int` | 72 |
| `phone_silence_timeout_sec` | The timeout for phone silence in seconds. | `int` | 1 |
| `callback_timeout_hour` | The timeout for a callback in hours. | `int` | 3 |
| `recognition_retry_max` | The maximum number of retries for voice recognition. | `int` | 2 |
| `recording_enabled` | Whether call recording is enabled. | `bool` | false |
| `slow_llm_for_chat` | Whether to use the slower LLM for chat. | `bool` | true |
| `voice_recognition_retry_max` | The maximum number of retries for voice recognition. | `int` | 2 |
| `slow_llm_for_chat` | Whether to use the slow LLM for chat. | `bool` | false |
| `vad_silence_timeout_ms` | The timeout for voice activity detection silence, in milliseconds. | `int` | 500 |
| `vad_threshold` | The threshold for voice activity detection. | `float` | 0.5 |

### Use an OpenAI compatible model for the LLM

Expand Down
Loading

0 comments on commit 5ca7ac2

Please sign in to comment.