Skip to content

Commit

Permalink
Add SSE example, move folders around, clean secrets (#104)
Browse files Browse the repository at this point in the history
  • Loading branch information
hume-brian authored Nov 15, 2024
1 parent 8fe4a27 commit f281a75
Show file tree
Hide file tree
Showing 55 changed files with 8,626 additions and 5 deletions.
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,4 @@
.DS_Store
.hume/
__pycache__/
.venv/
.DS_Store
25 changes: 25 additions & 0 deletions evi-custom-language-model-sse/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Custom Language Model SSE Client

A Python client library for handling Server-Sent Events (SSE) with Hume Custom Language Models, specifically designed to work with OpenAI-compatible APIs.

## Features

- Server-Sent Events (SSE) client implementation
- Compatible with OpenAI-style streaming responses
- Support for custom language model endpoints
- Easy-to-use async interface

## Installation

```bash
poetry install
```

## Usage

Run the server:
```bash
poetry run python openai_sse.py
```

Expose the local server publicly with ngrok (e.g. `ngrok http 8000`) and use the resulting ngrok URL as the custom language model endpoint in your Hume EVI configuration.
75 changes: 75 additions & 0 deletions evi-custom-language-model-sse/openai_sse.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
from typing import AsyncIterable, Optional
import fastapi
from fastapi.responses import StreamingResponse
from openai.types.chat import ChatCompletionChunk, ChatCompletionMessageParam
import openai
import os
from fastapi import HTTPException, Security
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials

app = fastapi.FastAPI()
"""
This would be the server that Hume would send requests to, that would then
get streamed back to us.
uvicorn openai_sse:app --reload
"""

# Async OpenAI client. Reads OPENAI_API_KEY from the environment and raises
# KeyError at import time if it is not set.
client = openai.AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])


async def get_response(
    raw_messages: list[dict],
    custom_session_id: Optional[str],
    model: str = "gpt-4o",
) -> AsyncIterable[str]:
    """
    Stream a chat completion from OpenAI back to Hume as Server-Sent Events.

    Args:
        raw_messages: Chat history as dicts with "role" and "content" keys;
            any extra keys are dropped.
        custom_session_id: Session identifier forwarded by Hume. Currently
            unused; kept in the signature for future per-session handling.
        model: OpenAI model name to query. Defaults to "gpt-4o".

    Yields:
        SSE-framed strings ("data: <json>\\n\\n"), terminated by the
        OpenAI-style "data: [DONE]\\n\\n" sentinel.
    """
    # Keep only the fields the OpenAI API expects. Raises KeyError if a
    # message is missing "role" or "content".
    messages: list[ChatCompletionMessageParam] = [
        {"role": m["role"], "content": m["content"]} for m in raw_messages
    ]

    chat_completion_chunk_stream = await client.chat.completions.create(
        messages=messages,
        model=model,
        stream=True,
    )

    # exclude_none keeps each SSE payload minimal; the chunk schema is
    # forwarded to Hume unmodified otherwise.
    async for chunk in chat_completion_chunk_stream:
        yield "data: " + chunk.model_dump_json(exclude_none=True) + "\n\n"
    # End-of-stream marker, matching OpenAI's own SSE termination convention.
    yield "data: [DONE]\n\n"


security = HTTPBearer()
# Shared secret for Bearer auth. Override via the CLM_API_KEY environment
# variable in production; the literal fallback is for local development only.
API_KEY = os.environ.get("CLM_API_KEY", "your-secret-key-here")


async def verify_token(credentials: HTTPAuthorizationCredentials = Security(security)):
    """
    FastAPI dependency that validates the Bearer token on incoming requests.

    Returns:
        The validated token string.

    Raises:
        HTTPException: 401 if the presented token does not match API_KEY.
    """
    import secrets  # stdlib; scoped locally so the module header stays as-is

    # compare_digest is constant-time, avoiding a timing side channel that a
    # plain `!=` comparison would leak.
    if not secrets.compare_digest(credentials.credentials, API_KEY):
        raise HTTPException(status_code=401, detail="Invalid authentication token")
    return credentials.credentials


@app.post("/chat/completions", response_class=StreamingResponse)
async def root(
    request: fastapi.Request,
    # token: str = Security(verify_token)
):
    """Chat completions endpoint with Bearer token authentication"""
    # Pull the OpenAI-style chat payload out of the request body.
    body = await request.json()
    chat_messages = body["messages"]
    print(chat_messages)

    # Hume passes its session id as a query parameter, if at all.
    session_id = request.query_params.get("custom_session_id")
    print(session_id)

    # Stream the model output back as Server-Sent Events.
    return StreamingResponse(
        get_response(chat_messages, custom_session_id=session_id),
        media_type="text/event-stream",
    )


if __name__ == "__main__":
    import uvicorn

    # Run the ASGI app directly (alternative to `uvicorn openai_sse:app --reload`).
    # 0.0.0.0 binds all interfaces so ngrok / containers can reach the server.
    uvicorn.run(app, host="0.0.0.0", port=8000)
483 changes: 483 additions & 0 deletions evi-custom-language-model-sse/poetry.lock

Large diffs are not rendered by default.

17 changes: 17 additions & 0 deletions evi-custom-language-model-sse/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
[tool.poetry]
name = "evi-custom-language-model-sse"
version = "0.1.0"
description = ""
authors = ["[email protected]"]
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.11"
fastapi = "^0.115.5"
uvicorn = "^0.32.0"
openai = "^1.54.4"


[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -84,15 +84,15 @@ You can sign up for a free SerpApi key that is good for 100 free searches. Once
* Sign up for an account [here.](https://serpapi.com/users/sign_up)

```text
SERPAPI_API_KEY=9a71b5441a2983748991d9e47b8aa54eb235f282d058e7e6d95dad854e315433
SERPAPI_API_KEY=<your-serpapi-api-key>
```

### 9. Sign up (if you haven't already) for an OpenAI API key and also add it to the `.env` file

Your `.env` file should look something like this:
```text
SERPAPI_API_KEY=9a71b5441a2983748991d9e47b8aa5482d058e7e6d95dad854e315433
OPENAI_API_KEY=sk-pFgGbNkxB95472398475932NLfT3BlbkFJBC6YKGKjjjjkHezP48ZK7
SERPAPI_API_KEY=<your-serpapi-api-key>
OPENAI_API_KEY=<your-openai-api-key>
```

Expand Down
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
37 changes: 37 additions & 0 deletions evi-wss-clm-endpoint/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import json

import uvicorn
from agent import eliza_response

from fastapi import FastAPI, WebSocket

eliza_app = FastAPI()


@eliza_app.get("/")
async def root():
    """Simple liveness endpoint."""
    greeting = {"message": "Hello World"}
    return greeting


@eliza_app.websocket("/ws")
async def websocket_handler(websocket: WebSocket) -> None:
    """
    Bridge Hume EVI to the Eliza agent over a WebSocket.

    For each incoming payload, extracts the most recent user message,
    generates an Eliza reply, and sends it back as an "assistant_input"
    message followed by an "assistant_end" marker.
    """
    await websocket.accept()
    # NOTE(review): the loop never breaks; a client disconnect will raise out
    # of receive_text (e.g. WebSocketDisconnect) — confirm that is the
    # intended shutdown path.
    while True:
        data = await websocket.receive_text()

        # Hume sends a JSON payload containing the full message history.
        hume_payload = json.loads(data)

        print(hume_payload)

        # Text content of the most recent message in the history.
        last_message = hume_payload["messages"][-1]["message"]["content"]

        # Keep only the text before the first "{" — presumably strips embedded
        # JSON metadata appended to the content; verify against real payloads.
        user_text = last_message.split("{")[0] or ""

        await websocket.send_text(
            json.dumps({"type": "assistant_input", "text": eliza_response(user_text)})
        )
        # Tell EVI the assistant turn is complete.
        await websocket.send_text(json.dumps({"type": "assistant_end"}))


if __name__ == "__main__":
    # Serve the Eliza app directly, bound to all interfaces on port 8000.
    uvicorn.run(eliza_app, host="0.0.0.0", port=8000)
File renamed without changes.
File renamed without changes.
File renamed without changes.
Binary file not shown.
45 changes: 45 additions & 0 deletions evi-wss-clm-endpoint/cdk/cdk.out/ElizaStack.assets.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
{
"version": "36.0.0",
"files": {
"ee7de53d64cc9d6248fa6aa550f92358f6c907b5efd6f3298aeab1b5e7ea358a": {
"source": {
"path": "asset.ee7de53d64cc9d6248fa6aa550f92358f6c907b5efd6f3298aeab1b5e7ea358a",
"packaging": "zip"
},
"destinations": {
"current_account-current_region": {
"bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}",
"objectKey": "ee7de53d64cc9d6248fa6aa550f92358f6c907b5efd6f3298aeab1b5e7ea358a.zip",
"assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}"
}
}
},
"914151f6f3dff61235ecc07604e20d47eefdda2a4051d47aff607ccea64c12dd": {
"source": {
"path": "ElizaStack.template.json",
"packaging": "file"
},
"destinations": {
"current_account-current_region": {
"bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}",
"objectKey": "914151f6f3dff61235ecc07604e20d47eefdda2a4051d47aff607ccea64c12dd.json",
"assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}"
}
}
}
},
"dockerImages": {
"689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0": {
"source": {
"directory": "asset.689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0"
},
"destinations": {
"current_account-current_region": {
"repositoryName": "cdk-hnb659fds-container-assets-${AWS::AccountId}-${AWS::Region}",
"imageTag": "689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0",
"assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-image-publishing-role-${AWS::AccountId}-${AWS::Region}"
}
}
}
}
}
Loading

0 comments on commit f281a75

Please sign in to comment.