Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Ellipsis] feat: add OpenRouter model testing and integration #1060

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions docs/api-reference/Model Testing/model_testing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import os

import requests


# model testing
def test_openrouter_model():
    """Send a minimal chat-completion request to OpenRouter to verify the model.

    Reads the API key from the OPENROUTER_API_KEY environment variable,
    posts a one-message test prompt, and prints the outcome.

    Returns:
        bool: True if the API answered successfully, False otherwise.
    """
    url = "https://openrouter.ai/api/v1/chat/completions"
    api_key = os.getenv("OPENROUTER_API_KEY") or ''

    # Fail fast with a clear message instead of sending a request that is
    # guaranteed to be rejected as unauthenticated.
    if not api_key:
        print("Model test failed: OPENROUTER_API_KEY is not set.")
        return False

    # Set headers and payload for test
    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
    payload = {
        "model": "openai/gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "This is a test message for model verification."}],
    }

    # Send test request; the timeout prevents the script from hanging forever
    # when the endpoint is unreachable.
    try:
        response = requests.post(url, headers=headers, json=payload, timeout=30)
    except requests.RequestException as exc:
        print(f"Model test failed: request error: {exc}")
        return False

    if response.ok:
        # Display a sample response if the model responds successfully
        response_data = response.json()
        print("Model test successful!")
        print("Response:", response_data)
        return True

    # If there’s an error, print the error message
    print(f"Model test failed with status code {response.status_code}.")
    print("Error details:", response.text)
    return False


# Run the connectivity test only when executed as a script, so importing
# this module does not trigger a network call.
if __name__ == "__main__":
    test_openrouter_model()
37 changes: 37 additions & 0 deletions docs/integrations/openrouter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import os

import requests


def get_openrouter_response() -> None:
    """Send a sample chat-completion request to OpenRouter and print the reply.

    Configuration is read from environment variables:
        OPENROUTER_API_KEY -- API key (required for a successful call);
                              name matches what setup.sh writes to .env
        OPENROUTER_MODEL   -- model id (defaults to openai/gpt-3.5-turbo)
        YOUR_SITE_URL      -- optional HTTP-Referer ranking header
        YOUR_SITE_NAME     -- optional X-Title ranking header
    """
    url = "https://openrouter.ai/api/v1/chat/completions"

    # Create headers
    headers = {
        "Authorization": f"Bearer {os.getenv('OPENROUTER_API_KEY', '')}",
        "Content-Type": "application/json",
    }
    # The ranking headers are optional; only send them when configured,
    # because requests rejects None as a header value.
    site_url = os.getenv("YOUR_SITE_URL")
    if site_url:
        headers["HTTP-Referer"] = site_url
    site_name = os.getenv("YOUR_SITE_NAME")
    if site_name:
        headers["X-Title"] = site_name

    # Read the model from the environment instead of sending the literal
    # placeholder string "OPENROUTER_MODEL", which is not a valid model id.
    model = os.getenv("OPENROUTER_MODEL", "openai/gpt-3.5-turbo")
    payload = {"model": model, "messages": [{"role": "user", "content": "YOUR QUESTION HERE"}]}

    print("Sending request...")

    # Send request and capture response; time out rather than hang forever.
    try:
        response = requests.post(url, headers=headers, json=payload, timeout=30)
    except requests.RequestException as exc:
        print(f"Error: request failed: {exc}")
        return

    # Print status code and response content
    print("Response Content:", response.text)

    # Handle the response
    if response.ok:
        try:
            print("Response JSON:", response.json())
        except ValueError:
            print("Response is not in JSON format.")
    else:
        print(f"Error: {response.status_code}\n{response.text}")


# Run only when executed as a script, so importing this module is side-effect free.
if __name__ == "__main__":
    get_openrouter_response()
25 changes: 24 additions & 1 deletion setup.sh
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ setup_llm_providers() {
else
update_or_add_env_var "OPENAI_API_KEY" "$openai_api_key"
update_or_add_env_var "ENABLE_OPENAI" "true"
model_options+=("OPENAI_GPT4_TURBO" "OPENAI_GPT4V" "OPENAI_GPT4O")
# Only OpenAI models belong in the OpenAI-enabled branch; OpenRouter-hosted
# models (Anthropic, Meta, ...) are offered by the OpenRouter section instead.
model_options+=("OPENAI_GPT4_TURBO" "OPENAI_GPT4V" "OPENAI_GPT4O")
fi
else
update_or_add_env_var "ENABLE_OPENAI" "false"
Expand Down Expand Up @@ -98,6 +98,24 @@ setup_llm_providers() {
update_or_add_env_var "ENABLE_AZURE" "false"
fi

# Openrouter Configuration: prompt for an API key, persist it to .env, and
# expose the OpenRouter-hosted models as selectable options.
echo "To enable Openrouter, you must have an Openrouter API key."
# -r keeps backslashes in user input literal instead of treating them as escapes.
read -r -p "Do you want to enable Openrouter (y/n)? " enable_openrouter
if [[ "$enable_openrouter" == "y" ]]; then
    read -r -p "Enter your Openrouter API key: " openrouter_api_key
    if [ -z "$openrouter_api_key" ]; then
        echo "Error: Openrouter API key is required."
        echo "Openrouter will not be enabled."
    else
        update_or_add_env_var "OPENROUTER_API_KEY" "$openrouter_api_key"
        update_or_add_env_var "ENABLE_OPENROUTER" "true"
        # OpenRouter model ids are lowercase "vendor/model" slugs; the previous
        # uppercase ANTHROPIC/CLAUDE-3.5-SONNET entry was inconsistent with the
        # other ids on this line.
        model_options+=("anthropic/claude-3.5-sonnet" "meta-llama/llama-3.2-90b-vision-instruct" "google/gemini-flash-1.5-8b")
    fi
else
    update_or_add_env_var "ENABLE_OPENROUTER" "false"
fi


# Model Selection
if [ ${#model_options[@]} -eq 0 ]; then
echo "No LLM providers enabled. You won't be able to run Skyvern unless you enable at least one provider. You can re-run this script to enable providers or manually update the .env file."
Expand Down Expand Up @@ -308,3 +326,8 @@ main() {

# Execute main function
main

# Test Model: verify the OpenRouter connection after setup completes.
# NOTE(review): the previous version ran a bare `bash` (which spawns an
# interactive subshell and blocks the script) and imported a nonexistent
# `your_module`; invoke the bundled test script directly instead.
echo "Testing OpenRouter model connection..."
python3 "docs/api-reference/Model Testing/model_testing.py"
Loading