diff --git a/docs/api-reference/Model Testing/model_testing.py b/docs/api-reference/Model Testing/model_testing.py
new file mode 100644
index 000000000..8fc3063ab
--- /dev/null
+++ b/docs/api-reference/Model Testing/model_testing.py
@@ -0,0 +1,33 @@
+import os
+
+import requests
+
+
+# model testing: smoke-test the OpenRouter chat-completions endpoint
+def test_openrouter_model():
+    url = "https://openrouter.ai/api/v1/chat/completions"
+    api_key = os.getenv("OPENROUTER_API_KEY") or ''
+
+    # Set headers and payload for test
+    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+    payload = {
+        "model": "openai/gpt-3.5-turbo",
+        "messages": [{"role": "user", "content": "This is a test message for model verification."}],
+    }
+
+    # Send test request; timeout keeps the script from hanging forever on a dead network
+    response = requests.post(url, headers=headers, json=payload, timeout=30)
+
+    if response.ok:
+        # Display a sample response if the model responds successfully
+        response_data = response.json()
+        print("Model test successful!")
+        print("Response:", response_data)
+    else:
+        # If there's an error, print the error message
+        print(f"Model test failed with status code {response.status_code}.")
+        print("Error details:", response.text)
+
+
+# Call the testing function
+test_openrouter_model()
diff --git a/docs/integrations/openrouter.py b/docs/integrations/openrouter.py
new file mode 100644
index 000000000..6eb5989a6
--- /dev/null
+++ b/docs/integrations/openrouter.py
@@ -0,0 +1,37 @@
+import os
+
+import requests
+
+
+def get_openrouter_response() -> None:
+    url = "https://openrouter.ai/api/v1/chat/completions"
+
+    # Env-var names must match what setup.sh writes (OPENROUTER_API_KEY) -- env vars are case-sensitive
+    headers = {
+        "Authorization": f"Bearer {os.getenv('OPENROUTER_API_KEY')}",
+        "HTTP-Referer": os.getenv("YOUR_SITE_URL"),
+        "X-Title": os.getenv("YOUR_SITE_NAME"),
+        "Content-Type": "application/json",
+    }
+
+    payload = {"model": "OPENROUTER_MODEL", "messages": [{"role": "user", "content": "YOUR QUESTION HERE"}]}
+
+    print("Sending request...")
+
+    # Send request and capture response; timeout avoids blocking indefinitely
+    response = requests.post(url, headers=headers, json=payload, timeout=30)
+
+    # Print status code and response content
+    print("Response Content:", response.text)
+
+    # Handle the response
+    if response.ok:
+        try:
+            print("Response JSON:", response.json())
+        except ValueError:
+            print("Response is not in JSON format.")
+    else:
+        print(f"Error: {response.status_code}\n{response.text}")
+
+
+get_openrouter_response()
diff --git a/setup.sh b/setup.sh
index 8b7a145ec..8655184b2
--- a/setup.sh
+++ b/setup.sh
@@ -98,6 +98,24 @@ setup_llm_providers() {
         update_or_add_env_var "ENABLE_AZURE" "false"
     fi
 
+    # Openrouter Configuration
+    echo "To enable Openrouter, you must have an Openrouter API key."
+    read -p "Do you want to enable Openrouter (y/n)? " enable_openrouter
+    if [[ "$enable_openrouter" == "y" ]]; then
+        read -p "Enter your Openrouter API key: " openrouter_api_key
+        if [ -z "$openrouter_api_key" ]; then
+            echo "Error: Openrouter API key is required."
+            echo "Openrouter will not be enabled."
+        else
+            update_or_add_env_var "OPENROUTER_API_KEY" "$openrouter_api_key"
+            update_or_add_env_var "ENABLE_OPENROUTER" "true"
+            model_options+=("anthropic/claude-3.5-sonnet" "meta-llama/llama-3.2-90b-vision-instruct" "google/gemini-flash-1.5-8b")
+        fi
+    else
+        update_or_add_env_var "ENABLE_OPENROUTER" "false"
+    fi
+
+    # Model Selection
 
     if [ ${#model_options[@]} -eq 0 ]; then
         echo "No LLM providers enabled. You won't be able to run Skyvern unless you enable at least one provider. You can re-run this script to enable providers or manually update the .env file."
@@ -308,3 +326,7 @@ main() {
 
 # Execute main function
 main
+
+# Test Model: run the OpenRouter connectivity smoke test (script self-invokes its test function)
+echo "Testing OpenRouter model connection..."
+python3 "docs/api-reference/Model Testing/model_testing.py"