Skip to content

Commit

Permalink
adds anthropic model claude 3 opus
Browse files Browse the repository at this point in the history
  • Loading branch information
lauraschauer committed Jul 5, 2024
1 parent f15a5ad commit f4139d4
Show file tree
Hide file tree
Showing 3 changed files with 78 additions and 2 deletions.
2 changes: 2 additions & 0 deletions prospector/llm/instantiation.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from langchain_mistralai import ChatMistralAI
from langchain_openai import ChatOpenAI

from llm.models.anthropic import Anthropic
from llm.models.gemini import Gemini
from llm.models.mistral import Mistral
from llm.models.openai import OpenAI
Expand All @@ -26,6 +27,7 @@
# "gpt-4o": OpenAI, # currently TBD
"gemini-1.0-pro": Gemini,
"mistral-large": Mistral,
"claude-3-opus": Anthropic,
}


Expand Down
4 changes: 2 additions & 2 deletions prospector/llm/llm_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,9 +113,9 @@ def classify_commit(
except Exception as e:
raise RuntimeError(f"Prompt-model chain could not be invoked: {e}")

if is_relevant == "True":
if is_relevant in ["True", "ANSWER:True"]:
return True
elif is_relevant == "False":
elif is_relevant in ["False", "ANSWER:False"]:
return False
else:
raise RuntimeError(f"The model returned an invalid response: {is_relevant}")
74 changes: 74 additions & 0 deletions prospector/llm/models/anthropic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
from typing import Any, Dict, List, Optional

import requests
from langchain_core.language_models.llms import LLM

import llm.instantiation as instantiation
from log.logger import logger


class Anthropic(LLM):
    """LangChain LLM wrapper that invokes an Anthropic Claude model deployed
    behind SAP AI Core via its REST endpoint."""

    model_name: str  # e.g. "claude-3-opus"
    deployment_url: str  # base URL of the AI Core deployment for this model
    temperature: float  # sampling temperature forwarded to the model
    ai_core_sk_filepath: str  # path to the AI Core service-key file used for auth headers

    @property
    def _llm_type(self) -> str:
        return "SAP Anthropic"

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Return a dictionary of identifying parameters."""
        return {
            "model_name": self.model_name,
            "deployment_url": self.deployment_url,
            "temperature": self.temperature,
            "ai_core_sk_filepath": self.ai_core_sk_filepath,
        }

    def _call(
        self, prompt: str, stop: Optional[List[str]] = None, **kwargs: Any
    ) -> str:
        """Send *prompt* to the deployed Claude model and return its text reply.

        Logs and re-raises any ``requests`` exception so the caller (the
        prompt-model chain) can surface the failure.
        """
        endpoint = f"{self.deployment_url}/invoke"
        headers = instantiation.get_headers(self.ai_core_sk_filepath)
        data = {
            "anthropic_version": "bedrock-2023-05-31",
            "max_tokens": 100,
            "messages": [
                {
                    "role": "user",
                    "content": f"{prompt}",
                }
            ],
            "temperature": self.temperature,
        }

        try:
            # Fix: without an explicit timeout the Timeout handler below can
            # never fire and a stalled request would block indefinitely.
            response = requests.post(endpoint, headers=headers, json=data, timeout=60)
            response.raise_for_status()
            return self.parse(response.json())
        except requests.exceptions.HTTPError as http_error:
            logger.error(
                f"HTTP error occurred when sending a request through AI Core: {http_error}"
            )
            raise
        except requests.exceptions.Timeout as timeout_err:
            logger.error(
                f"Timeout error occurred when sending a request through AI Core: {timeout_err}"
            )
            raise
        except requests.exceptions.ConnectionError as conn_err:
            logger.error(
                f"Connection error occurred when sending a request through AI Core: {conn_err}"
            )
            raise
        except requests.exceptions.RequestException as req_err:
            logger.error(
                f"A request error occurred when sending a request through AI Core: {req_err}"
            )
            raise

    def parse(self, message) -> str:
        """Extract the generated text from an Anthropic Messages API response
        (the response body is ``{"content": [{"text": ...}, ...], ...}``)."""
        return message["content"][0]["text"]

0 comments on commit f4139d4

Please sign in to comment.