diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts
index 1abb68d993d5..1af070db4b9a 100644
--- a/client/src/api/schema/schema.ts
+++ b/client/src/api/schema/schema.ts
@@ -22,11 +22,44 @@ export interface paths {
trace?: never;
};
"/api/chat": {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ get?: never;
+ put?: never;
/**
* Query
* @description We're off to ask the wizard
*/
post: operations["query_api_chat_post"];
+ delete?: never;
+ options?: never;
+ head?: never;
+ patch?: never;
+ trace?: never;
+ };
+ "/api/chat/{job_id}/feedback": {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ get?: never;
+ /**
+ * Feedback
+ * @description Provide feedback on the chatbot response.
+ */
+ put: operations["feedback_api_chat__job_id__feedback_put"];
+ post?: never;
+ delete?: never;
+ options?: never;
+ head?: never;
+ patch?: never;
+ trace?: never;
};
"/api/configuration": {
parameters: {
@@ -6551,16 +6584,16 @@ export interface components {
/** ChatPayload */
ChatPayload: {
/**
- * Message
- * @description The message to be sent to the chat.
+ * Context
+ * @description The context for the chatbot.
+ * @default
*/
- query: string;
+ context: string | null;
/**
- * Context
- * @description The context identifier to be used by the chat.
- * @enum {string}
+ * Query
+ * @description The query to be sent to the chatbot.
*/
- context?: "username" | "tool_error";
+ query: string;
};
/** CheckForUpdatesResponse */
CheckForUpdatesResponse: {
@@ -18680,10 +18713,17 @@ export interface operations {
};
};
query_api_chat_post: {
- /**
- * Query
- * @description We're off to ask the wizard
- */
+ parameters: {
+ query: {
+ job_id: string | null;
+ };
+ header?: {
+ /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */
+ "run-as"?: string | null;
+ };
+ path?: never;
+ cookie?: never;
+ };
requestBody: {
content: {
"application/json": components["schemas"]["ChatPayload"];
@@ -18692,14 +18732,74 @@ export interface operations {
responses: {
/** @description Successful Response */
200: {
+ headers: {
+ [name: string]: unknown;
+ };
content: {
"application/json": string;
};
};
- /** @description Validation Error */
- 422: {
+ /** @description Request Error */
+ "4XX": {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["MessageExceptionModel"];
+ };
+ };
+ /** @description Server Error */
+ "5XX": {
+ headers: {
+ [name: string]: unknown;
+ };
content: {
- "application/json": components["schemas"]["HTTPValidationError"];
+ "application/json": components["schemas"]["MessageExceptionModel"];
+ };
+ };
+ };
+ };
+ feedback_api_chat__job_id__feedback_put: {
+ parameters: {
+ query: {
+ feedback: number;
+ };
+ header?: {
+ /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */
+ "run-as"?: string | null;
+ };
+ path: {
+ job_id: string | null;
+ };
+ cookie?: never;
+ };
+ requestBody?: never;
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": number;
+ };
+ };
+ /** @description Request Error */
+ "4XX": {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["MessageExceptionModel"];
+ };
+ };
+ /** @description Server Error */
+ "5XX": {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["MessageExceptionModel"];
};
};
};
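The regenerated client schema above reflects the two server routes added in this change: POST `/api/chat` now takes a nullable `job_id` query parameter alongside the `ChatPayload` body, and PUT `/api/chat/{job_id}/feedback` records a numeric feedback value. A minimal sketch of exercising both over HTTP follows; the instance URL, the encoded job id placeholder, and the `x-api-key` auth header are assumptions for illustration, not part of this diff.

```python
# Hedged sketch: calling the new chat endpoints with `requests`.
# GALAXY_URL, API_KEY, ENCODED_JOB_ID, and the auth header are assumed values.
import requests

GALAXY_URL = "https://galaxy.example.org"
API_KEY = "YOUR_API_KEY"
HEADERS = {"x-api-key": API_KEY}
ENCODED_JOB_ID = "f2db41e1fa331b3e"  # hypothetical encoded job id

# POST /api/chat: job_id travels as a query parameter, the question as the JSON body.
resp = requests.post(
    f"{GALAXY_URL}/api/chat",
    params={"job_id": ENCODED_JOB_ID},
    json={"query": "Why did my tool run fail?", "context": ""},
    headers=HEADERS,
)
resp.raise_for_status()
answer = resp.json()  # the endpoint returns a plain JSON string

# PUT /api/chat/{job_id}/feedback: feedback must be 0 or 1.
fb = requests.put(
    f"{GALAXY_URL}/api/chat/{ENCODED_JOB_ID}/feedback",
    params={"feedback": 1},
    headers=HEADERS,
)
fb.raise_for_status()
print(answer, fb.json())
```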
diff --git a/client/src/components/DatasetInformation/DatasetError.vue b/client/src/components/DatasetInformation/DatasetError.vue
index 580554c1feb4..cbe9f3e40189 100644
--- a/client/src/components/DatasetInformation/DatasetError.vue
+++ b/client/src/components/DatasetInformation/DatasetError.vue
@@ -2,7 +2,7 @@
import { library } from "@fortawesome/fontawesome-svg-core";
import { faBug } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/vue-fontawesome";
-import { BAlert, BButton } from "bootstrap-vue";
+import { BAlert, BButton, BCard } from "bootstrap-vue";
import { storeToRefs } from "pinia";
import { computed, onMounted, ref } from "vue";
@@ -155,6 +155,21 @@ onMounted(async () => {
>.
+ Possible Causes
+
+
+ We can use AI to analyze the issue and suggest possible fixes. Please note that the diagnosis may
+ not always be accurate.
+
+
+
+
+
+
{
- What might have happened?
-
-
-
-
Issue Report
diff --git a/client/src/components/GalaxyWizard.vue b/client/src/components/GalaxyWizard.vue
index cd6fc9a5d90b..e648a3cda701 100644
--- a/client/src/components/GalaxyWizard.vue
+++ b/client/src/components/GalaxyWizard.vue
@@ -1,56 +1,83 @@
+
-
-
Ask the wizard
+
+ @keyup.enter="submitQuery" /> -->
+
+ {{ errorMessage }}
+
+
+ Let our Help Wizard Figure it out!
+
+
-
-
-
+
+
+
+
+
+
+
+
+
+ Was this answer helpful?
+
+
+
+
+
+
+ This feedback helps us improve our responses.
+ Thank you for your feedback!
-
- {{ queryResponse }}
-
-
-
- The text above is generated by an AI model. It's based on the input provided and may not always
- be accurate. Please don't hesitate to ask on the usual channels if you need more help.
-
-
-
@@ -88,8 +129,18 @@ function submitQuery() {
.chatResponse {
white-space: pre-wrap;
}
-.disclaimer {
- font-size: 0.8em;
- color: #666;
+.submitted svg {
+ animation: swoosh-up 1s forwards;
+}
+@keyframes swoosh-up {
+ 0% {
+ transform: translateY(0);
+ }
+ 50% {
+ transform: translateY(-20px);
+ }
+ 100% {
+ transform: translateY(0);
+ }
}
diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml
index 64ba9871bca4..e659efd6d21c 100644
--- a/lib/galaxy/config/schemas/config_schema.yml
+++ b/lib/galaxy/config/schemas/config_schema.yml
@@ -3929,6 +3929,13 @@ mapping:
desc: |
API key for OpenAI (https://openai.com/) to enable the wizard (or more?)
+ openai_model:
+ type: str
+ default: gpt-4o
+ required: false
+ desc: |
+ The OpenAI model to use for the wizard.
+
enable_tool_recommendations:
type: bool
default: false
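For reference, the chat API reads this option from the application configuration when it calls OpenAI (see `_call_openai` further down); a one-line sketch, with `config` assumed to be a loaded `GalaxyAppConfiguration`:

```python
# Hedged sketch: the configured model (default "gpt-4o") is what the chat API passes to OpenAI.
model = getattr(config, "openai_model", "gpt-4o")  # `config` assumed to be a GalaxyAppConfiguration
```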
diff --git a/lib/galaxy/managers/chat.py b/lib/galaxy/managers/chat.py
new file mode 100644
index 000000000000..6eb1484fa3c7
--- /dev/null
+++ b/lib/galaxy/managers/chat.py
@@ -0,0 +1,111 @@
+from typing import Optional
+
+from fastapi import Path
+from sqlalchemy import select
+from sqlalchemy.exc import (
+ MultipleResultsFound,
+ NoResultFound,
+)
+from typing_extensions import Annotated
+
+from galaxy.exceptions import (
+ InconsistentDatabase,
+ InternalServerError,
+ RequestParameterInvalidException,
+)
+from galaxy.managers import base
+from galaxy.managers.context import ProvidesUserContext
+from galaxy.model import ChatExchange
+from galaxy.model.base import transaction
+from galaxy.schema.fields import DecodedDatabaseIdField
+from galaxy.util import unicodify
+
+JobIdPathParam = Optional[
+ Annotated[
+ DecodedDatabaseIdField,
+ Path(title="Job ID", description="The Job ID the chat exchange is linked to."),
+ ]
+]
+
+MessageIdPathParam = Optional[
+ Annotated[
+ DecodedDatabaseIdField,
+ Path(title="Message ID", description="The ChatMessage ID."),
+ ]
+]
+
+
+class ChatManager(base.ModelManager[ChatExchange]):
+ """
+ Business logic for chat exchanges.
+ """
+
+ model_class = ChatExchange
+
+ def create(self, trans: ProvidesUserContext, job_id: JobIdPathParam, response: str) -> ChatExchange:
+ """
+ Create a new chat exchange in the DB. Currently these are *only* job-based chat exchanges; this will need to be generalized down the road.
+ :param job_id: id of the job to associate the response with
+ :type job_id: int
+ :param response: the response to save in the DB
+ :type response: str
+ :returns: the created ChatExchange object
+ :rtype: galaxy.model.ChatExchange
+ :raises: InternalServerError
+ """
+ chat_exchange = ChatExchange(user=trans.user, job_id=job_id, message=response)
+ trans.sa_session.add(chat_exchange)
+ with transaction(trans.sa_session):
+ trans.sa_session.commit()
+ return chat_exchange
+
+ def get(self, trans: ProvidesUserContext, job_id: JobIdPathParam) -> Optional[ChatExchange]:
+ """
+ Returns the chat response from the DB based on the given job id.
+ :param job_id: id of the job to load a response for from the DB
+ :type job_id: int
+ :returns: the loaded ChatExchange object, or None if no exchange exists for the given job id
+ :rtype: galaxy.model.ChatExchange
+ :raises: InconsistentDatabase, InternalServerError
+ """
+ try:
+ stmt = select(ChatExchange).where(ChatExchange.job_id == job_id)
+ chat_response = self.session().execute(stmt).scalar_one()
+ except MultipleResultsFound:
+ # TODO: Unsure about this, isn't this more applicable when we're getting the response for response.id instead of response.job_id?
+ raise InconsistentDatabase("Multiple chat responses found with the same job id.")
+ except NoResultFound:
+ # TODO: Would there be cases where we raise an exception here? Or, is there a better way to return None?
+ # raise RequestParameterInvalidException("No accessible response found with the id provided.")
+ return None
+ except Exception as e:
+ raise InternalServerError(f"Error loading from the database: {unicodify(e)}")
+ return chat_response
+
+ def set_feedback_for_job(self, trans: ProvidesUserContext, job_id: JobIdPathParam, feedback: int) -> ChatExchange:
+ """
+ Set the feedback for a chat response.
+ :param job_id: id of the job to associate the feedback with
+ :type job_id: int
+ :param feedback: the feedback to save in the DB (0 or 1)
+ :type feedback: int
+ :returns: the updated ChatExchange object
+ :rtype: galaxy.model.ChatExchange
+ :raises: RequestParameterInvalidException
+ """
+
+ # TODO: Set feedback for specific messages as we allow multiple messages per exchange, not this method targeting job.
+
+ # Validate the feedback; it should be 0 or 1
+ if feedback not in [0, 1]:
+ raise RequestParameterInvalidException("Feedback should be 0 or 1.")
+
+ chat_exchange = self.get(trans, job_id)
+ if chat_exchange is None:
+     raise RequestParameterInvalidException("No chat exchange found for the given job id.")
+
+ # There is only one message in an exchange currently, so we can set the feedback on the first message
+ chat_exchange.messages[0].feedback = feedback
+
+ with transaction(trans.sa_session):
+ trans.sa_session.commit()
+
+ return chat_exchange
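The manager exposes three operations that the chat API composes: `create` stores an answer against a job, `get` returns the stored exchange (or `None`), and `set_feedback_for_job` records a 0/1 rating on the exchange's single message. A sketch of how they fit together, assuming `trans` is the usual `ProvidesUserContext` and `chat_manager` an injected `ChatManager`:

```python
# Illustrative composition of the ChatManager methods (not part of the diff).
# `chat_manager`, `trans`, and `generate_answer` are assumed to be supplied by the caller.

def answer_for_job(chat_manager, trans, job_id, generate_answer):
    """Return the cached answer for a job, generating and storing one if needed."""
    exchange = chat_manager.get(trans, job_id)
    if exchange is not None:
        # Exchanges currently hold exactly one message.
        return exchange.messages[0].message
    answer = generate_answer()  # e.g. a call out to the OpenAI API
    chat_manager.create(trans, job_id, answer)
    return answer

def record_feedback(chat_manager, trans, job_id, helpful: bool):
    """Persist thumbs-up/thumbs-down feedback; the manager validates the value is 0 or 1."""
    exchange = chat_manager.set_feedback_for_job(trans, job_id, 1 if helpful else 0)
    return exchange.messages[0].feedback
```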
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index 5de249d3a7d5..45c529c1cf99 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2979,15 +2979,15 @@ class ChatExchange(Base, RepresentById):
__tablename__ = "chat_exchange"
id: Mapped[int] = mapped_column(primary_key=True)
- user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True)
+ user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=False)
+ job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True, nullable=True)
user: Mapped["User"] = relationship()
- messages: Mapped[List["ChatExchangeMessage"]] = relationship(
- back_populates="chat_exchange", cascade_backrefs=False
- )
+ messages: Mapped[List["ChatExchangeMessage"]] = relationship(back_populates="chat_exchange", cascade_backrefs=False)
- def __init__(self, user, message, **kwargs):
+ def __init__(self, user, job_id, message, **kwargs):
self.user = user
+ self.job_id = job_id
self.messages = [ChatExchangeMessage(message=message)]
def add_message(self, message):
@@ -2998,11 +2998,16 @@ class ChatExchangeMessage(Base, RepresentById):
__tablename__ = "chat_exchange_message"
id: Mapped[int] = mapped_column(primary_key=True)
- chat_exchange_id: Mapped[int] = mapped_column(Integer, ForeignKey("chat_exchange.id"), index=True)
- create_time: Mapped[datetime] = mapped_column(DateTime, default=func.now)
+ chat_exchange_id: Mapped[int] = mapped_column(ForeignKey("chat_exchange.id"), index=True)
+ create_time: Mapped[datetime] = mapped_column(default=now, nullable=True)
message: Mapped[str] = mapped_column(Text)
+ feedback: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
chat_exchange: Mapped["ChatExchange"] = relationship("ChatExchange", back_populates="messages")
+ def __init__(self, message, feedback=None):
+ self.message = message
+ self.feedback = feedback
+
class Group(Base, Dictifiable, RepresentById):
__tablename__ = "galaxy_group"
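At the ORM level, a `ChatExchange` is created with the user, the job it belongs to, and the initial message text, which the constructor wraps in a `ChatExchangeMessage`. A small sketch of the mapped objects above; `user`, `job`, and `session` are assumed to exist:

```python
# Sketch of the new model objects; `user`, `job`, and `session` are assumed.
from galaxy.model import ChatExchange

exchange = ChatExchange(user=user, job_id=job.id, message="The tool ran out of memory ...")
first_message = exchange.messages[0]         # the constructor creates the first ChatExchangeMessage
first_message.feedback = 1                   # 0 or 1; stays NULL until the user rates the answer
exchange.add_message("A follow-up answer")   # further messages can be appended later

session.add(exchange)
session.commit()
```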
diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/cbc46035eba0_chat_exchange_storage.py b/lib/galaxy/model/migrations/alembic/versions_gxy/cbc46035eba0_chat_exchange_storage.py
index 83b8afc2fded..c5767ac7ace7 100644
--- a/lib/galaxy/model/migrations/alembic/versions_gxy/cbc46035eba0_chat_exchange_storage.py
+++ b/lib/galaxy/model/migrations/alembic/versions_gxy/cbc46035eba0_chat_exchange_storage.py
@@ -12,9 +12,9 @@
from sqlalchemy import (
Column,
DateTime,
+ ForeignKey,
Integer,
Text,
- ForeignKey,
)
from galaxy.model.migrations.util import (
diff --git a/lib/galaxy/schema/schema.py b/lib/galaxy/schema/schema.py
index 58f1b4e32fed..e758e03a6861 100644
--- a/lib/galaxy/schema/schema.py
+++ b/lib/galaxy/schema/schema.py
@@ -3710,13 +3710,13 @@ class MaterializeDatasetInstanceRequest(MaterializeDatasetInstanceAPIRequest):
class ChatPayload(Model):
query: str = Field(
...,
- title="Message",
- description="The message to be sent to the chat.",
+ title="Query",
+ description="The query to be sent to the chatbot.",
)
context: Optional[str] = Field(
default="",
title="Context",
- description="A context identifier to be used by the chat.",
+ description="The context for the chatbot.",
)
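The reworked payload therefore has a required `query` and an optional free-text `context` that defaults to an empty string; a minimal construction sketch (field values are illustrative):

```python
# Minimal sketch of the updated payload; only `query` is required.
from galaxy.schema.schema import ChatPayload

payload = ChatPayload(query="This tool failed with a MemoryError - what can I do?")
assert payload.context == ""  # optional context defaults to an empty string
```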
diff --git a/lib/galaxy/webapps/galaxy/api/chat.py b/lib/galaxy/webapps/galaxy/api/chat.py
index 817b88962510..ed7a5a36ec82 100644
--- a/lib/galaxy/webapps/galaxy/api/chat.py
+++ b/lib/galaxy/webapps/galaxy/api/chat.py
@@ -4,59 +4,125 @@
import logging
-try:
- import openai
-except ImportError:
- openai = None
-
from galaxy.config import GalaxyAppConfiguration
+from galaxy.exceptions import ConfigurationError
+from galaxy.managers.chat import (
+ ChatManager,
+ JobIdPathParam,
+)
from galaxy.managers.context import ProvidesUserContext
+from galaxy.schema.schema import ChatPayload
from galaxy.webapps.galaxy.api import (
depends,
DependsOnTrans,
Router,
)
-from galaxy.exceptions import ConfigurationError
-from galaxy.schema.schema import ChatPayload
+
+try:
+ import openai
+except ImportError:
+ openai = None
log = logging.getLogger(__name__)
router = Router(tags=["chat"])
-PROMPT = """
-You are a juestion answering agent, expert on the Galaxy analysis platform and in the fields of computer science, bioinformatics, and genomics.
-You will try to answer questions about Galaxy, and if you don't know the answer you will state that.
+DEFAULT_PROMPT = """
+Please only state in your response that something went wrong when configuring the AI prompt.
"""
@router.cbv
class ChatAPI:
config: GalaxyAppConfiguration = depends(GalaxyAppConfiguration)
+ chat_manager: ChatManager = depends(ChatManager)
@router.post("/api/chat")
- def query(self, query: ChatPayload, trans: ProvidesUserContext = DependsOnTrans) -> str:
+ def query(
+ self,
+ job_id: JobIdPathParam,
+ payload: ChatPayload,
+ trans: ProvidesUserContext = DependsOnTrans,
+ ) -> str:
"""We're off to ask the wizard"""
- if openai is None or self.config.openai_api_key is None:
+ answer = None
+
+ if job_id:
+ existing_response = self.chat_manager.get(trans, job_id)
+ # Currently job-based chat exchanges are the only ones supported,
+ # and will only have the one message.
+ # TODO: Support regenerating the response as a new message, and
+ # asking follow-up questions.
+ if existing_response and existing_response.messages[0]:
+ answer = existing_response.messages[0].message
+
+ if not answer:
+ self._ensure_openai_configured()
+
+ messages = self._build_messages(payload, trans)
+ log.debug(f"CHATGPT messages: {messages}")
+
+ response = self._call_openai(messages)
+ answer = response.choices[0].message.content
+
+ # TODO: Maybe we need to first check if the job_id exists (in the `job` table)?
+ if job_id:
+ self.chat_manager.create(trans, job_id, answer)
+
+ return answer
+
+ @router.put("/api/chat/{job_id}/feedback")
+ def feedback(
+ self,
+ job_id: JobIdPathParam,
+ feedback: int,
+ trans: ProvidesUserContext = DependsOnTrans,
+ ) -> int:
+ """Provide feedback on the chatbot response."""
+ chat_response = self.chat_manager.set_feedback_for_job(trans, job_id, feedback)
+ return chat_response.messages[0].feedback
+
+ def _ensure_openai_configured(self):
+ """Ensure OpenAI is available and configured with an API key."""
+ if openai is None:
+ raise ConfigurationError("OpenAI is not installed. Please install openai to use this feature.")
+ if self.config.openai_api_key is None:
raise ConfigurationError("OpenAI is not configured for this instance.")
- client = openai.OpenAI(
- api_key=self.config.openai_api_key,
- )
+ openai.api_key = self.config.openai_api_key
+ def _get_system_prompt(self) -> str:
+ """Get the system prompt for OpenAI."""
+ return self.config.chat_prompts.get("tool_error", DEFAULT_PROMPT)
+
+ def _build_messages(self, payload: ChatPayload, trans: ProvidesUserContext) -> list:
+ """Build the message array to send to OpenAI."""
messages = [
- {"role": "system", "content": PROMPT},
- {"role": "user", "content": query.query},
+ {"role": "system", "content": self._get_system_prompt()},
+ {"role": "user", "content": payload.query},
]
- if query.context == "tool_error":
- msg = "The user will provide you a Galaxy tool error, and you will try to explain the error and provide a very concise solution. No more than a paragraph."
- messages.append({"role": "system", "content": msg})
+ user_msg = self._get_user_context_message(trans)
+ if user_msg:
+ messages.append({"role": "system", "content": user_msg})
+ return messages
- completion = client.chat.completions.create(
- model="gpt-4o",
- messages=messages,
- temperature=0,
- )
- answer = completion.choices[0].message.content
- return answer
+ def _get_user_context_message(self, trans: ProvidesUserContext) -> str:
+ """Generate a user context message based on the user's information."""
+ user = trans.user
+ if user:
+ log.debug(f"CHATGPT user: {user.username}")
+ return f"You will address the user as {user.username}"
+ return "You will address the user as Anonymous User"
+
+ def _call_openai(self, messages: list):
+ """Send a chat request to OpenAI and handle exceptions."""
+ try:
+ return openai.chat.completions.create(
+ model=self.config.openai_model,
+ messages=messages,
+ )
+ except Exception as e:
+ log.error(f"Error calling OpenAI: {e}")
+ raise ConfigurationError("An error occurred while communicating with OpenAI.")
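Putting the helpers together, `_build_messages` produces a short system/user/system message list and `_call_openai` forwards it with the configured model; a sketch of the resulting call, with the prompt text and username values illustrative:

```python
# Hedged sketch of what the query endpoint ends up sending to OpenAI.
messages = [
    {"role": "system", "content": "<configured 'tool_error' prompt, or DEFAULT_PROMPT>"},
    {"role": "user", "content": "This tool failed with a MemoryError - what can I do?"},
    {"role": "system", "content": "You will address the user as jane_doe"},  # from _get_user_context_message
]

response = openai.chat.completions.create(
    model=config.openai_model,  # the new config option, default "gpt-4o"
    messages=messages,
)
answer = response.choices[0].message.content
```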