Fix multiturn dialogue prompt and chat_history
wwxxzz committed Dec 20, 2024
1 parent fd31306 commit 37bb117
Showing 5 changed files with 64 additions and 19 deletions.
1 change: 0 additions & 1 deletion src/pai_rag/app/web/rag_client.py
@@ -236,7 +236,6 @@ def query(
             with_intent=with_intent,
             index_name=index_name,
         )
-        print(q)
         r = requests.post(self.query_url, json=q, stream=True)
         if r.status_code != HTTPStatus.OK:
             raise RagApiError(code=r.status_code, msg=r.text)
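The `query` method above posts with `stream=True` and surfaces HTTP failures as `RagApiError`; this hunk only drops a leftover debug print. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of a streaming query client; the endpoint URL, payload shape, and plain exception are illustrative assumptions, not code from this commit.

```python
import requests
from http import HTTPStatus

# Hypothetical endpoint and payload; the real client builds these from its config.
QUERY_URL = "http://localhost:8001/service/query"

def stream_query(question: str):
    """POST a query and yield the response body chunk by chunk."""
    q = {"question": question}  # assumed payload shape
    r = requests.post(QUERY_URL, json=q, stream=True)
    if r.status_code != HTTPStatus.OK:
        # Mirrors the RagApiError pattern above with a plain exception.
        raise RuntimeError(f"query failed ({r.status_code}): {r.text}")
    # stream=True defers the body download; iter_lines yields it as it arrives.
    for line in r.iter_lines(decode_unicode=True):
        if line:
            yield line
```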
17 changes: 15 additions & 2 deletions src/pai_rag/app/web/tabs/chat_tab.py
@@ -36,6 +36,9 @@ def respond(input_elements: List[Any]):
     index_name = update_dict["chat_index"]
     citation = update_dict["citation"]

+    print('update_dict["include_history"]', update_dict["include_history"])
+    if not update_dict["include_history"]:
+        chatbot, _ = clear_history(chatbot)
     if chatbot is not None:
         chatbot.append((msg, ""))
     yield chatbot
@@ -65,7 +68,6 @@ def respond(input_elements: List[Any]):
         citation=citation,
         index_name=index_name,
     )
-
     for resp in response_gen:
         chatbot[-1] = (msg, resp.result)
         yield chatbot
@@ -114,6 +116,7 @@ def create_chat_tab() -> Dict[str, Any]:
                 label="Chat history",
                 info="Query with chat history.",
                 elem_id="include_history",
+                value=True,
             )

             with gr.Column(visible=True) as vs_col:
@@ -385,7 +388,17 @@ def change_query_radio(query_type):

         with gr.Column(scale=8):
             chatbot = gr.Chatbot(height=500, elem_id="chatbot")
-            question = gr.Textbox(label="Enter your question.", elem_id="question")
+            with gr.Row():
+                include_history = gr.Checkbox(
+                    label="Chat history",
+                    info="Query with chat history.",
+                    elem_id="include_history",
+                    value=True,
+                    scale=1,
+                )
+                question = gr.Textbox(
+                    label="Enter your question.", elem_id="question", scale=9
+                )
             with gr.Row():
                 submitBtn = gr.Button("Submit", variant="primary")
                 clearBtn = gr.Button("Clear History", variant="secondary")
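Taken together, the two `respond` hunks gate multi-turn context on the new checkbox: when "Chat history" is unchecked, the chat log is cleared before the query is sent, so the backend answers a single-turn question. A minimal sketch of that control flow, with `clear_history` stubbed out (the real helper also resets the stored session):

```python
from typing import Iterator, List, Tuple

Chat = List[Tuple[str, str]]

def clear_history(chatbot: Chat) -> Tuple[Chat, None]:
    """Stub: the real clear_history also clears server-side session state."""
    return [], None

def respond(msg: str, chatbot: Chat, include_history: bool) -> Iterator[Chat]:
    # New behavior from this commit: drop prior turns when history is disabled.
    if not include_history:
        chatbot, _ = clear_history(chatbot)
    chatbot.append((msg, ""))
    # Streaming loop from the second hunk: rewrite the last turn per chunk.
    for partial in ("Basket", "Basketball is", "Basketball is fun."):
        chatbot[-1] = (msg, partial)
        yield chatbot
```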
17 changes: 16 additions & 1 deletion src/pai_rag/integrations/chat_store/pai/pai_chat_store.py
@@ -8,6 +8,7 @@
 from pydantic import BaseModel
 from llama_index.core.bridge.pydantic import PrivateAttr
 from loguru import logger
+from collections import deque

 CHAT_STORE_FILE = "chat_store.json"
 DEFAULT_LOCAL_STORAGE_PATH = "./localdata/storage/"
@@ -74,7 +75,21 @@ def set_messages(self, key: str, messages: List[ChatMessage]) -> None:

     def get_messages(self, key: str) -> List[ChatMessage]:
         """Get messages for a key."""
-        return self._chat_store.get_messages(key)
+        default_messages = self._chat_store.get_messages(key)
+        recent_messages = deque(default_messages[-20:], maxlen=20)
+        assistant_count = 0
+        ret_messages = []
+        for msg in reversed(recent_messages):
+            if msg.role == "assistant":
+                if assistant_count < 2:
+                    msg.content = msg.content[:200]
+                ret_messages.append(msg)
+                assistant_count += 1
+            else:
+                ret_messages.append(msg)
+
+        ret_messages.reverse()
+        return ret_messages

     def add_message(self, key: str, message: ChatMessage) -> None:
         """Add a message for a key."""
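The new `get_messages` bounds what downstream consumers see: only the last 20 stored messages are considered, and the two most recent assistant replies are clipped to their first 200 characters (note it is the newest replies that get clipped, since the loop walks the window in reverse). A standalone illustration of the same policy, with `ChatMessage` simplified to a dataclass:

```python
from collections import deque
from dataclasses import dataclass
from typing import List

@dataclass
class ChatMessage:  # simplified stand-in for llama_index's ChatMessage
    role: str
    content: str

def trim_history(messages: List[ChatMessage]) -> List[ChatMessage]:
    recent = deque(messages[-20:], maxlen=20)  # keep at most the last 20
    assistant_count, trimmed = 0, []
    for msg in reversed(recent):  # newest -> oldest
        if msg.role == "assistant":
            if assistant_count < 2:  # clip the two newest assistant replies
                msg.content = msg.content[:200]
            assistant_count += 1
        trimmed.append(msg)
    trimmed.reverse()  # restore chronological order
    return trimmed

history = [ChatMessage("user", "hi"), ChatMessage("assistant", "x" * 1000)]
print(len(trim_history(history)[-1].content))  # -> 200
```

One detail worth noting: the clipping assigns to `msg.content` on the objects the underlying store returned, so whether it also rewrites stored history depends on whether that store hands out copies.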
7 changes: 3 additions & 4 deletions (file path not captured)
@@ -11,7 +11,7 @@
 from llama_index.core.storage.chat_store.base import BaseChatStore

 from pai_rag.utils.prompt_template import (
-    CONDENSE_QUESTION_CHAT_ENGINE_PROMPT_ZH,
+    CONDENSE_QUESTION_CHAT_ENGINE_PROMPT,
     DEFAULT_FUSION_TRANSFORM_PROMPT,
 )

@@ -157,7 +157,7 @@ def __init__(
             resolve_llm(llm, callback_manager=callback_manager) if llm else Settings.llm
         )
         self._condense_question_prompt = (
-            condense_question_prompt or CONDENSE_QUESTION_CHAT_ENGINE_PROMPT_ZH
+            condense_question_prompt or CONDENSE_QUESTION_CHAT_ENGINE_PROMPT
         )
         self._chat_store = chat_store

@@ -176,7 +176,6 @@ def _run(self, query_bundle: QueryBundle, session_id, chat_history) -> QueryBundle:
         """Run query transform.
         Generate standalone question from conversation context and last message."""
         query_str = query_bundle.query_str
-
         if chat_history is not None:
             history_messages = parse_chat_messages(chat_history)
             for hist_mes in history_messages:
@@ -223,7 +222,6 @@ async def _arun(
         """Run query transform.
         Generate standalone question from conversation context and last message."""
         query_str = query_bundle.query_str
-
         if chat_history is not None:
             history_messages = parse_chat_messages(chat_history)
             for hist_mes in history_messages:
@@ -235,6 +233,7 @@
                 return query_bundle

         chat_history_str = messages_to_history_str(chat_history)
+        logger.debug(f"Chat history: {chat_history_str}")
         query_bundle_str = await self._llm.apredict(
             self._condense_question_prompt,
             question=query_str,
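The `_run`/`_arun` pair condenses the conversation plus the new message into a standalone question by flattening the history to a string and calling `self._llm.predict`/`apredict` with the condense prompt. A self-contained sketch of that flow, with the LLM call replaced by a stub (`fake_llm_predict` and the `(role, content)` history format are assumptions for illustration):

```python
from typing import Callable, List, Tuple

def messages_to_history_str(chat_history: List[Tuple[str, str]]) -> str:
    """Flatten (role, content) pairs into the 'role: content' lines the prompt expects."""
    return "\n".join(f"{role}: {content}" for role, content in chat_history)

def condense_question(
    llm_predict: Callable[..., str],
    chat_history: List[Tuple[str, str]],
    question: str,
) -> str:
    if not chat_history:
        return question  # first turn: nothing to condense
    history_str = messages_to_history_str(chat_history)
    return llm_predict(chat_history=history_str, question=question)

# Stub LLM standing in for a predict call with CONDENSE_QUESTION_CHAT_ENGINE_PROMPT:
fake_llm_predict = lambda **kw: "Is playing basketball fun?"
history = [("user", "What did you do this morning?"), ("assistant", "Go play basketball")]
print(condense_question(fake_llm_predict, history, "Is it fun?"))
```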
41 changes: 30 additions & 11 deletions src/pai_rag/utils/prompt_template.py
@@ -67,32 +67,51 @@

 CONDENSE_QUESTION_CHAT_ENGINE_PROMPT = PromptTemplate(
     """\
-Given a conversation (between Human and Assistant) and a follow up message from Human, \
-rewrite the message to be a standalone question that captures all relevant context \
-from the conversation.
+Please play the role of an intelligent search rewriting and completion robot. According to the user's chat history and the corresponding new question, please first rewrite the subject inheritance of the new question, and then complete the context information. Note: Do not change the meaning of the new question, the answer should be as concise as possible, do not directly answer the question, and do not output more content.
+Example:
+<Chat history>
+User: What did you do this morning?
+Assistant: Go play basketball
-<Chat History>
+<New question>
+User: Is it fun?
+Answer:
+Answer: Is playing basketball fun?
+Now it's your turn:
+<Chat history>
 {chat_history}
-<Follow Up Message>
+<New question>
 {question}
-<Standalone question>
+Please think carefully and give your answer using the same language as the <New question>:
 """
 )

 CONDENSE_QUESTION_CHAT_ENGINE_PROMPT_ZH = PromptTemplate(
     """\
-给定一次对话(人类和助理之间)以及来自人类的后续消息,\
-将消息重写为一个独立的问题,捕获对话中的所有相关上下文。
+请你扮演一个智能搜索改写补全机器人,请根据User的聊天历史以及对应的新问题,对新问题先进行主语继承改写,然后进行上下文信息补全,注意:不要改变新问题的意思,答案要尽可能简洁,不要直接回答该问题,不要输出多于的内容。
+例子:
+<聊天历史>
+User:今天上午你干嘛了
+Assistant:去打篮球啦
+<新问题>
+User:好玩吗?
+答案:
+答案:打篮球好玩吗?
-<聊天记录>
+现在轮到你了:
+<聊天历史>
 {chat_history}
-<后续消息>
+<新问题>
 {question}
-<独立问题>
+请仔细思考后,使用和<新问题>相同的语言,给出你的答案:
 """
 )
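Both templates are ordinary llama_index `PromptTemplate`s filled with `chat_history` and `question` at query time. A quick sketch of how the fields are substituted (the shortened template string here is illustrative, not the full prompt above):

```python
from llama_index.core import PromptTemplate

demo = PromptTemplate(
    "<Chat history>\n{chat_history}\n<New question>\n{question}\n"
)
print(
    demo.format(
        chat_history="User: What did you do this morning?\nAssistant: Go play basketball",
        question="User: Is it fun?",
    )
)
```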

