Merge pull request #544 from Anhforth/master
changed aquila prediction
BAAI-OpenPlatform authored Oct 12, 2023
2 parents 44cde68 + b537a4d commit 0205d1f
Showing 2 changed files with 30 additions and 3 deletions.
17 changes: 17 additions & 0 deletions flagai/model/aquila2/conversation.py
@@ -162,6 +162,8 @@ def get_conversation_template(model_path: str) -> Conversation:
        return get_conv_template("aquila-v1")
    elif "aquila-chat" in model_path:
        return get_conv_template("aquila-chat")
+    elif "aquila-legacy" in model_path:
+        return get_conv_template("aquila-legacy")
    else:
        return get_conv_template("aquila")

@@ -182,6 +184,21 @@ def get_conversation_template(model_path: str) -> Conversation:
    )
)

+register_conv_template(
+    Conversation(
+        name="aquila-legacy",
+        system_message="A chat between a curious human and an artificial intelligence assistant. "
+        "The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n",
+        roles=("### Human: ", "### Assistant: ", "System"),
+        messages=(),
+        offset=0,
+        sep_style=SeparatorStyle.NO_COLON_TWO,
+        sep="\n",
+        sep2="</s>",
+        stop_str=["</s>", "[UNK]"],
+    )
+)
+
register_conv_template(
    Conversation(
        name="aquila",
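
In practice, any model path containing "aquila-legacy" now resolves to this new template. A minimal sketch of the intended lookup, assuming the FastChat-style Conversation helpers (append_message, get_prompt) this module follows; the checkpoint path and prompt text are illustrative assumptions, not part of this commit:

from flagai.model.aquila2.conversation import get_conversation_template

# Hypothetical local path; the substring "aquila-legacy" is what triggers the new branch.
conv = get_conversation_template("./checkpoints/aquila-legacy")
assert conv.name == "aquila-legacy"

# One turn using the "### Human: " / "### Assistant: " roles registered above.
conv.append_message(conv.roles[0], "What does FlagAI provide?")
conv.append_message(conv.roles[1], None)
prompt = conv.get_prompt()  # rendered with the NO_COLON_TWO separators ("\n", "</s>")
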
16 changes: 13 additions & 3 deletions flagai/model/aquila2/modeling_aquila.py
@@ -920,13 +920,23 @@ def predict(self, text, tokenizer=None,
                max_gen_len=200, top_p=0.95,
                seed=1234, topk=100,
                temperature=0.9,
-                sft=True, convo_template = "aquila-chat",
-                device = "cuda"):
+                sft=True, convo_template = "",
+                device = "cuda",
+                model_name="AquilaChat2-7B",
+                **kwargs):

        vocab = tokenizer.get_vocab()
        #device = device

        id2word = {v:k for k, v in vocab.items()}



+        template_map = {"AquilaChat2-7B": "aquila-v1",
+                        "AquilaChat2-34B": "aquila-legacy",
+                        "AquilaChat2-7B-16K": "aquila",
+                        "AquilaChat2-34B-16K": "aquila-v1"}
+        if not convo_template:
+            convo_template=template_map.get(model_name, "aquila-chat")

        set_random_seed(seed)
        if temperature == 0:
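
With this change, predict() can pick the conversation template from model_name when convo_template is left empty. A rough usage sketch, assuming model and tokenizer are an already loaded AquilaChat2 checkpoint and its tokenizer (the loading step and the prompt text are assumptions, not shown in this commit):

# model, tokenizer: assumed to come from the usual FlagAI Aquila2 loading flow.
out = model.predict(
    "Briefly introduce the Aquila2 model series.",
    tokenizer=tokenizer,
    model_name="AquilaChat2-34B",   # resolves to the "aquila-legacy" template via template_map
)
print(out)

# Passing convo_template explicitly still overrides the model_name lookup,
# and unrecognized model names fall back to the "aquila-chat" template.
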
