
Merge pull request #16 from sugarforever/feature/openai-support
feature: openai models support in knowledgebase chat
sugarforever authored Mar 8, 2024
2 parents 41d23fb + 256fbf7 commit eb70233
Showing 8 changed files with 125 additions and 20 deletions.
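At a glance, the commit threads user-supplied API keys from browser storage through request headers to the server-side chat chain. A rough map of the new data flow (illustrative summary, not part of the diff):

// localStorage["keys.openai_api_key"]               -- utils/settings.ts (loadKey / saveKey)
//   -> request header "x_openai_api_key"            -- components/Chat.vue, components/ModelsDropdown.vue
//   -> event.context.keys                           -- server/middleware/keys.ts
//   -> new ChatOpenAI({ openAIApiKey, modelName })  -- server/api/models/chat/index.post.ts,
//      used whenever the selected model appears in OPENAI_MODELS (server/utils/models.ts)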
29 changes: 21 additions & 8 deletions components/Chat.vue
@@ -9,7 +9,15 @@ import MarkdownItSub from "markdown-it-sub";
import MarkdownItSup from "markdown-it-sup";
import MarkdownItTasklists from "markdown-it-task-lists";
import MarkdownItTOC from "markdown-it-toc-done-right";
import { loadOllamaHost, loadOllamaUserName, loadOllamaPassword, loadOllamaInstructions } from '@/utils/settings';
import {
loadOllamaHost,
loadOllamaUserName,
loadOllamaPassword,
loadOllamaInstructions,
loadKey,
OPENAI_API_KEY,
ANTHROPIC_API_KEY
} from '@/utils/settings';
const props = defineProps({
knowledgebase: Object
@@ -114,9 +122,11 @@ const onSend = async () => {
method: 'POST',
body: body,
headers: {
'x_ollama_host': loadOllamaHost(),
'x_ollama_username': loadOllamaUserName(),
'x_ollama_password': loadOllamaPassword(),
'x_ollama_host': loadOllamaHost() || '',
'x_ollama_username': loadOllamaUserName() || '',
'x_ollama_password': loadOllamaPassword() || '',
'x_openai_api_key': loadKey(OPENAI_API_KEY) || '',
'x_anthropic_api_key': loadKey(ANTHROPIC_API_KEY) || '',
'Content-Type': 'application/json',
},
});
@@ -143,6 +153,7 @@ onMounted(() => {
});
</script>
<template>
<div class="flex flex-col flex-1 p-4">
<div class="flex flex-row items-center justify-between mb-4 pb-4">
@@ -153,10 +164,12 @@
<ModelsDropdown @modelSelected="onModelSelected" />
</div>
<div class="flex flex-row items-center justify-between mb-4 pb-4 border-b border-b-gray-200">
<UDropdown :items="instructions" :popper="{ placement: 'bottom-start' }">
<UButton color="white" :label="`${selectedInstruction ? selectedInstruction.name : 'Select Instruction'}`"
trailing-icon="i-heroicons-chevron-down-20-solid" />
</UDropdown>
<ClientOnly>
<UDropdown :items="instructions" :popper="{ placement: 'bottom-start' }">
<UButton color="white" :label="`${selectedInstruction ? selectedInstruction.name : 'Select Instruction'}`"
trailing-icon="i-heroicons-chevron-down-20-solid" />
</UDropdown>
</ClientOnly>
</div>
<div dir="ltr" class="relative overflow-y-scroll flex-1 space-y-4">
<ul className="flex flex-1 flex-col">
9 changes: 6 additions & 3 deletions components/ModelsDropdown.vue
@@ -11,9 +11,11 @@ const label = computed(() => selectedModel.value ? selectedModel.value : "Models
const loadModels = async () => {
const response = await $fetch('/api/models/', {
headers: {
'x_ollama_host': loadOllamaHost(),
'x_ollama_username': loadOllamaUserName(),
'x_ollama_password': loadOllamaPassword()
'x_ollama_host': loadOllamaHost() || '',
'x_ollama_username': loadOllamaUserName() || '',
'x_ollama_password': loadOllamaPassword() || '',
'x_openai_api_key': loadKey(OPENAI_API_KEY) || '',
'x_anthropic_api_key': loadKey(ANTHROPIC_API_KEY) || ''
}
});
return response.models;
@@ -35,6 +37,7 @@ onMounted(async () => {
})
</script>

<template>
<ClientOnly>
<UDropdown :items="models" :popper="{ placement: 'bottom-start' }">
30 changes: 26 additions & 4 deletions pages/settings/index.vue
@@ -1,11 +1,15 @@
<script setup>
import {
loadKey,
saveKey,
loadOllamaHost,
saveOllamaHost,
loadOllamaUserName,
saveOllamaUserName,
loadOllamaPassword,
saveOllamaPassword
saveOllamaPassword,
OPENAI_API_KEY,
ANTHROPIC_API_KEY
} from '@/utils/settings';
const toast = useToast();
@@ -21,7 +25,9 @@ const save = (host, authorization, username, password) => {
const state = reactive({
host: undefined,
username: undefined,
password: undefined
password: undefined,
openaiApiKey: undefined,
anthropicApiKey: undefined
});
const saving = ref(false);
@@ -40,13 +46,17 @@ const validate = (state) => {
const onSubmit = async () => {
console.log("Submitting: ", state.host.trim());
save(state.host.trim(), authorization.value, state.username, state.password);
saveKey(OPENAI_API_KEY, state.openaiApiKey);
saveKey(ANTHROPIC_API_KEY, state.anthropicApiKey);
toast.add({ title: `Ollama server set to ${state.host.trim()} successfully!` });
};
onMounted(() => {
state.host = loadOllamaHost();
state.username = loadOllamaUserName();
state.password = loadOllamaPassword();
state.openaiApiKey = loadKey(OPENAI_API_KEY);
state.anthropicApiKey = loadKey(ANTHROPIC_API_KEY);
if (state.username && state.password) {
authorization.value = true;
@@ -59,8 +69,8 @@ onMounted(() => {

<template>
<div class="w-[640px]">
<Heading label="Ollama Server Setting" />
<UForm :validate="validate" :state="state" class="space-y-4" @submit="onSubmit">
<Heading label="Ollama Server Setting" />
<UFormGroup label="Host" name="host">
<UInput v-model="state.host" />
</UFormGroup>
@@ -75,7 +85,19 @@ onMounted(() => {
<UInput v-model="state.password" type="password" />
</UFormGroup>
</UFormGroup>


<Heading label="API Keys" class="pt-4" />

<UFormGroup>
<UFormGroup label="OpenAI" name="openai" class="mb-2">
<UInput v-model="state.openaiApiKey" type="password" />
</UFormGroup>

<UFormGroup label="Anthropic" name="anthropic">
<UInput v-model="state.anthropicApiKey" type="password" />
</UFormGroup>
</UFormGroup>

<UButton type="submit" :loading="saving">
Save
</UButton>
23 changes: 18 additions & 5 deletions server/api/models/chat/index.post.ts
@@ -10,6 +10,8 @@ import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts
import { RunnablePassthrough, RunnableSequence } from "@langchain/core/runnables";
import { setEventStreamResponse, FetchWithAuth } from '@/server/utils';
import { PrismaClient } from '@prisma/client';
import { OPENAI_MODELS } from '@/server/utils/models';
import { ChatOpenAI } from '@langchain/openai';

const SYSTEM_TEMPLATE = `Answer the user's questions based on the below context.
Your answer should be in the format of Markdown.
@@ -25,6 +27,7 @@ export default defineEventHandler(async (event) => {
setEventStreamResponse(event);

const { host, username, password } = event.context.ollama;
const { x_openai_api_key: openai_api_key, x_anthropic_api_key } = event.context.keys;
const { knowledgebaseId, model, messages, stream } = await readBody(event);

if (knowledgebaseId) {
@@ -43,7 +46,7 @@

const embeddings = new OllamaEmbeddings({
model: `${knowledgebase.embedding}`,
baseUrl: "http://localhost:11434",
baseUrl: host,
});
const retriever = new Chroma(embeddings, {
collectionName: `collection_${knowledgebase.id}`
@@ -54,10 +57,20 @@
new MessagesPlaceholder("messages"),
]);

const chat = new ChatOllama({
baseUrl: "http://localhost:11434",
model: model,
});
let chat = null;
if (OPENAI_MODELS.includes(model)) {
console.log("Chat with OpenAI");
chat = new ChatOpenAI({
openAIApiKey: openai_api_key,
modelName: model
})
} else {
console.log("Chat with Ollama");
chat = new ChatOllama({
baseUrl: host,
model: model,
})
};

const query = messages[messages.length - 1].content
console.log("User query: ", query);
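Worth noting: x_anthropic_api_key is destructured above but not used anywhere yet. If Anthropic support were wired in the same way, the model-selection branch might grow roughly as follows; this is only a sketch, ANTHROPIC_MODELS is a hypothetical list, and it assumes the ChatAnthropic class from the @langchain/anthropic package:

import { ChatAnthropic } from '@langchain/anthropic';

let chat = null;
if (OPENAI_MODELS.includes(model)) {
  chat = new ChatOpenAI({ openAIApiKey: openai_api_key, modelName: model });
} else if (ANTHROPIC_MODELS.includes(model)) {   // hypothetical list, analogous to OPENAI_MODELS
  chat = new ChatAnthropic({
    anthropicApiKey: x_anthropic_api_key,        // already read from event.context.keys above
    modelName: model
  });
} else {
  chat = new ChatOllama({ baseUrl: host, model: model });
}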
19 changes: 19 additions & 0 deletions server/api/models/index.get.ts
@@ -1,9 +1,28 @@
import { Ollama } from 'ollama'
import { FetchWithAuth } from '@/server/utils';
import { OPENAI_MODELS } from '@/server/utils/models';

export default defineEventHandler(async (event) => {
const { host, username, password } = event.context.ollama;
const { x_openai_api_key: openai_api_key, x_anthropic_api_key } = event.context.keys;
console.log("openai_api_key: ", openai_api_key);
const ollama = new Ollama({ host, fetch: FetchWithAuth.bind({ username, password }) });
const response = await ollama.list();

if (openai_api_key) {
OPENAI_MODELS.forEach((model) => {
response.models.push({
name: model,
model: model,
details: {
family: 'OpenAI'
}
});
});

}

console.log(response);

return response
})
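With an OpenAI key present, the endpoint returns Ollama's own model list with the hard-coded OpenAI entries appended. The merged response looks roughly like this (values illustrative, Ollama-specific fields abbreviated):

// {
//   models: [
//     { name: "llama2:latest",  details: { family: "llama", ... }, ... },
//     { name: "gpt-3.5-turbo",  model: "gpt-3.5-turbo",  details: { family: "OpenAI" } },
//     { name: "gpt-4",          model: "gpt-4",          details: { family: "OpenAI" } },
//     ...
//   ]
// }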
15 changes: 15 additions & 0 deletions server/middleware/keys.ts
@@ -0,0 +1,15 @@
const KEYS = [
'x_openai_api_key',
'x_anthropic_api_key'
];

export default defineEventHandler((event) => {
const headers = getRequestHeaders(event);
const keys: { [key: string]: any } = {};

for (const key of KEYS) {
keys[key] = headers[key];
}

event.context.keys = keys;
})
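This middleware runs for every request and copies the two headers verbatim into event.context.keys for downstream handlers to read. A minimal consumer, mirroring the endpoints above:

// e.g. in any server route handler:
export default defineEventHandler(async (event) => {
  const { x_openai_api_key, x_anthropic_api_key } = event.context.keys;
  // ... use the keys, falling back to Ollama when they are absent
});

// One operational observation (not part of this commit): header names containing
// underscores, like x_openai_api_key, are dropped by nginx in its default
// configuration (underscores_in_headers off), so deployments behind such a proxy
// would need that option enabled for the keys to reach the server.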
6 changes: 6 additions & 0 deletions server/utils/models.ts
@@ -0,0 +1,6 @@
export const OPENAI_MODELS = [
"gpt-3.5-turbo",
"gpt-4",
"gpt-4-32k",
"gpt-4-turbo-preview",
];
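This list is what drives routing in the chat endpoint: a plain OPENAI_MODELS.includes(model) check, so any model name not listed here falls through to the Ollama branch. For example:

import { OPENAI_MODELS } from '@/server/utils/models';

OPENAI_MODELS.includes("gpt-4-turbo-preview"); // true  -> handled by ChatOpenAI
OPENAI_MODELS.includes("gpt-4-0125-preview");  // false -> would be sent to ChatOllama unless appended above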
14 changes: 14 additions & 0 deletions utils/settings.ts
@@ -2,6 +2,20 @@ const OLLAMA_HOST = 'ollama.host';
const OLLAMA_USERNAME = 'ollama.username';
const OLLAMA_PASSWORD = 'ollama.password';
const OLLAMA_INSTRUCTIONS = 'ollama.instructions';
export const OPENAI_API_KEY = 'keys.openai_api_key';
export const ANTHROPIC_API_KEY = 'keys.anthropic_api_key';

export const loadKey = (name: string) => {
return localStorage.getItem(name);
};

export const saveKey = (name: string, value: string | null) => {
if (value) {
localStorage.setItem(name, value);
} else {
localStorage.removeItem(name);
}
}

export const loadOllamaHost = () => {
const host = localStorage.getItem(OLLAMA_HOST);
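loadKey and saveKey are thin wrappers over localStorage, which only exists in the browser; that is why the settings page reads the stored values in onMounted rather than during server-side rendering. A quick usage sketch (the key value is an illustrative placeholder):

import { loadKey, saveKey, OPENAI_API_KEY } from '@/utils/settings';

saveKey(OPENAI_API_KEY, 'sk-example');  // stored under localStorage["keys.openai_api_key"]
loadKey(OPENAI_API_KEY);                // -> 'sk-example'
saveKey(OPENAI_API_KEY, '');            // a falsy value removes the entry instead
loadKey(OPENAI_API_KEY);                // -> null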
