diff --git a/R/history.R b/R/history.R
index 08c21c1..d7fb726 100644
--- a/R/history.R
+++ b/R/history.R
@@ -76,7 +76,7 @@ create_history <- function(file_name = "chat_history",
   if (!dir.exists(tools::file_path_sans_ext(file_path))) {
     dir.create(tools::file_path_sans_ext(file_path),
-               recursive = TRUE, showWarnings = FALSE
+      recursive = TRUE, showWarnings = FALSE
     )
   }
@@ -119,7 +119,7 @@ save_user_history <- function(file_name = "chat_history",
 get_query_embedding <- function(query, local = FALSE, model = NULL) {
   if (local) {
     create_text_embeddings(query,
-                           model = model
+      model = model
     ) |>
       dplyr::pull(embedding) |>
       unlist()
@@ -217,8 +217,8 @@ chat_with_context <- function(query,
   if (rlang::is_true(add_context) || rlang::is_true(add_history)) {
     cli_alert_info("Creating embedding from query.")
     query_embedding <- get_query_embedding(query,
-                                           local = local,
-                                           model = embedding_model
+      local = local,
+      model = embedding_model
     )
   }
@@ -269,35 +269,35 @@ chat_with_context <- function(query,
   prompt_instructions <- switch(task,
-        "Context Only" =
-          list(
-            list(
-              role = "system",
-              content =
-                glue(
-                  "You are a helpful chat bot that answers questions based on
-                  the context provided by the user. If the user does not provide
-                  related context and you need context to respond accurately, say
-                  \"I am not able to answer that question. Maybe try rephrasing your
-                  question in a different way.\""
-                )
-            )
-          ),
-        "Permissive Chat" =
-          list(
-            list(
-              role = "system",
-              content =
-                glue(
-                  "You are a helpful chat bot that answers questions based on
-                  on the context provided by the user. If the user does not provide
-                  context and you need context to respond correctly, answer the
-                  quest but first say \"I am not able to answer that question with
-                  the context you gave me, but here is my best but here is my best
-                  answer."
-                )
-            )
-          )
+    "Context Only" =
+      list(
+        list(
+          role = "system",
+          content =
+            glue(
+              "You are a helpful chat bot that answers questions based on
+              the context provided by the user. If the user does not provide
+              related context and you need context to respond accurately, say
+              \"I am not able to answer that question. Maybe try rephrasing your
+              question in a different way.\""
+            )
+        )
+      ),
+    "Permissive Chat" =
+      list(
+        list(
+          role = "system",
+          content =
+            glue(
+              "You are a helpful chat bot that answers questions based on
+              on the context provided by the user. If the user does not provide
+              context and you need context to respond correctly, answer the
+              quest but first say \"I am not able to answer that question with
+              the context you gave me, but here is my best but here is my best
+              answer."
+            )
+        )
+      )
   )

   prompt_context <- list(
diff --git a/R/index.R b/R/index.R
index a0dc16b..3ca81c9 100755
--- a/R/index.R
+++ b/R/index.R
@@ -80,12 +80,12 @@ load_index <- function(domain, local_embeddings = FALSE) {
   if (local_embeddings) {
     sample_index <- system.file("sample-index/local/jameshwade-github-io-gpttools.parquet",
-                                package = "gpttools"
+      package = "gpttools"
     )
   } else {
     sample_index <- system.file("sample-index/jameshwade-github-io-gpttools.parquet",
-                                package = "gpttools"
+      package = "gpttools"
     )
   }
   index <- arrow::read_parquet(sample_index)
diff --git a/inst/retriever/app.R b/inst/retriever/app.R
index e06049c..caa4309 100644
--- a/inst/retriever/app.R
+++ b/inst/retriever/app.R
@@ -115,7 +115,7 @@ ui <- page_fillable(
       selected = getOption("gpttools.service", "openai")
     ),
     selectInput("model", "Model",
-                choices = NULL
+      choices = NULL
     ),
     selectInput(
       "embed_model", "OpenAI Embedding Model",
@@ -276,8 +276,8 @@ server <- function(input, output, session) {
     )
   )
   observe(updateSelectInput(session, "source",
-                            choices = c("All", indices()),
-                            selected = getOption("gpttools.sources", "All")
+    choices = c("All", indices()),
+    selected = getOption("gpttools.sources", "All")
   ))
   observe({
     toggle_popover("settings", show = FALSE)