Add Meta-prompting
svilupp authored May 6, 2024
2 parents 0fa7b76 + a6a308f commit 7d52b63
Showing 12 changed files with 423 additions and 77 deletions.
16 changes: 14 additions & 2 deletions CHANGELOG.md
@@ -7,6 +7,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]

### Added
Chat tab
- added a delete icon to the last message in the conversation (for easy deletion)
- added a button in "Advanced Settings" to "Fork a conversation" (saves it to history for reference, but continues from a fresh copy)
- added focus behavior when a template is selected (on expand, on template selection, etc.)

Meta-prompting tab
- added an experimental meta-prompting experience, where a conductor model iterates until it produces a final answer, based on [arXiv:2401.12954](https://arxiv.org/pdf/2401.12954). See the "Meta-Prompting" tab for more details.

### Fixed

## [0.1.0]

### Added
- The first iteration of the GUI released
2 changes: 2 additions & 0 deletions Project.toml
@@ -4,12 +4,14 @@ authors = ["J S <[email protected]> and contributors"]
version = "0.2.0-DEV"

[deps]
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
GenieFramework = "a59fdf5c-6bf0-4f5d-949c-a137c9e2f353"
GenieSession = "03cc5b98-4f21-4eb6-99f2-22eced81f962"
PromptingTools = "670122d1-24a8-4d70-bfce-740807c42192"

[compat]
Aqua = "0.7"
Dates = "<0.0.1, 1"
GenieFramework = "2.1"
GenieSession = "1"
PromptingTools = "0.23"
96 changes: 73 additions & 23 deletions app.jl
@@ -41,6 +41,9 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true)
@in selected_page = "chat"
@in ministate = true
# configuration
## @in chat_tracker_tokens_in = 0
## @in chat_tracker_tokens_out = 0
## @in chat_tracker_cost = 0.0
@in model = isempty(PT.GROQ_API_KEY) ? "gpt4t" : "gllama370"
@in model_input = ""
@in model_submit = false
@@ -80,6 +83,15 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true)
@in chat_reset = false
@in chat_rm_last_msg = false
@in chat_fork = false
# Meta Prompting
@in meta_submit = false
@in meta_reset = false
@in meta_disabled = false
@in meta_question = ""
@in meta_rounds_max = 5
@in meta_rounds_current = 0
@in meta_displayed = Dict{Symbol, Any}[]
@in meta_rm_last_msg = false
# Template browser
@in template_filter = ""
@in template_submit = false
@@ -118,32 +130,18 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true)
end
@onbutton chat_reset begin
@info "> Chat Reset!"
record = save_conversation(
conv_displayed; save = HISTORY_SAVE, save_path = HISTORY_DIR,
variables = chat_template_variables, model = model)
history = push!(history, record)
## clean the chat
conv_displayed = empty!(conv_displayed)
chat_template_variables = empty!(chat_template_variables)
chat_question, chat_auto_template, chat_template_selected = "", "", ""
chat_disabled, chat_advanced_expanded, chat_template_expanded = false, false, false
# set defaults again
chat_code_airetry, chat_code_eval = false, false
chat_code_prefix, chat_temperature = "", 0.7
end
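
The inline save logic that previously lived in `chat_reset` is now factored into a `save_conversation` helper in `src/serialization.jl`, a file this diff does not show. A minimal sketch of what that helper plausibly looks like — the signature is taken from the two call sites (`chat_reset` above and `meta_reset` below); the body and defaults are assumptions:

```julia
# Hypothetical sketch of the helper in src/serialization.jl (not shown in this diff).
# Signature taken from the call sites; the body and defaults are assumptions.
function save_conversation(display::Vector{Dict{Symbol, Any}};
        save::Bool = true, save_path::String = ".",
        variables::Vector{Dict{Symbol, Any}} = Dict{Symbol, Any}[],
        model::String, file_prefix::String = "conversation")
    timestamp = Dates.format(now(), "YYYYmmdd_HHMMSS")
    ## render the displayed messages (incl. any user edits) into PT messages
    conv_rendered = render_messages(display, variables)
    label = label_conversation(conv_rendered; model)
    if save
        label_clean = replace(label, r"[:\s\"]+" => "_") |> lowercase
        path = joinpath(save_path, "$(file_prefix)__$(timestamp)__$(label_clean).json")
        PT.save_conversation(path, conv_rendered)
        @info "> Conversation saved to $path"
    end
    return Dict(:name => "Conv. @ $timestamp", :label => label, :messages => conv_rendered)
end
```

Factoring this out lets the Chat and Meta-Prompting tabs produce identical history records.
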
@onbutton chat_submit begin
chat_disabled = true
@@ -233,6 +231,47 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true)
chat_reset = true
conv_displayed = conv_displayed_temp
end
### Meta-prompting
@onbutton meta_submit begin
meta_disabled = true
if meta_rounds_current < meta_rounds_max
# we skip prepare_conversation to avoid creating the user+system prompt at the start; just grab the messages
conv_current = render_messages(meta_displayed)
while meta_rounds_current < meta_rounds_max
meta_rounds_current = meta_rounds_current + 1
## update conv; `early_stop` indicates whether the final answer was reached
early_stop, conv_current = meta_prompt_step!(
conv_current; counter = meta_rounds_current, model = model, question = meta_question)
meta_displayed = [msg2display(msg; id)
for (id, msg) in enumerate(conv_current)]
early_stop && break
end
elseif meta_question != ""
 Meta-prompting follow up">
@info "> Meta-prompting follow-up question!"
conv = prepare_conversation(meta_displayed; question = meta_question)
conv_current = send_to_model(conv; model = model)
meta_displayed = [msg2display(msg; id)
for (id, msg) in enumerate(conv_current)]
end
meta_disabled, meta_question = false, ""
end
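
`meta_prompt_step!` is defined in `src/meta_prompting.jl`, which this diff does not show. Based solely on the call site above and the referenced paper (a conductor model iterates, consulting expert prompts, until it emits a final answer), a rough sketch might look like:

```julia
# Hypothetical sketch -- the actual implementation lives in src/meta_prompting.jl.
# The :MetaExpertAsk template name and the "FINAL ANSWER" marker are assumptions;
# only the signature and return shape come from the call site above.
function meta_prompt_step!(conv::AbstractVector{<:PT.AbstractMessage};
        counter::Int, model::String, question::String = "")
    if counter == 1
        ## first round: seed the conversation from the conductor template + user question
        conv = send_to_model(:MetaExpertAsk; model, question)
    else
        ## later rounds: let the conductor continue the expert exchange
        conv = send_to_model(conv; model)
    end
    ## the conductor signals completion by emitting a final-answer marker
    early_stop = occursin("FINAL ANSWER", last_output(conv))
    return early_stop, conv
end
```
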
@onbutton meta_reset begin
@info "> Meta-Prompting Reset!"
record = save_conversation(
meta_displayed; save = HISTORY_SAVE, save_path = HISTORY_DIR,
model = model, file_prefix = "conversation__meta")
history = push!(history, record)
## clean the messages
meta_displayed = empty!(meta_displayed)
meta_disabled, meta_question, meta_rounds_current = false, "", 0
end
@onbutton meta_rm_last_msg begin
@info "> Deleting last turn!"
if !isempty(meta_displayed)
meta_rounds_current = max(meta_rounds_current - 1, 0)
pop!(meta_displayed)
meta_displayed = meta_displayed # reassign to trigger the UI update
end
end
### Template browsing behavior
@onbutton template_submit begin
@info "> Template filter: $template_filter"
@@ -296,7 +335,7 @@ end
this.$refs.tpl_select.focus();
});
},
filterFn(val, update) {
if (val === '') {
update(() => {
// reset to full option list
@@ -310,7 +349,7 @@ end
this.chat_template_options = this.chat_template_options_all.filter(v => v.toLowerCase().indexOf(needle) > -1)
})
},
filterFnAuto(val, update) {
if (val === '') {
update(() => {
// reset to full option list
@@ -325,8 +364,7 @@ end
this.chat_auto_template_options = this.chat_auto_template_options_all.filter(v => v.toLowerCase().indexOf(needle) > -1)
})
},
copyToClipboard(index) {
const str = this.conv_displayed[index].content; // extract the content of the element in position `index`
const el = document.createElement('textarea'); // Create a <textarea> element
el.value = str; // Set its value to the string that you want copied
@@ -337,6 +375,18 @@ end
el.select(); // Select the <textarea> content
document.execCommand('copy'); // Copy - only works as a result of a user action (e.g. click events)
document.body.removeChild(el); // Remove the <textarea> element
},
copyToClipboardMeta(index) {
const str = this.meta_displayed[index].content; // extract the content of the element in position `index`
const el = document.createElement('textarea'); // Create a <textarea> element
el.value = str; // Set its value to the string that you want copied
el.setAttribute('readonly', ''); // Make it readonly to be tamper-proof
el.style.position = 'absolute';
el.style.left = '-9999px'; // Move outside the screen to make it invisible
document.body.appendChild(el); // Append the <textarea> element to the HTML document
el.select(); // Select the <textarea> content
document.execCommand('copy'); // Copy - only works as a result of a user action (e.g. click events)
document.body.removeChild(el); // Remove the <textarea> element
}
"""
end
10 changes: 9 additions & 1 deletion src/ProToPortal.jl
@@ -1,9 +1,10 @@
module ProToPortal

using Dates
using PromptingTools
const PT = PromptingTools
using PromptingTools: JSON3
using PromptingTools: AICode, last_message, last_output, save_conversation
using PromptingTools.Experimental.AgentTools: aicodefixer_feedback, airetry!, AICall,
AIGenerate
const AT = PromptingTools.Experimental.AgentTools
@@ -21,13 +22,17 @@ export render_messages, render_template_messages
export conversation2transcript, parse_critic, load_conversations_from_dir
include("utils.jl")

export save_conversation
include("serialization.jl")

export flash, flash_has_message
include("flash.jl")

export messagecard, templatecard
include("components.jl")

include("view_chat.jl")
include("view_meta.jl")

export ui, ui_login
include("view.jl")
@@ -39,6 +44,9 @@ export send_to_model, prepare_conversation, label_conversation, evaluate_code
export build_lazy_aicall, autofix_code
include("llm.jl")

export meta_prompt_step!
include("meta_prompting.jl")

function __init__()
## Load extra templates
PT.load_templates!(joinpath(@__DIR__, "..", "templates"); remember_path = true) # add our custom ones
26 changes: 21 additions & 5 deletions src/llm.jl
@@ -1,7 +1,7 @@
"Prepares the conversation for sending to the LLM"
function prepare_conversation(
display::Vector{Dict{Symbol, Any}}, placeholders::Vector{Dict{
Symbol, Any}} = Vector{Dict{Symbol, Any}}();
question::String = "", template::String = "", system_prompt::String = "")
conv = render_messages(display, placeholders)
if template != ""
@@ -28,15 +28,16 @@ function prepare_conversation(
end
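
With `placeholders` now defaulting to an empty vector, the helper can be invoked with just the displayed messages — for example (illustrative values):

```julia
# Illustrative call -- `placeholders` is now optional, so the displayed messages suffice.
conv = prepare_conversation(conv_displayed; question = "What does this error mean?")
```
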

"Code evaluator. Returns the evaluted code block (AICode) and the feedback string."
function evaluate_code(conv::AbstractVector{<:PT.AbstractMessage};
prefix::String = "", header::String = "## Code Evaluation")
@info ">> Evaluating code"
cb = AICode(
last(conv); prefix, skip_unsafe = true, capture_stdout = true)
 Code Success:">
@info ">> Code Success: $(isvalid(cb))"

### Build the response
io = IOBuffer()
println(io, "## Code Evaluation", "\n")
println(io, header, "\n")
if isvalid(cb)
println(io, "**Outcome:** Code is valid", "\n")
println(io, "**Output:**\n $(cb.stdout)", "\n")
@@ -50,6 +51,13 @@ function evaluate_code(conv::AbstractVector{<:PT.AbstractMessage}; prefix = "")
return cb, feedback
end

# Convenience method for evaluating code given as a string.
# An AIMessage is the better input because it handles code extraction with a few fallbacks!
function evaluate_code(str::AbstractString;
prefix::String = "", header::String = "## Code Evaluation")
evaluate_code([PT.AIMessage(str)]; prefix, header)
end
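
For example, a string carrying a fenced Julia block can be evaluated directly (illustrative input):

```julia
# Illustrative input -- the fenced block inside the string is what AICode extracts.
msg = "Here is the fix:\n```julia\nsum(1:10)\n```"
cb, feedback = evaluate_code(msg)
isvalid(cb)  # true -> the extracted block ran without errors
```
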

"Constructs AIGenerate call that mimics if it was just executed"
function build_lazy_aicall(conv::AbstractVector{<:PT.AbstractMessage};
model::String, max_retries::Int = 3, n_samples::Int = 2)
@@ -83,6 +91,14 @@ function send_to_model(
result = aigenerate(conv; model, api_kwargs = (; temperature), return_all = true)
return result
end
"Sends the conversation to the LLM."
function send_to_model(
any_template::Symbol; model::String, temperature::Float64 = 0.7, kwargs...)
@info ">> Sending AITemplate $(any_template) to LLM with temp $temperature"
result = aigenerate(
any_template; model, api_kwargs = (; temperature), return_all = true, kwargs...)
return result
end
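
Illustrative usage, assuming the stock PromptingTools template `:JuliaExpertAsk`, whose placeholder is `ask` (any extra kwargs are forwarded to `aigenerate` as template variables):

```julia
# Illustrative usage -- :JuliaExpertAsk is a stock PromptingTools template; `ask` is its placeholder.
conv = send_to_model(:JuliaExpertAsk; model = "gpt4t", ask = "How do I sort a Dict by value?")
```
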
"Sends the conversation to the Auto-Critic Template for evaluation and suggestions"
function send_to_model(critic_template::Symbol,
conv::AbstractVector{<:PT.AbstractMessage}; model::String, temperature::Float64 = 0.7)