# Using PromptBuddy
```elixir
# Install notebook dependencies: the PromptBuddy smart cell (fetched from
# GitHub) and Kino ~> 0.17 for Livebook UI primitives.
Mix.install([
{:kino_promptbuddy, github: "fredguth/kino_promptbuddy"},
{:kino, "~>0.17.0"},
])
```
[![Run in Livebook](https://livebook.dev/badge/v1/blue.svg)](https://livebook.dev/run?url=https%3A%2F%2Fgist.github.com%2Ffredguth%2Ffd500b32ccfa864e90907b0e418adeb6)
# Using PromptBuddy
## The context is all preceding cells
### No context
<!-- livebook:{"attrs":"eyJjZWxsX2lkIjoibm9maWxlIiwibW9kZWwiOiJvcGVucm91dGVyOmFudGhyb3BpYy9jbGF1ZGUtaGFpa3UtNC41Iiwibl9ldmVyeSI6MjQsInNlc3Npb25faWQiOiIzNTZtZGd0Z2xla2RpYnhwaWN4Z3BnZDQ1aWxhM252aTVxNnpua2s2aW9ydnk3ZGkiLCJzb3VyY2UiOiIifQ","chunks":null,"kind":"Elixir.Kino.PromptBuddy","livebook_object":"smart_cell"} -->
```elixir
# PromptBuddy smart-cell body (machine-generated from the attrs comment above):
# renders a chat transcript into `outer` and, when the prompt is non-empty,
# streams an LLM reply into the `body` frame.
alias Kino.PromptBuddy.Context
# Model spec string handed to the streaming call at the bottom.
model = "openrouter:anthropic/claude-haiku-4.5"
# Streaming re-render cadence — presumably "update the UI every 24 chunks";
# TODO confirm against stream_response_and_update_history/9.
n_every = 24
# Livebook session id, used below to fetch the notebook for preceding-cell context.
session_id = "356mdgtglekdibxpicxgpgd45ila3nvi5q6znkk6iorvy7di"
current_cell_id = Context.get_current_cell_id()
# The user's prompt. Empty in this cell, so the whole `unless` branch is skipped
# and no LLM request is made.
user_text = ""
# Locate the smart-cell process by registered name.
# NOTE(review): interpolating the literal "nofile" is redundant — this atom is
# simply :promptbuddy_nofile, baked in by the code generator.
smart_cell_pid = Process.whereis(:"promptbuddy_#{"nofile"}")
import Kino.Shorts
# `outer` holds the full chat transcript; `body` is the placeholder slot the
# streamed assistant reply will be rendered into.
outer = frame()
body = frame()
chat_history = Kino.PromptBuddy.get_history(current_cell_id)
prompt_blank? = String.trim(user_text) == ""
previous_msgs = Kino.PromptBuddy.history_markdown(chat_history)
current_prompt_header = Kino.Markdown.new("**You**:")
current_prompt_body = Kino.Markdown.new(user_text)
buddy_header = Kino.Markdown.new("**Buddy**:")
# Render: prior messages, the current prompt, and an empty "Buddy" slot that
# streaming fills in asynchronously.
Kino.Frame.render(
outer,
Kino.Layout.grid(
previous_msgs ++ [current_prompt_header, current_prompt_body, buddy_header, body]
)
)
# Skip the LLM round-trip entirely for a blank prompt.
# NOTE(review): `unless` is soft-deprecated in recent Elixir; prefer
# `if not prompt_blank? do` when regenerating this cell.
unless prompt_blank? do
# Fire-and-forget: after a short delay, ask the smart cell to clear its editor.
Task.start(fn ->
Process.sleep(100)
if smart_cell_pid do
send(smart_cell_pid, {:clear_editor, current_cell_id})
end
end)
# System message pinning the Socratic / Polya tutoring style and brevity rules.
system_msg =
ReqLLM.Context.system(
"You are a patient pair-programming partner using **Polya's method** / **Socratic** style.\nPRIORITY: (1) Answer only the final PROMPT, (2) be brief, (3) one code fence if needed.\n"
)
# Wrap the user's text in explicit markers so the model can tell the final
# prompt apart from notebook/history context messages.
prompt_msg = ReqLLM.Context.user("--- BEGIN PROMPT ---
#{user_text}
--- END PROMPT ---
")
# Context built from cells preceding this one; degrades to no context if the
# notebook cannot be fetched.
precedent_msgs =
case Context.get_notebook(session_id) do
{:ok, nb} -> Context.build_precedent_messages(nb, current_cell_id)
_ -> []
end
history_msgs = Kino.PromptBuddy.history_to_messages(chat_history)
# Message order: system, preceding cells, this cell's chat history, then prompt.
messages = [system_msg] ++ precedent_msgs ++ history_msgs ++ [prompt_msg]
# Stream the model's reply into `body` and persist it to this cell's history.
Task.start(fn ->
Kino.PromptBuddy.stream_response_and_update_history(
model,
messages,
body,
outer,
user_text,
chat_history,
current_cell_id,
n_every
)
end)
end
# The cell's rendered output is the outer frame.
outer
```
My name is Fred
<!-- livebook:{"break_markdown":true} -->
### Now, it knows my name
<!-- livebook:{"attrs":"eyJjZWxsX2lkIjoibm9maWxlIiwibW9kZWwiOiJvcGVucm91dGVyOmFudGhyb3BpYy9jbGF1ZGUtc29ubmV0LTQuNSIsIm5fZXZlcnkiOjI0LCJzZXNzaW9uX2lkIjoiMzU2bWRndGdsZWtkaWJ4cGljeGdwZ2Q0NWlsYTNudmk1cTZ6bmtrNmlvcnZ5N2RpIiwic291cmNlIjoiV2hhdCBpcyBwcm9tcHQgYnVkZHk/In0","chunks":null,"kind":"Elixir.Kino.PromptBuddy","livebook_object":"smart_cell"} -->
```elixir
# Second PromptBuddy smart-cell body (machine-generated from the attrs comment
# above). Same generated template as the earlier cell, but with a non-empty
# prompt and a different model, so this one actually issues an LLM request.
alias Kino.PromptBuddy.Context
# Model spec string handed to the streaming call at the bottom.
model = "openrouter:anthropic/claude-sonnet-4.5"
# Streaming re-render cadence — presumably "update the UI every 24 chunks";
# TODO confirm against stream_response_and_update_history/9.
n_every = 24
# Livebook session id, used below to fetch the notebook for preceding-cell context.
session_id = "356mdgtglekdibxpicxgpgd45ila3nvi5q6znkk6iorvy7di"
current_cell_id = Context.get_current_cell_id()
# Non-empty prompt: the `unless` branch below runs and queries the model.
user_text = "What is prompt buddy?"
# Locate the smart-cell process by registered name.
# NOTE(review): interpolating the literal "nofile" is redundant — this atom is
# simply :promptbuddy_nofile, baked in by the code generator.
smart_cell_pid = Process.whereis(:"promptbuddy_#{"nofile"}")
import Kino.Shorts
# `outer` holds the full chat transcript; `body` is the placeholder slot the
# streamed assistant reply will be rendered into.
outer = frame()
body = frame()
chat_history = Kino.PromptBuddy.get_history(current_cell_id)
prompt_blank? = String.trim(user_text) == ""
previous_msgs = Kino.PromptBuddy.history_markdown(chat_history)
current_prompt_header = Kino.Markdown.new("**You**:")
current_prompt_body = Kino.Markdown.new(user_text)
buddy_header = Kino.Markdown.new("**Buddy**:")
# Render: prior messages, the current prompt, and an empty "Buddy" slot that
# streaming fills in asynchronously.
Kino.Frame.render(
outer,
Kino.Layout.grid(
previous_msgs ++ [current_prompt_header, current_prompt_body, buddy_header, body]
)
)
# Skip the LLM round-trip entirely for a blank prompt.
# NOTE(review): `unless` is soft-deprecated in recent Elixir; prefer
# `if not prompt_blank? do` when regenerating this cell.
unless prompt_blank? do
# Fire-and-forget: after a short delay, ask the smart cell to clear its editor.
Task.start(fn ->
Process.sleep(100)
if smart_cell_pid do
send(smart_cell_pid, {:clear_editor, current_cell_id})
end
end)
# System message pinning the Socratic / Polya tutoring style and brevity rules.
system_msg =
ReqLLM.Context.system(
"You are a patient pair-programming partner using **Polya's method** / **Socratic** style.\nPRIORITY: (1) Answer only the final PROMPT, (2) be brief, (3) one code fence if needed.\n"
)
# Wrap the user's text in explicit markers so the model can tell the final
# prompt apart from notebook/history context messages.
prompt_msg = ReqLLM.Context.user("--- BEGIN PROMPT ---
#{user_text}
--- END PROMPT ---
")
# Context built from cells preceding this one (this is how the model "knows my
# name" from the earlier markdown cell); degrades to no context on failure.
precedent_msgs =
case Context.get_notebook(session_id) do
{:ok, nb} -> Context.build_precedent_messages(nb, current_cell_id)
_ -> []
end
history_msgs = Kino.PromptBuddy.history_to_messages(chat_history)
# Message order: system, preceding cells, this cell's chat history, then prompt.
messages = [system_msg] ++ precedent_msgs ++ history_msgs ++ [prompt_msg]
# Stream the model's reply into `body` and persist it to this cell's history.
Task.start(fn ->
Kino.PromptBuddy.stream_response_and_update_history(
model,
messages,
body,
outer,
user_text,
chat_history,
current_cell_id,
n_every
)
end)
end
# The cell's rendered output is the outer frame.
outer
```
<!-- livebook:{"output":true} -->
```
13:21:19.296 [debug] Finch streaming completed successfully
```
<!-- livebook:{"offset":4757,"stamp":{"token":"XCP.hvqb2_6_w8iYuIrjjADkmfA_s37Bp0ovZhkYQufLXLDPF6oriEv-8UfpMrroTHIw8BH2VHfPOYK5ogBy5PEBNGAaKQvvnWuxXwp8eHoJrW21n6rny7Dmb-5I3ghO","version":2}} -->