
Commit 5ddd043

templates: Add a multi-model Ollama example to templates

1 parent 57a3c31
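
This commit adds a multi-model Ollama chat template and retitles the existing single-model one. For context, Shiny templates like these are typically scaffolded from the CLI with shiny create --template <id>, where <id> matches the "id" field registered in the template's _template.json (a hypothetical invocation; the exact id to use depends on how the template is published), and the resulting app is started with shiny run.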

File tree

4 files changed: +188 -1 lines changed

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
{
  "type": "app",
  "id": "chat-ai-ollama",
  "title": "Chat with local models using Ollama"
}

Lines changed: 178 additions & 0 deletions

@@ -0,0 +1,178 @@
# ------------------------------------------------------------------------------------
# Shiny Chat with local models, powered by Ollama
# ------------------------------------------------------------------------------------

# ChatOllama() requires an Ollama model server to be running locally.
# See the docs for more information on how to set up a local Ollama server:
# https://posit-dev.github.io/chatlas/reference/ChatOllama.html
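# For example, with the Ollama CLI installed, `ollama serve` starts the local
# server and `ollama pull llama3.2` downloads a model to chat with (an
# illustrative model tag; any installed model works).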

import ollama
from chatlas import ChatOllama

from shiny import reactive, req
from shiny.express import input, render, ui
from shiny.reactive import ExtendedTask

# Get installed models and choose an initial model for the chat
models_all = [m.model for m in ollama.list().models]

# Default to the first model the server lists (typically the most recently
# modified); prefer the alphabetically first llama3* model if one is installed
default_model = models_all[0]
models_all.sort()
if any(x.startswith("llama3") for x in models_all):
    default_model = next(x for x in models_all if x.startswith("llama3"))
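# For illustration (hypothetical model list): with ["qwen2.5:7b", "llama3.2:1b",
# "llama3.1:8b"] installed, the default becomes "llama3.1:8b", the first
# llama3* tag after sorting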

# Set some Shiny page options
ui.page_opts(
    title="Hello Ollama Chat",
    fillable=True,
    fillable_mobile=True,
)

with ui.sidebar(title="Chat options"):
    ui.input_select(
        "model",
        "Model",
        choices=models_all,
        selected=default_model,
    )
    with ui.div():
        ui.input_slider("temperature", "Creativity", min=0, max=1, value=0.5, step=0.25)
        with ui.help_text(style="text-align: right;"):

            @render.text
            @reactive.event(input.temperature)
            def text_creativity():
                if input.temperature() < 0.25:
                    return "No creativity"
                elif input.temperature() < 0.5:
                    return "Low creativity"
                elif input.temperature() < 0.75:
                    return "Medium creativity"
                elif input.temperature() < 1:
                    return "High creativity"
                else:
                    return "Max creativity"

    ui.input_action_button("edit_last", "Edit last message", disabled=True)
    ui.input_action_button("clear", "Reset chat")

# Create and display a Shiny chat component
chat = ui.Chat(
    id="chat",
    messages=["Hello! How can I help you today?"],
)

chat.ui()

# Hold the chatlas client in a reactive value so it can be swapped at runtime
chat_client = reactive.value[ChatOllama](None)


async def cancel_chat_stream(chat: ui.Chat, stream: ExtendedTask):
    if stream is not None and stream.status() == "running":
        # Cancel current stream
        stream.cancel()
        # Tell chat that the message is complete
        # (reaches into private Chat internals to close out the message stream)
        stream_id = chat._current_stream_id
        await chat._append_message(
            {
                "type": "assistant",
                "content": "... [cancelled].",
            },
            chunk="end",
            stream_id=stream_id,
        )
        await chat.append_status_message(
            "In-progress response was cancelled.", type="static"
        )


@reactive.effect
@reactive.event(input.model)
async def change_model():
    if chat_client.get() is None:
        client = ChatOllama(model=input.model())
        await chat.append_status_message(
            ui.HTML(f"Using model <code>{input.model()}</code>"), type="static"
        )
    else:
        stream = streaming_task.get()
        await cancel_chat_stream(chat, stream)

        # TODO: Turns are broken when you cancel an in-progress stream
        turns = chat_client.get().get_turns()
        client = ChatOllama(model=input.model(), turns=turns)
        await chat.append_status_message(
            ui.HTML(f"Model switched to <code>{input.model()}</code>"), type="dynamic"
        )

    chat_client.set(client)


streaming_task = reactive.value[ExtendedTask | None](None)


# Generate a response when the user submits a message
@chat.on_user_submit
async def handle_user_input(user_input: str):
    # `kwargs` are forwarded to the model provider, so the slider's
    # temperature applies to each request
    response = chat_client.get().stream(
        user_input, kwargs={"temperature": input.temperature()}
    )
    task = await chat.append_message_stream(response)
    streaming_task.set(task)


@reactive.effect
@reactive.event(input.clear)
async def reset_chat():
    stream = streaming_task.get()
    # Before the first message there is no stream; treat that as "not streaming"
    # so the button still resets the chat
    is_streaming = isinstance(stream, ExtendedTask) and stream.status() == "running"

    if is_streaming:
        await cancel_chat_stream(chat, stream)
    else:
        await chat.clear_messages()
        await chat.append_message("Hello! How can I help you today?")
        chat_client.set(ChatOllama(model=input.model()))
        await chat.append_status_message(
            ui.HTML(f"Using model <code>{input.model()}</code>")
        )


@reactive.effect
def toggle_last_message_button():
    task = streaming_task.get()
    if not isinstance(task, ExtendedTask):
        return

    is_streaming = task.status() == "running"
    ui.update_action_button("edit_last", disabled=is_streaming)
    ui.update_action_button(
        "clear", label="Cancel chat" if is_streaming else "Reset chat"
    )


@reactive.effect
@reactive.event(input.edit_last)
async def edit_last_message():
    # Guard against editing while a response is still streaming
    task = streaming_task.get()
    req(task is None or task.status() != "running")

    messages = chat.messages()
    req(len(messages) > 1)

    # Find the index of the last user message
    last_user_index = next(
        (i for i in range(len(messages) - 1, -1, -1) if messages[i]["role"] == "user"),
        None,
    )
    if last_user_index is None:
        raise ValueError("No user messages found")

    last_user_msg = messages[last_user_index]["content"]
    messages = messages[:last_user_index]  # Keep only messages before last user message

    # Reset chat UI state prior to last user message
    await chat.clear_messages()
    for message in messages:
        await chat.append_message(message)

    chat.update_user_input(value=last_user_msg, focus=True)

Lines changed: 4 additions & 0 deletions

@@ -0,0 +1,4 @@
shiny
tokenizers
chatlas
ollama
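
To try the template locally, install the dependencies above with pip install -r requirements.txt, then (with an Ollama server running) start the app with shiny run app.py, assuming the application file above is saved as app.py.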

Lines changed: 1 addition & 1 deletion

@@ -1,5 +1,5 @@
 {
   "type": "app",
   "id": "chat-ai-ollama",
-  "title": "Chat AI using Ollama"
+  "title": "Basic Chat AI using Ollama"
 }
