Commit d400e47

chore: show model selection first when reloading
1 parent 49998d3 commit d400e47

File tree

1 file changed (+5, -7 lines)

  • shiny/templates/chat/llms/ollama-multi/app.py


shiny/templates/chat/llms/ollama-multi/app.py

Lines changed: 5 additions & 7 deletions
@@ -59,10 +59,8 @@ def text_creativity():
 
 
 # Create and display a Shiny chat component
-chat = ui.Chat(
-    id="chat",
-    messages=["Hello! How can I help you today?"],
-)
+INITIAL_MESSAGE = "Hello! How can I help you today?"
+chat = ui.Chat(id="chat", messages=[INITIAL_MESSAGE])
 
 chat.ui()
 
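Note: the greeting is hoisted into an INITIAL_MESSAGE constant here so the same text can be reused by reset_chat() in the last hunk below, instead of repeating the string literal. A minimal sketch of the two call sites, reconstructed from this commit (not a verbatim copy of the template):

    INITIAL_MESSAGE = "Hello! How can I help you today?"

    # Startup: seed the chat component with the greeting.
    chat = ui.Chat(id="chat", messages=[INITIAL_MESSAGE])

    # Reset: re-append the same greeting (see the final hunk in this commit).
    await chat.append_message(INITIAL_MESSAGE)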

@@ -87,7 +85,7 @@ async def cancel_chat_stream(chat: ChatOllama, stream: ExtendedTask):
         ui.div(
             "The previous response was cancelled.",
             class_="alert alert-warning",
-            style="margin-inline: auto; width: max-content;"
+            style="margin-inline: auto; width: max-content;",
         ),
         type="static",
     )
@@ -99,7 +97,7 @@ async def change_model():
     if chat_client.get() is None:
         client = ChatOllama(model=input.model())
         await chat.append_status_message(
-            ui.HTML(f"Using model <code>{input.model()}</code>"), type="static"
+            ui.HTML(f"Using model <code>{input.model()}</code>"), type="dynamic"
         )
     else:
         stream = streaming_task.get()
@@ -141,11 +139,11 @@ async def reset_chat():
         await cancel_chat_stream(chat, stream)
     else:
         await chat.clear_messages()
-        await chat.append_message("Hello! How can I help you today?")
         chat_client.set(ChatOllama(model=input.model()))
         await chat.append_status_message(
             ui.HTML(f"Using model <code>{input.model()}</code>")
         )
+        await chat.append_message(INITIAL_MESSAGE)
 
 
 @reactive.effect
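Net effect, matching the commit title: when the chat is reloaded with a new model, the "Using model …" status is appended before the greeting, so the model selection shows first. A minimal sketch of the resulting reset order, reconstructed from the hunks above (decorators and the stream-cancellation branch are elided):

    # Reconstructed reset path after this commit; not a verbatim copy of app.py.
    await chat.clear_messages()
    chat_client.set(ChatOllama(model=input.model()))
    await chat.append_status_message(
        ui.HTML(f"Using model <code>{input.model()}</code>")
    )
    # The greeting now comes last, after the model-selection status.
    await chat.append_message(INITIAL_MESSAGE)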

0 commit comments
