Merged

Changes from all commits (38 commits)
f6a89b1
feat(session): Implement lazy LLM agent startup
pcharbon70 Dec 19, 2025
ede6273
feat(tui): Add Frame widget for bordered content areas
pcharbon70 Dec 19, 2025
04b9519
feat(tui): Integrate TextInput into ConversationView with test fixes
pcharbon70 Dec 21, 2025
9c49bed
fix(tui): Fix ConversationView rendering on large terminals
pcharbon70 Dec 21, 2025
766b174
fix(tui): Improve terminal cleanup on quit
pcharbon70 Dec 21, 2025
5f35042
feat(tui): Change quit shortcut from Ctrl+C to Ctrl+D
pcharbon70 Dec 21, 2025
5cfea6e
fix(tui): Fix ConversationView width calculation
pcharbon70 Dec 21, 2025
b7d5d65
fix(tui): Fix ConversationView to fill full frame width
pcharbon70 Dec 21, 2025
5da858b
chore: Fix compilation warnings and test registry lookups
pcharbon70 Dec 21, 2025
7987ff5
feat(tui): Add frame border around sessions sidebar
pcharbon70 Dec 21, 2025
3485fe2
fix(tui): Fix Frame widget rendering on large terminals
pcharbon70 Dec 21, 2025
4635b40
feat(tui): Move frame to surround tab content only
pcharbon70 Dec 21, 2025
51ef65d
feat(tui): Update tab styling with connected folder design
pcharbon70 Dec 21, 2025
28fc1fb
style: Apply code formatting across lib and test files
pcharbon70 Dec 22, 2025
bff397a
feat(tui): Add mouse click handling for tabs and sidebar
pcharbon70 Dec 22, 2025
b57b4f9
fix(tui): Only handle mouse clicks, not hover/move events
pcharbon70 Dec 22, 2025
92e091b
feat(tui): Add auto-resume prompt for previous sessions
pcharbon70 Dec 22, 2025
1dcf997
feat(tui): Add per-session provider/model config with status bar display
pcharbon70 Dec 22, 2025
6ddb188
feat(tui): Add separator bar below status bar in tab content
pcharbon70 Dec 24, 2025
e7f74e7
feat(tui): Add status bar icons and processing state tracking
pcharbon70 Dec 24, 2025
a21ba14
refactor(tui): Move agent_status from global Model to per-session ui_…
pcharbon70 Dec 24, 2025
128cb94
feat(tui): Add mode bar below text input
pcharbon70 Dec 24, 2025
878adad
feat(tui): Add paste from clipboard support (Ctrl+V)
pcharbon70 Dec 24, 2025
af05a9b
feat(tui): Add text selection and copy in conversation view
pcharbon70 Dec 24, 2025
ae090d1
style: Apply code formatting across TUI modules
pcharbon70 Dec 24, 2025
852dab4
feat(llm-agent): Add metadata support for streaming responses
pcharbon70 Dec 25, 2025
0f76ca8
feat(tui): Add usage tracking system with token/cost display
pcharbon70 Dec 25, 2025
a589001
feat: Add programming language detection module
pcharbon70 Dec 25, 2025
99eb05b
feat: Integrate language detection into session and TUI
pcharbon70 Dec 25, 2025
bbd582f
feat(tui): Display language indicator in mode bar
pcharbon70 Dec 26, 2025
a0690c8
feat(tui): Add markdown rendering for conversation messages
pcharbon70 Dec 26, 2025
49a0441
fix(tui): Never truncate conversation messages
pcharbon70 Dec 26, 2025
5c40c64
feat(tui): Add syntax highlighting for Elixir code blocks
pcharbon70 Dec 26, 2025
65c23e2
feat(tui): Add interactive code block navigation with Tab/Enter
pcharbon70 Dec 26, 2025
0e1a5b6
fix(tui): Change code block navigation from Tab to Ctrl+B
pcharbon70 Dec 27, 2025
308a64f
feat(tui): Add per-session prompt history with arrow key navigation
pcharbon70 Dec 27, 2025
8aef030
feat(tui): Auto-save all sessions on Ctrl+X exit
pcharbon70 Dec 27, 2025
e6a4e30
chore: Reorganize ontology files and add research documents
pcharbon70 Dec 27, 2025
config/dev.exs (4 changes: 3 additions & 1 deletion)
@@ -1,4 +1,6 @@
import Config

# Development-specific configuration
config :logger, level: :debug
# Set to :error to avoid log messages messing up the TUI
# Change to :debug or :info when debugging without the TUI
config :logger, level: :error
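
When running without the TUI (for example, in an IEx session), the level can also be raised temporarily at runtime instead of editing this file. A minimal sketch using the standard Logger API:

    # Raise verbosity for a debugging session
    Logger.configure(level: :debug)

    # Restore the TUI-friendly level afterwards
    Logger.configure(level: :error)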
lib/jido_code/agents/llm_agent.ex (212 changes: 184 additions & 28 deletions)
@@ -46,6 +46,7 @@ defmodule JidoCode.Agents.LLMAgent do
alias Jido.AI.Model.Registry.Adapter, as: RegistryAdapter
alias Jido.AI.Prompt
alias JidoCode.Config
alias JidoCode.Language
alias JidoCode.PubSubTopics
alias JidoCode.Session.ProcessRegistry
alias JidoCode.Session.State, as: SessionState
@@ -58,7 +59,8 @@ defmodule JidoCode.Agents.LLMAgent do

# System prompt should NOT include user input to prevent prompt injection attacks.
# User messages are passed separately to the AI agent via chat_response/3.
@system_prompt """
# The base prompt is extended with language-specific instructions at runtime.
@base_system_prompt """
You are JidoCode, an expert coding assistant running in a terminal interface.

Your capabilities:
Expand All @@ -76,6 +78,9 @@ defmodule JidoCode.Agents.LLMAgent do
- Acknowledge limitations when you're uncertain
"""

# For backwards compatibility and non-session contexts
@system_prompt @base_system_prompt

# ============================================================================
# Client API
# ============================================================================
@@ -506,7 +511,8 @@
ai_pid: ai_pid,
config: config,
session_id: actual_session_id,
topic: build_topic(actual_session_id)
topic: build_topic(actual_session_id),
is_processing: false
}

{:ok, state}
@@ -539,6 +545,12 @@
{:noreply, state}
end

@impl true
def handle_info(:stream_complete, state) do
# Reset processing state when stream completes (success or failure)
{:noreply, %{state | is_processing: false}}
end

@impl true
def handle_call({:chat, message}, from, state) do
# ARCH-1 Fix: Use Task.Supervisor for monitored async tasks
@@ -576,8 +588,12 @@

@impl true
def handle_call(:get_status, _from, state) do
# ready is false when processing or when agent is not alive
agent_alive = is_pid(state.ai_pid) and Process.alive?(state.ai_pid)
ready = agent_alive and not state.is_processing

status = %{
ready: is_pid(state.ai_pid) and Process.alive?(state.ai_pid),
ready: ready,
config: state.config,
session_id: state.session_id,
topic: state.topic
@@ -627,23 +643,38 @@
topic = state.topic
config = state.config
session_id = state.session_id
agent_pid = self()

# ARCH-1 Fix: Use Task.Supervisor for monitored async streaming
Task.Supervisor.start_child(JidoCode.TaskSupervisor, fn ->
# Trap exits to prevent ReqLLM's internal cleanup tasks from crashing us
Process.flag(:trap_exit, true)

try do
do_chat_stream_with_timeout(config, message, topic, timeout, session_id)
# Notify agent that streaming is complete
send(agent_pid, :stream_complete)
catch
:exit, {:timeout, _} ->
Logger.warning("Stream timed out after #{timeout}ms")
broadcast_stream_error(topic, :timeout)
send(agent_pid, :stream_complete)

kind, reason ->
Logger.error("Stream failed: #{kind} - #{inspect(reason)}")
broadcast_stream_error(topic, {kind, reason})
send(agent_pid, :stream_complete)
end

# Drain any EXIT messages from ReqLLM cleanup tasks
receive do
{:EXIT, _pid, _reason} -> :ok
after
100 -> :ok
end
end)

{:noreply, state}
{:noreply, %{state | is_processing: true}}
end
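
# Processing-state lifecycle implied by the code above (sketch):
#
#   chat-stream handler (this clause)      -> is_processing: true
#   task completes, times out, or crashes  -> send(agent_pid, :stream_complete)
#   handle_info(:stream_complete)          -> is_processing: false
#
# get_status therefore reports ready: false while a stream is in flight.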

@impl true
@@ -800,11 +831,11 @@
Phoenix.PubSub.broadcast(@pubsub, topic, {:stream_chunk, session_id, chunk})
end

defp broadcast_stream_end(topic, full_content, session_id) do
defp broadcast_stream_end(topic, full_content, session_id, metadata) do
# Finalize message in Session.State (skip if session_id is PID string)
end_session_streaming(session_id)
# Also broadcast for TUI (include session_id for routing)
Phoenix.PubSub.broadcast(@pubsub, topic, {:stream_end, session_id, full_content})
# Also broadcast for TUI (include session_id, content, and metadata for routing)
Phoenix.PubSub.broadcast(@pubsub, topic, {:stream_end, session_id, full_content, metadata})
end

defp broadcast_stream_error(topic, reason) do
@@ -851,11 +882,14 @@
end

defp execute_stream(model, message, topic, session_id) do
# Build dynamic system prompt with language-specific instructions
system_prompt = build_system_prompt(session_id)

# Build prompt with system message and user message
prompt =
Prompt.new(%{
messages: [
%{role: :system, content: @system_prompt, engine: :none},
%{role: :system, content: system_prompt, engine: :none},
%{role: :user, content: message, engine: :none}
]
})
@@ -884,33 +918,120 @@
end
end

defp process_stream(stream, topic, session_id) do
# Accumulate full content while streaming chunks
defp process_stream(stream_response, topic, session_id) do
# Extract inner stream and metadata_task from StreamResponse
{actual_stream, metadata_task} =
case stream_response do
%ReqLLM.StreamResponse{stream: inner, metadata_task: task} when inner != nil ->
{inner, task}

%ReqLLM.StreamResponse{metadata_task: task} ->
Logger.warning("LLMAgent: StreamResponse has nil stream field")
{[], task}

other ->
# Not a StreamResponse, just a raw stream
{other, nil}
end

# Accumulate full content while streaming chunks (catch handles ReqLLM process cleanup race conditions)
full_content =
Enum.reduce_while(stream, "", fn chunk, acc ->
case extract_chunk_content(chunk) do
{:ok, content} ->
broadcast_stream_chunk(topic, content, session_id)
{:cont, acc <> content}

{:finish, content} ->
# Last chunk with finish_reason
if content != "" do
broadcast_stream_chunk(topic, content, session_id)
end

{:halt, acc <> content}
end
end)
try do
Enum.reduce_while(actual_stream, "", fn chunk, acc ->
case extract_chunk_content(chunk) do
{:ok, content} ->
# Only broadcast non-empty content
if content != "" do
broadcast_stream_chunk(topic, content, session_id)
end

{:cont, acc <> content}

{:finish, content} ->
if content != "" do
broadcast_stream_chunk(topic, content, session_id)
end

{:halt, acc <> content}
end
end)
rescue
e in Protocol.UndefinedError ->
Logger.error("LLMAgent: Protocol error during enumeration: #{inspect(e)}")
broadcast_stream_error(topic, e)
""

e ->
Logger.error("LLMAgent: Error during enumeration: #{inspect(e)}")
broadcast_stream_error(topic, e)
""
catch
:exit, {:noproc, _} ->
# StreamServer died - this is expected at end of stream due to ReqLLM cleanup
Logger.debug("LLMAgent: Stream server terminated (normal cleanup)")
""

:exit, reason ->
Logger.warning("LLMAgent: Stream exited: #{inspect(reason)}")
""
end

# Await metadata from the stream (usage info, finish_reason, etc.)
# This keeps the StreamServer alive until metadata is collected
metadata = await_stream_metadata(metadata_task)

# Log usage information if available
if metadata[:usage] do
Logger.info("LLMAgent: Token usage - #{inspect(metadata[:usage])}")
end

# Broadcast stream completion and finalize in Session.State
broadcast_stream_end(topic, full_content, session_id)
broadcast_stream_end(topic, full_content, session_id, metadata)
rescue
error ->
Logger.error("Stream processing error: #{inspect(error)}")
broadcast_stream_error(topic, error)
end

# Await metadata from the stream's metadata task
# Returns empty map if task is nil or fails
defp await_stream_metadata(nil), do: %{}

defp await_stream_metadata(task) do
try do
# Await with reasonable timeout (10 seconds)
Task.await(task, 10_000)
catch
:exit, {:timeout, _} ->
Logger.warning("LLMAgent: Metadata task timed out")
%{}

:exit, reason ->
Logger.debug("LLMAgent: Metadata task exited: #{inspect(reason)}")
%{}
end
end
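
# Illustrative shape of the awaited metadata (keys assumed from the usage
# logging in process_stream above; the exact ReqLLM fields may differ):
#
#   %{usage: %{input_tokens: 512, output_tokens: 128}, finish_reason: :stop}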

# ReqLLM.StreamChunk format - content type with text field
defp extract_chunk_content(%ReqLLM.StreamChunk{type: :content, text: text}) do
{:ok, text || ""}
end

# ReqLLM.StreamChunk format - meta type signals end of stream
defp extract_chunk_content(%ReqLLM.StreamChunk{type: :meta, metadata: metadata}) do
if Map.get(metadata, :finish_reason) do
{:finish, ""}
else
{:ok, ""}
end
end

# ReqLLM.StreamChunk format - thinking/tool_call types (skip for now)
defp extract_chunk_content(%ReqLLM.StreamChunk{type: _type}) do
{:ok, ""}
end

# Legacy format - content with finish_reason
defp extract_chunk_content(%{content: content, finish_reason: nil}) do
{:ok, content || ""}
end
@@ -919,6 +1040,7 @@
{:finish, content || ""}
end

# Legacy format - delta content
defp extract_chunk_content(%{delta: %{content: content}} = chunk) do
finish_reason = Map.get(chunk, :finish_reason)

@@ -934,8 +1056,8 @@
end

defp extract_chunk_content(chunk) do
# Unknown chunk format - try to extract content
content = Map.get(chunk, :content) || Map.get(chunk, "content") || ""
# Unknown chunk format - try to extract content or text
content = Map.get(chunk, :content) || Map.get(chunk, :text) || Map.get(chunk, "content") || ""
{:ok, content}
end
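
# Dispatch sketch for the clauses above (values illustrative):
#
#   %ReqLLM.StreamChunk{type: :content, text: "Hi"}        -> {:ok, "Hi"}
#   %ReqLLM.StreamChunk{type: :meta,
#     metadata: %{finish_reason: :stop}}                   -> {:finish, ""}
#   %ReqLLM.StreamChunk{type: :thinking}                   -> {:ok, ""}
#   %{content: "Hi", finish_reason: nil}                   -> {:ok, "Hi"}
#   %{"content" => "Hi"}                                   -> {:ok, "Hi"}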

@@ -1022,6 +1144,40 @@
not String.starts_with?(session_id, "#PID<")
end

# ============================================================================
# System Prompt Building
# ============================================================================

# Build the system prompt with optional language-specific instructions
defp build_system_prompt(session_id) when is_binary(session_id) do
if is_valid_session_id?(session_id) do
case SessionState.get_state(session_id) do
{:ok, %{session: session}} when not is_nil(session.language) ->
add_language_instruction(@base_system_prompt, session.language)

_ ->
@base_system_prompt
end
else
@base_system_prompt
end
end

defp build_system_prompt(_), do: @base_system_prompt

# Add language-specific instruction to the system prompt
defp add_language_instruction(base_prompt, language) do
lang_name = Language.display_name(language)

language_instruction = """

Language Context:
This project uses #{lang_name}. When providing code snippets, write them in #{lang_name} unless a different language is specifically requested.
"""

base_prompt <> language_instruction
end
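
# Illustration: for a session whose language displays as "Elixir",
# build_system_prompt/1 yields @base_system_prompt followed by:
#
#   Language Context:
#   This project uses Elixir. When providing code snippets, write them in
#   Elixir unless a different language is specifically requested.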

# ============================================================================
# Validation Functions
# ============================================================================
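
For reviewers tracing the new message shapes end to end, a minimal subscriber sketch (the module name and PubSub server name are hypothetical; the tuple shapes mirror the broadcasts in this diff):

    defmodule StreamListener do
      # Hypothetical consumer of the LLMAgent broadcasts shown above.
      # Assumes the PubSub server is named JidoCode.PubSub.
      def listen(topic) do
        Phoenix.PubSub.subscribe(JidoCode.PubSub, topic)
        collect("")
      end

      defp collect(acc) do
        receive do
          {:stream_chunk, _session_id, chunk} ->
            collect(acc <> chunk)

          {:stream_end, _session_id, full_content, metadata} ->
            # Metadata now rides along with the final content
            # (see broadcast_stream_end/4); usage lives under metadata[:usage].
            {full_content, metadata}
        end
      end
    end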