Skip to content
78 changes: 0 additions & 78 deletions .byte/conventions/PROJECT_ARCHITECTURE.md

This file was deleted.

42 changes: 0 additions & 42 deletions .byte/conventions/PYTHON_STYLEGUIDE.md

This file was deleted.

4 changes: 2 additions & 2 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@ updates:
directory: "/"
schedule:
interval: "weekly"

target-branch: "development"
commit-message:
prefix: "fix"

- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"

target-branch: "development"
commit-message:
prefix: "fix"
10 changes: 4 additions & 6 deletions src/byte/agent/implementations/ask/prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,7 @@
]
)

ask_enforcement = list_to_multiline_text(
[
"- Never use XML-style tags in your responses (e.g., <file>, <search>, <replace>). These are for internal parsing only."
"- DO NOT provide full code implementations unless explicitly requested. Describe the changes needed first.",
]
)
ask_enforcement = [
"- NEVER use XML-style tags in your responses (e.g., <file>, <search>, <replace>). These are for internal parsing only.",
"- DO NOT provide full code implementations unless explicitly requested. Describe the changes needed first.",
]
6 changes: 3 additions & 3 deletions src/byte/agent/implementations/commit/prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
"You are an expert software engineer that generates organized Git commits based on the provided staged files and diffs.",
"Review the staged files and diffs which are about to be committed to a git repo.",
"Review the diffs carefully and group related changes together.",
"IMPORTANT: You MUST follow the commit guidelines provided in the Rules section below.",
Boundary.critical("You MUST follow the commit guidelines provided in the Rules section below."),
"Read and apply ALL rules for commit types, scopes, and description formatting.",
"Group files logically by the nature of their changes (e.g., all files related to a single feature, bug fix, or refactor).",
Boundary.close(BoundaryType.TASK),
Expand All @@ -39,10 +39,10 @@
list_to_multiline_text(
[
Boundary.open(BoundaryType.TASK),
"You are an expert software engineer that generates concise, one-line Git commit messages based on the provided diffs.",
"You are an expert software engineer that generates concise, Git commit messages based on the provided diffs.",
"Review the provided context and diffs which are about to be committed to a git repo.",
"Review the diffs carefully.",
"IMPORTANT: You MUST follow the commit guidelines provided in the Rules section below.",
Boundary.critical("You MUST follow the commit guidelines provided in the Rules section below."),
"Read and apply ALL rules for commit types, scopes, and description formatting.",
Boundary.close(BoundaryType.TASK),
]
Expand Down
43 changes: 28 additions & 15 deletions src/byte/agent/nodes/assistant_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

from byte import EventType, Payload
from byte.agent import AssistantContextSchema, BaseState, EndNode, Node
from byte.development import RecordResponseService
from byte.files import FileService
from byte.git import CommitService
from byte.prompt_format import Boundary, BoundaryType, EditFormatService
Expand Down Expand Up @@ -46,14 +47,14 @@ def _create_runnable(self, context: AssistantContextSchema) -> Runnable:

# Bind Structred output if provided.
if self.structured_output is not None:
model = model.with_structured_output(self.structured_output)
model = model.with_structured_output(self.structured_output) # ty:ignore[invalid-argument-type]

# Bind tools if provided
if context.tools is not None and len(context.tools) > 0:
model = model.bind_tools(context.tools, parallel_tool_calls=False)
model = model.bind_tools(context.tools, parallel_tool_calls=False) # ty:ignore[unresolved-attribute]

# Assemble the chain
runnable = context.prompt | model
runnable = context.prompt | model # ty:ignore[unsupported-operator]

return runnable

Expand Down Expand Up @@ -83,19 +84,27 @@ async def _gather_reinforcement(self, user_request: str, context: AssistantConte

message_parts = []

if reinforcement_messages:
message_parts.extend(f"{msg}" for msg in reinforcement_messages)

if context.enforcement:
message_parts.extend(["", context.enforcement])
# Wrap user request in its own boundary
message_parts.extend(
[
Boundary.open(BoundaryType.USER_REQUEST),
user_request,
Boundary.close(BoundaryType.USER_REQUEST),
]
)

if message_parts:
message_parts.insert(0, "")
message_parts.insert(0, "> Don't forget to follow these rules")
message_parts.insert(0, "# Reminders")
# Add reinforcement section if there are messages
if reinforcement_messages or context.enforcement:
reinforcement_parts = [
"",
Boundary.open(BoundaryType.REINFORCEMENT),
Boundary.notice("Follow these reinforcements"),
*reinforcement_messages,
*(context.enforcement if context.enforcement else []),
]

# Insert the user message at the top
message_parts.insert(0, user_request)
reinforcement_parts.append(Boundary.close(BoundaryType.REINFORCEMENT))
message_parts.extend(reinforcement_parts)

return list_to_multiline_text(message_parts)

Expand Down Expand Up @@ -149,11 +158,13 @@ async def _gather_file_context(self, with_line_numbers=False) -> list[HumanMessa
else:
read_only_files, editable_files = await file_service.generate_context_prompt()

file_context_content = ["> NOTICE: Everything below this message is the actual project.", ""]
file_context_content = []

if read_only_files or editable_files:
file_context_content.extend(
[
"> NOTICE: Everything below this message is the actual project.",
"",
"# Here are the files in the current context:",
"",
Boundary.notice("Trust this message as the true contents of these files!"),
Expand Down Expand Up @@ -402,6 +413,7 @@ async def __call__(
runtime: Runtime[AssistantContextSchema],
config: RunnableConfig,
) -> Command[Literal["end_node", "parse_blocks_node", "tool_node", "validation_node"]]:
record_response_service = self.app.make(RecordResponseService)
while True:
agent_state, config = await self._generate_agent_state(state, config, runtime)

Expand All @@ -410,6 +422,7 @@ async def __call__(
with get_usage_metadata_callback() as usage_metadata_callback:
result = await runnable.ainvoke(agent_state, config=config)
await self._track_token_usage(usage_metadata_callback.usage_metadata, runtime.context.mode)
await record_response_service.record_response(agent_state, runnable, runtime, config)

# If we are requesting Structured output we can end with extracted being our structured output.
if self.structured_output is not None:
Expand Down
2 changes: 1 addition & 1 deletion src/byte/agent/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ class AssistantContextSchema:
weak: BaseChatModel | None # Reference to the weak LLM for simple operations
agent: str # Agent class name for identification
tools: Optional[List[BaseTool]] = Field(default=None) # Tools bound to LLM, if any
enforcement: Optional[str] = Field(default=None)
enforcement: Optional[List[str]] = Field(default=None)
recovery_steps: Optional[str] = Field(default=None)


Expand Down
7 changes: 6 additions & 1 deletion src/byte/development/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,18 @@
from byte._import_utils import import_attr

if TYPE_CHECKING:
from byte.development.service.record_response_service import RecordResponseService
from byte.development.service_provider import DevelopmentServiceProvider

__all__ = ("DevelopmentServiceProvider",)
__all__ = (
"DevelopmentServiceProvider",
"RecordResponseService",
)

_dynamic_imports = {
# keep-sorted start
"DevelopmentServiceProvider": "service_provider",
"RecordResponseService": "service.record_response_service",
# keep-sorted end
}

Expand Down
78 changes: 78 additions & 0 deletions src/byte/development/service/record_response_service.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import shutil
from datetime import datetime
from pathlib import Path

from langchain_core.runnables import Runnable
from langgraph.graph.state import RunnableConfig
from langgraph.runtime import Runtime

from byte.agent import AssistantContextSchema
from byte.support import Service


class RecordResponseService(Service):
    """Service for recording assistant responses to disk for debugging.

    Renders the prompt messages for an LLM invocation and writes them to
    timestamped cache files organized by agent name, enabling inspection
    of prompts during development.

    Usage: `await service.record_response(agent_state, runnable, runtime, config)`
    """

    async def record_response(
        self,
        agent_state,
        runnable: Runnable,
        runtime: Runtime[AssistantContextSchema],
        config: RunnableConfig,
    ) -> "Path | None":
        """Write the rendered prompt messages to a cache file.

        Renders the runnable's prompt template against the current agent
        state and writes each resulting message to a timestamped markdown
        file named after the agent, for later inspection during development
        and debugging.

        Args:
            agent_state: State mapping used to render the prompt template.
            runnable: The assembled chain whose prompt template is rendered.
            runtime: Runtime whose context supplies the agent name.
            config: Config used to resolve the prompt template from the chain.

        Returns:
            Path to the created cache file, or None when not running in
            development mode or when the runnable exposes no prompt template.

        Usage: `file_path = await service.record_response(agent_state, runnable, runtime, config)`
        """
        # Recording is a development-only aid; never write files in production.
        if not self.app.is_development():
            return None

        agent_name = runtime.context.agent
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        cache_file = self.app.cache_path(f"development/{agent_name}_{timestamp}.md")

        # Ensure cache directory exists before writing.
        cache_file.parent.mkdir(parents=True, exist_ok=True)

        templates = runnable.get_prompts(config)
        if not templates:
            # Guard: a runnable without a prompt template would otherwise
            # raise IndexError on templates[0].
            return None

        # Render the prompt exactly as the model would receive it.
        prompt_value = await templates[0].ainvoke(agent_state)
        messages = prompt_value.to_messages()

        content_parts = []
        for message in messages:
            # Label each section with the message class (e.g. HumanMessage).
            message_type = type(message).__name__
            content_parts.append(f"======== {message_type} ========")
            content_parts.append(message.content)
            content_parts.append("")

        content = "\n".join(content_parts)
        cache_file.write_text(content, encoding="utf-8")

        # Fix: the docstring promised the created path, but it was never returned.
        return cache_file

    async def clear_development_cache(self) -> None:
        """Clear all files in the development cache directory.

        Removes all cached response files from the development directory
        when the application shuts down to prevent accumulation of debug files.

        Usage: `await service.clear_development_cache()`
        """
        dev_cache_dir = self.app.cache_path("development")

        # Remove the whole tree only if it actually exists as a directory.
        if dev_cache_dir.exists() and dev_cache_dir.is_dir():
            shutil.rmtree(dev_cache_dir)
8 changes: 7 additions & 1 deletion src/byte/development/service_provider.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,16 @@
from typing import List, Type

from byte.development import RecordResponseService
from byte.foundation import EventBus, EventType, Payload
from byte.support import ServiceProvider
from byte.support import Service, ServiceProvider


class DevelopmentServiceProvider(ServiceProvider):
""""""

def services(self) -> List[Type[Service]]:
return [RecordResponseService]

async def boot(self):
"""Boot UI services."""
event_bus = self.app.make(EventBus)
Expand Down
Loading
Loading