# docker-compose.yaml
# (Removed non-YAML residue captured from a web page view of this file:
#  GitHub UI chrome and a line-number gutter that would break YAML parsing.)
---
# Compose stack: the codingai API service plus an optional local Ollama
# LLM backend (enabled only with `--profile local-llm`).
#
# NOTE: the top-level `version` key was removed — it is obsolete in the
# Compose Specification; Compose v2 ignores it and prints a warning.

services:
  codingai-api:
    build:
      context: .
      dockerfile: ai-agent/Dockerfile
    container_name: codingai-api
    ports:
      # Host port is configurable via CODINGAI_HTTP_PORT (default 8000);
      # the container always listens on 8000 (see PORT below).
      - "${CODINGAI_HTTP_PORT:-8000}:8000"
    environment:
      - PORT=8000
      - TARGET_REPOS=${TARGET_REPOS:-}
      - RUN_ALL_REPOS=${RUN_ALL_REPOS:-false}
      # Provider selection: defaults to openai, with a local-first fallback
      # order pointing at the ollama service below.
      - LLM_PROVIDER=${LLM_PROVIDER:-openai}
      - LLM_PROVIDER_ORDER=${LLM_PROVIDER_ORDER:-local,openai}
      - LLM_FALLBACK_ENABLED=${LLM_FALLBACK_ENABLED:-true}
      - LLM_REQUIRE_HEALTHY=${LLM_REQUIRE_HEALTHY:-true}
      # Reaches the ollama service by its service name on the default network.
      - LOCAL_LLM_BASE_URL=${LOCAL_LLM_BASE_URL:-http://ollama:11434}
      - OBS_EVENT_LOG_ENABLED=${OBS_EVENT_LOG_ENABLED:-true}
    volumes:
      - ./ai-agent/.env:/app/ai-agent/.env
      - ./ai-agent/.agent:/app/ai-agent/.agent
      - ./ai-agent/github_app:/app/ai-agent/github_app
      - ./ai-agent/logs:/app/ai-agent/logs
      # NOTE(review): these two bind-mount individual files. If they do not
      # already exist on the host, Docker creates *directories* at these
      # paths instead — confirm they are pre-created before first run.
      - ./ai-agent/state.json:/app/ai-agent/state.json
      - ./ai-agent/state.db:/app/ai-agent/state.db
      - ./workspaces:/app/workspaces
    restart: unless-stopped

  ollama:
    # NOTE(review): untagged image resolves to :latest — consider pinning a
    # specific version tag for reproducible deployments.
    image: ollama/ollama
    container_name: codingai-ollama
    # Started only when the `local-llm` profile is activated
    # (e.g. `docker compose --profile local-llm up`).
    profiles: ["local-llm"]
    ports:
      - "${OLLAMA_PORT:-11434}:11434"
    volumes:
      # Named volume persists downloaded models across container restarts.
      - ollama-data:/root/.ollama
    restart: unless-stopped

volumes:
  ollama-data: