Skip to content

Commit 841483a

Browse files
author
Iyappan Subramanian
committed
Added Agentic AI

- Added Agentic AI with one workflow
- Fixed network config
- Updated docker tag
1 parent acb9425 commit 841483a

File tree

3 files changed

+110
-18
lines changed

3 files changed

+110
-18
lines changed

Dockerfile

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -11,8 +11,9 @@ COPY install-docker.sh .
1111
RUN ./install-docker.sh
1212

1313
FROM base AS runtime
14-
COPY ./static ./static
1514
COPY compose.yaml app.py .
15+
COPY ./static ./static
16+
COPY ./searxng ./searxng
1617

1718
#CMD ["python", "launcher/launcher.py"]
1819
CMD ["python", "-u", "app.py"]

app.py

Lines changed: 19 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -9,6 +9,10 @@
99
"service_name": "llmchat_demo_service",
1010
"url": "http://localhost:7861"
1111
},
12+
"Agentic AI": {
13+
"service_name": "agentic_ai_demo_service",
14+
"url": "http://localhost:7864"
15+
},
1216
"Object Detection (YOLO)": {
1317
"service_name": "yolo_demo_service",
1418
"url": "http://localhost:7862"
@@ -32,6 +36,14 @@ def get_running_container(service_name):
3236
except docker.errors.NotFound:
3337
return None
3438

39+
def stop_dependent_container(cont_name):
40+
container_to_stop = get_running_container(cont_name)
41+
if container_to_stop:
42+
print(f"Stopping {cont_name}...")
43+
container_to_stop.stop()
44+
container_to_stop.remove() # Remove to ensure a clean start next time
45+
print(f"{cont_name} stopped.")
46+
3547
def stop_all_demos():
3648
"""Stops any running demo container managed by this launcher."""
3749
global current_container
@@ -46,13 +58,13 @@ def stop_all_demos():
4658

4759
# Stop and remove dependent ollama container
4860
if demo_info['service_name'] == 'llmchat_demo_service':
49-
cont_name = 'ollama_demo_service'
50-
container_to_stop = get_running_container(cont_name)
51-
if container_to_stop:
52-
print(f"Stopping {cont_name}...")
53-
container_to_stop.stop()
54-
container_to_stop.remove() # Remove to ensure a clean start next time
55-
print(f"{cont_name} stopped.")
61+
dep_cont_name = 'ollama_demo_service'
62+
stop_dependent_container(dep_cont_name)
63+
elif demo_info['service_name'] == 'agentic_ai_demo_service':
64+
dep_cont_name = 'ollama_for_agent_service'
65+
stop_dependent_container(dep_cont_name)
66+
dep_cont_name = 'searxng'
67+
stop_dependent_container(dep_cont_name)
5668

5769
current_container = None
5870
return "All demos have been stopped."

compose.yaml

Lines changed: 89 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -1,13 +1,16 @@
11
services:
22
# The main entry point for the user
33
launcher:
4-
build: .
5-
image: ghcr.io/amperecomputingai/ai-playground:0.2
4+
build:
5+
context: .
6+
dockerfile: Dockerfile
7+
image: ghcr.io/amperecomputingai/ai-playground:0.3-rc1
68
container_name: demo_launcher
79
ports:
810
- "7860:7860" # Expose the Gradio UI on port 7860
11+
networks:
12+
- public
913
volumes:
10-
# Mount the Docker socket to allow this container to control others
1114
- /var/run/docker.sock:/var/run/docker.sock
1215

1316
yolo_demo_service:
@@ -26,10 +29,10 @@ services:
2629
VIDEO_SRC: ${VIDEO_SRC:-''}
2730
WEBCAM0_SRC: ${WEBCAM0_SRC:-0}
2831
WEBCAM1_SRC: ${WEBCAM1_SRC:-2}
29-
#network_mode: host
3032
ports:
31-
#- "${HOST_PORT}:${GRADIO_SERVER_PORT:-7861}"
3233
- "7862:7862"
34+
networks:
35+
- public
3336

3437
whisper_demo_service:
3538
image: ghcr.io/amperecomputingai/ampere-ai-ref-apps:whisper-0.3.10rc1
@@ -46,18 +49,21 @@ services:
4649
NTHREADS: ${NTHREADS:-64}
4750
AIO_NUM_THREADS: ${AIO_NUM_THREADS:-64}
4851
ports:
49-
#- "${HOST_PORT}:${GRADIO_SERVER_PORT:-5001}"
5052
- "7863:7863"
53+
networks:
54+
- public
5155

5256
ollama_demo_service:
5357
image: ghcr.io/amperecomputingai/ollama-ampere:1.0.0-ol9
5458
container_name: ollama_demo_service
5559
volumes:
5660
- ollama:/root/.ollama
61+
ports:
62+
- "11434:11434"
5763
networks:
5864
- public
5965
environment:
60-
- "OLLAMA_HOST=0.0.0.0:11434"
66+
- "OLLAMA_HOST=http://ollama_demo_service:11434"
6167
tty: true
6268
restart: unless-stopped
6369

@@ -83,10 +89,83 @@ services:
8389
- 'WEBUI_AUTH=False'
8490
restart: unless-stopped
8591

86-
volumes:
87-
ollama: {}
88-
open-webui: {}
92+
ollama_for_agent_service:
93+
image: ghcr.io/amperecomputingai/ollama-ampere:1.0.0-ol9
94+
container_name: ollama_for_agent_service
95+
restart: unless-stopped
96+
ports:
97+
- "11434:11434"
98+
networks:
99+
- public
100+
volumes:
101+
- n8n_ollama_data:/root/.ollama
102+
environment:
103+
- "OLLAMA_HOST=http://ollama_for_agent_service:11434"
104+
tty: true
105+
entrypoint: "bash -c \"ollama serve & sleep 5 && ollama pull llama3.2:1b && wait\""
106+
107+
searxng:
108+
container_name: searxng
109+
image: docker.io/searxng/searxng:2025.9.23-a57b29b00
110+
user: "977:977"
111+
restart: unless-stopped
112+
ports:
113+
- "8081:8080"
114+
volumes:
115+
- ./searxng:/etc/searxng:rw
116+
- n8n_searxng:/var/cache/searxng:rw
117+
- /var/run/docker.sock:/var/run/docker.sock
118+
networks:
119+
- public
120+
environment:
121+
- SEARXNG_BASE_URL=http://searxng:8080
122+
- SEARXNG_DEBUG=1
123+
- SEARXNG_HOSTNAME=searxng
124+
logging:
125+
driver: "json-file"
126+
options:
127+
max-size: "1m"
128+
max-file: "1"
129+
130+
agentic_ai_demo_service:
131+
#image: ghcr.io/amperecomputingai/ampere-ai-agents:0.1.3
132+
image: ghcr.io/amperecomputingai/ampere-ai-agents:0.1.1
133+
build:
134+
context: .
135+
dockerfile: Dockerfile
136+
container_name: agentic_ai_demo_service
137+
depends_on:
138+
- ollama_for_agent_service
139+
- searxng
140+
restart: always
141+
ports:
142+
- "7864:5678"
143+
networks:
144+
- public
145+
volumes:
146+
- n8n_data:/home/node/.n8n
147+
- /var/run/docker.sock:/var/run/docker.sock
148+
environment:
149+
- NODE_ENV=development
150+
- N8N_BASIC_AUTH_ACTIVE=false # Disables authentication
151+
- N8N_HOST=localhost
152+
- N8N_PORT=5678
153+
- N8N_PROTOCOL=http
154+
- N8N_USER_MANAGEMENT_DISABLED=true
155+
- N8N_READ_ONLY=true
156+
- N8N_INITIAL_SETUP_COMPLETED=true
157+
- N8N_LOG_LEVEL=debug
158+
- N8N_COMMUNITY_PACKAGES_ENABLED=true
159+
- N8N_UNVERIFIED_COMMUNITY_PACKAGES_ENABLED=true
160+
- N8N_COMMUNITY_PACKAGES_ALLOW_TOOL_USAGE=true
89161

90162
networks:
91163
public:
92164
driver: bridge
165+
166+
volumes:
167+
ollama: {}
168+
open-webui: {}
169+
n8n_data: {}
170+
n8n_ollama_data: {}
171+
n8n_searxng: {}

0 commit comments

Comments (0)