forked from Mcrich23/Container-Compose
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathrun-tests.sh
More file actions
executable file
·416 lines (369 loc) · 14.7 KB
/
run-tests.sh
File metadata and controls
executable file
·416 lines (369 loc) · 14.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
#!/bin/bash
# run-tests.sh - Unified Test Runner for Container-Compose
# Manifest-driven orchestrator with build lock, cleanup, telemetry, and validation.
#
# Usage: ./run-tests.sh [--auto-clean] [test-filter]
#
# Reads tests-manifest.json for target definitions, timeouts, and expected counts.
# Replaces the old split between run-tests.sh and run-tests-full.sh.
# NOTE(review): `-e` is deliberately absent — individual target failures are
# tallied and reported at the end instead of aborting the run; confirm intended.
set -uo pipefail
# Resolve the directory holding this script and run from there so relative
# paths (.build, logs/, scripts/) work regardless of the caller's cwd.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
MANIFEST="$SCRIPT_DIR/tests-manifest.json"   # target definitions, timeouts, expected counts
LOG_DIR="$SCRIPT_DIR/logs"                   # every run artifact lands here
mkdir -p "$LOG_DIR"
# Single timestamp for the whole run; reused in every log/summary filename.
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
TIER_LOG="$LOG_DIR/tiered_output_${TIMESTAMP}.txt"
# ============================================================================
# BUILD MUTEX: Prevent concurrent Swift builds using flock
# ============================================================================
BUILD_LOCK_FILE="/tmp/container-compose-test.lock"

# Take the inter-process build mutex.
# Strategy 1: flock(1) held on fd 200 (atomic, released automatically on death).
# Strategy 2 (no flock, e.g. stock macOS): best-effort PID file — wait for a
# live holder to exit, then claim the file. Not race-free, but adequate here.
acquire_build_lock() {
    if command -v flock &> /dev/null; then
        # fd 200 stays open for the life of the script; the lock rides on it.
        exec 200>"$BUILD_LOCK_FILE"
        if flock -n 200 2>/dev/null; then
            echo "✓ Build lock acquired"
        else
            echo "🔒 Waiting for another build..."
            flock 200 2>/dev/null || true   # blocking wait for the holder
            echo "✓ Build lock acquired after wait"
        fi
        return
    fi
    # PID-file fallback: block while a live holder exists, then claim.
    if [ -f "$BUILD_LOCK_FILE" ]; then
        local holder_pid
        holder_pid=$(cat "$BUILD_LOCK_FILE" 2>/dev/null)
        if [ -n "$holder_pid" ] && kill -0 "$holder_pid" 2>/dev/null; then
            echo "🔒 Another build running (PID: $holder_pid) — waiting..."
            while kill -0 "$holder_pid" 2>/dev/null; do sleep 1; done
        fi
    fi
    echo $$ > "$BUILD_LOCK_FILE"
}
# Drop the build mutex: closing fd 200 releases any flock held on it, and the
# PID file from the fallback path is removed. Safe to call when nothing is held.
release_build_lock() {
    { exec 200>&-; } 2>/dev/null || true
    rm -f -- "$BUILD_LOCK_FILE" 2>/dev/null || true
}
acquire_build_lock
# NOTE(review): this EXIT trap is later OVERWRITTEN by
# `trap victoria_cleanup EXIT INT TERM` below (bash keeps one handler per
# signal), so release_build_lock will not fire on exit unless that handler
# calls it — confirm the lock is actually released at end of run.
trap release_build_lock EXIT
# ============================================================================
# VICTORIA PROTOCOL: RUN_ID for surgical container tracking
# ============================================================================
# A per-run tag (PID-derived) exported under several names so child tools can
# label the containers/telemetry they create for targeted cleanup later.
RUN_ID="t$$"
export RUN_ID
export CCT_RUN_ID="$RUN_ID"
export TELEMETRY_RUN_ID="$RUN_ID"
export CONTAINER_COMPOSE_SKIP_AMFI="1"
echo "=========================================="
echo "Container-Compose Test Runner"
echo "=========================================="
echo "RUN_ID: $RUN_ID"
echo "Started: $(date)"
echo ""
echo "Log files:"
echo " Output: $TIER_LOG"
echo " Telemetry: $LOG_DIR/resource_usage_${RUN_ID}_${TIMESTAMP}.csv"
echo ""
echo " tail -f $TIER_LOG"
echo ""
# ============================================================================
# PARSE FLAGS
# ============================================================================
# Supported: --auto-clean, --filter=<pattern>; a bare word also acts as the
# test filter. Unrecognized dash-flags are deliberately ignored.
AUTO_CLEAN=false
USER_FILTER=""
for arg in "$@"; do
    case "$arg" in
        --filter=*)
            USER_FILTER="${arg#--filter=}"
            ;;
        --auto-clean)
            AUTO_CLEAN=true
            export AUTO_CLEAN
            ;;
        -*)
            # Unknown flag: silently ignored (forward compatibility).
            ;;
        *)
            USER_FILTER="$arg"
            ;;
    esac
done
# ============================================================================
# LOAD LIBRARIES & ENVIRONMENT
# ============================================================================
# Shared helpers: container cleanup primitives, test-runner utilities, and
# environment setup (presumably defines OCI_REGISTRY_URL among others — see
# the export below; verify against scripts/env-setup.sh).
source "$SCRIPT_DIR/scripts/lib/container-cleanup.sh"
source "$SCRIPT_DIR/scripts/lib/test-runner.sh"
source "$SCRIPT_DIR/scripts/env-setup.sh"
setup_test_logging "$SCRIPT_DIR"
# Stale SwiftPM lock cleanup
# Remove leftover SwiftPM lock files, but only when no swift build processes
# are alive anywhere on the machine (pgrep is system-wide, not repo-scoped).
for lock in ".build/.lock" ".build/index-build/.lock"; do
if [ -f "$SCRIPT_DIR/$lock" ]; then
if ! pgrep -x "swift-build" > /dev/null 2>&1 && ! pgrep -x "swift-frontend" > /dev/null 2>&1; then
rm -f "$SCRIPT_DIR/$lock" 2>/dev/null || true
echo "✓ Removed stale lock: $lock"
fi
fi
done
# Export OCI_REGISTRY_URL
export OCI_REGISTRY_URL
# ============================================================================
# CLEANUP (Victoria Protocol)
# ============================================================================
# EXIT/INT/TERM handler: runs the surgical cleanup orchestrator for this
# RUN_ID (falling back to the generic library cleanup), then tears down the
# run's background resources. NOTE: installing this handler on EXIT *replaces*
# the earlier `trap release_build_lock EXIT`, so the build mutex and the
# resource monitor must be released here — previously the lock leaked.
victoria_cleanup() {
    local exit_code=$?
    echo ""
    echo "=========================================="
    echo "Victoria Protocol: Surgical Cleanup"
    echo "=========================================="
    if [ -f "$SCRIPT_DIR/scripts/cleanup-orchestrator.sh" ]; then
        bash "$SCRIPT_DIR/scripts/cleanup-orchestrator.sh" "$RUN_ID" --graceful
    else
        cleanup_test_containers
    fi
    # Stop the background resource monitor if it was started (an early
    # failure can trigger this trap before the monitor exists).
    if [[ -n "${MONITOR_PID:-}" ]] && declare -F cleanup_monitor > /dev/null; then
        cleanup_monitor
    fi
    # Release the build mutex explicitly — its own EXIT trap was displaced
    # when this handler was installed.
    release_build_lock 2>/dev/null || true
    exit $exit_code
}
# Installing on EXIT here replaces the earlier release_build_lock EXIT trap
# (bash keeps exactly one handler per signal), so victoria_cleanup is the
# sole teardown path from this point on.
trap victoria_cleanup EXIT INT TERM
echo "Pre-flight: Purging orphaned containers..."
# From scripts/lib/container-cleanup.sh — clears leftovers from prior runs.
aggressive_cleanup_before_tests
echo ""
# ============================================================================
# MANIFEST VALIDATION
# ============================================================================
if [[ ! -f "$MANIFEST" ]]; then
    echo "ERROR: Manifest not found: $MANIFEST"
    exit 1
fi
# Print manifest summary. The manifest path travels via argv (quoted heredoc)
# instead of being string-interpolated into the Python source — a quote or
# backslash in the path could previously break or inject into the script.
python3 - "$MANIFEST" << 'PYEOF'
import json
import sys

with open(sys.argv[1]) as f:
    m = json.load(f)
print(f"Project: {m['project']}")
print(f"Toolchain: {m.get('toolchain', 'unknown')}")
print(f"Targets: {len(m['targets'])}")
total_expected = sum(t['expected_count'] for t in m['targets'])
print(f"Total expected tests: {total_expected}")
for ki in m.get('known_issues', []):
    print(f" Known issue: {ki['id']} — {ki['description']}")
PYEOF
echo ""
# ============================================================================
# TEST PORTS
# ============================================================================
# Fixed high ports for test services; each may be overridden from the
# caller's environment. Table-driven so adding one is a single line.
_test_port_defaults=(
    "TEST_PORT_WORDPRESS:18080"
    "TEST_PORT_WEB:18081"
    "TEST_PORT_GATEWAY:18082"
    "TEST_PORT_API:18083"
    "TEST_PORT_APP:13000"
    "TEST_PORT_WEB2:18084"
)
for _entry in "${_test_port_defaults[@]}"; do
    _name="${_entry%%:*}"
    _fallback="${_entry#*:}"
    # Indirect ${!_name:-...} keeps any value already present in the env.
    export "$_name=${!_name:-$_fallback}"
done
unset _test_port_defaults _entry _name _fallback
# ============================================================================
# RESOURCE TELEMETRY
# ============================================================================
# Background sampler writes CSV rows for the duration of the run; its PID is
# kept so cleanup_monitor below can stop and reap it.
RESOURCE_LOG="$LOG_DIR/resource_usage_${RUN_ID}_${TIMESTAMP}.csv"
echo "Resource Telemetry: $RESOURCE_LOG"
echo ""
"$SCRIPT_DIR/scripts/resource-monitor.sh" "$RESOURCE_LOG" &
MONITOR_PID=$!
# Terminate the background resource monitor and reap it so no zombie is left
# behind; tolerates the monitor having already exited on its own.
cleanup_monitor() {
    kill "$MONITOR_PID" 2>/dev/null || true
    wait "$MONITOR_PID" 2>/dev/null || true
}
# ============================================================================
# XPC HEALTH CHECK
# ============================================================================
# Probe the Apple container daemon through the already-built debug binary.
# Purely informational: probe failures are tolerated and the run continues.
check_xpc_health() {
    local binary="$SCRIPT_DIR/.build/debug/Container-Compose"
    echo "=========================================="
    echo "XPC HEALTH CHECK: Apple Container Daemon"
    echo "=========================================="
    if [ -f "$binary" ]; then
        "$binary" xpc-health 2>&1 || true
    else
        echo "⚠️ Built binary not found — skipping XPC health check"
    fi
    echo ""
}
check_xpc_health
# ============================================================================
# GENERATE TARGET LIST FROM MANIFEST
# ============================================================================
# Flatten each manifest target into one pipe-delimited record:
#   name|filter|parallel|expected_count|requires_containers|timeout_seconds
# Wrapped in a function for testability; the manifest path travels via argv
# (quoted heredoc) so odd characters in the path cannot corrupt or inject
# into the generated Python, as the old string interpolation could.
generate_targets_file() {
    python3 - "$MANIFEST" << 'PYEOF'
import json
import sys

with open(sys.argv[1]) as f:
    m = json.load(f)
for t in m['targets']:
    fields = (
        t['name'],
        t['filter'],
        str(t.get('parallel', False)).lower(),
        t['expected_count'],
        str(t.get('requires_containers', False)).lower(),
        t.get('timeout_seconds', 300),
    )
    print('|'.join(str(x) for x in fields))
PYEOF
}
TARGETS_FILE=$(mktemp)
generate_targets_file > "$TARGETS_FILE"
# ============================================================================
# MANIFEST-DRIVEN TEST EXECUTION
# ============================================================================
# Counters aggregated across all targets for the final summary & JSON report.
TOTAL_TESTS=0
TOTAL_PASSED=0
TOTAL_FAILED=0
TOTAL_SKIPPED=0
declare -a FAILED_TARGETS=()
declare -a SKIPPED_TARGETS=()
TEST_EXIT_CODE=0
if [[ -n "$USER_FILTER" ]]; then
# User filter mode — run single target
# stdbuf -oL keeps output line-buffered for live tailing.
# NOTE(review): stdbuf is GNU coreutils and may be absent on stock macOS —
# confirm it exists in the environments this script targets.
echo "=========================================="
echo "Running with user filter: $USER_FILTER"
echo "=========================================="
stdbuf -oL swift test --no-parallel --filter "$USER_FILTER" 2>&1 | tee -a "$TIER_LOG"
# PIPESTATUS[0] recovers swift test's exit status, which tee would mask.
TEST_EXIT_CODE=${PIPESTATUS[0]}
else
# Manifest-driven: run each target in priority order
# One pipe-delimited record per target, produced by the generation step above.
while IFS='|' read -r name filter parallel expected requires timeout; do
[[ -z "$name" ]] && continue
echo ""
echo "=========================================="
echo "Target: $name"
echo "Expected: $expected tests | Timeout: ${timeout}s"
echo "=========================================="
# Skip container-dependent targets if OCI_REGISTRY_URL not set
if [[ "$requires" == "true" ]] && [[ -z "${OCI_REGISTRY_URL:-}" ]]; then
echo "⚠️ SKIPPED: OCI_REGISTRY_URL not set"
SKIPPED_TARGETS+=("$name")
TOTAL_SKIPPED=$((TOTAL_SKIPPED + expected))
continue
fi
# Parallel mode from manifest
# ("local_" prefix is historical — this is a plain global, not a `local`.)
local_parallel_args="--no-parallel"
if [[ "$parallel" == "true" ]]; then
local_parallel_args="--parallel --num-workers 2"
fi
target_log="$LOG_DIR/${name}_${TIMESTAMP}.log"
echo "Command: swift test $local_parallel_args --filter \"$filter\""
# Run with timeout
# $local_parallel_args is deliberately unquoted so it word-splits into flags.
# NOTE(review): $! of a background pipeline is the PID of its LAST stage
# (tee), so the timeout kill below targets tee, not `swift test` itself —
# the test process may outlive the timeout; consider killing the process group.
swift test $local_parallel_args --filter "$filter" 2>&1 | tee "$target_log" &
TEST_PID=$!
elapsed=0
interval=5
# Poll every $interval seconds; hard-kill once the manifest timeout elapses.
while kill -0 $TEST_PID 2>/dev/null; do
if [[ $elapsed -ge $timeout ]]; then
echo ""
echo "🚨 TIMEOUT after ${timeout}s — killing process"
kill -9 $TEST_PID 2>/dev/null || true
wait $TEST_PID 2>/dev/null || true
break
fi
sleep $interval
elapsed=$((elapsed + interval))
done
wait $TEST_PID 2>/dev/null || true
# Parse results from log
# NOTE(review): these greps assume an XCTest-style summary line of the form
# "Executed N tests, with M failures" — verify they also match the output of
# the Swift Testing framework if any target uses it.
if [[ -f "$target_log" ]]; then
executed=$(grep -E "Executed [0-9]+ tests" "$target_log" | tail -1 | grep -oE "Executed [0-9]+" | grep -oE "[0-9]+" || echo "0")
failures=$(grep -E "[0-9]+ failures" "$target_log" | tail -1 | grep -oE "[0-9]+ failures" | grep -oE "[0-9]+" || echo "0")
# NOTE(review): head -1 takes the FIRST number on the matched line, which may
# not be the skipped count if other numbers precede it on that line — confirm.
skipped_line=$(grep -E "[0-9]+ tests skipped" "$target_log" | tail -1 | grep -oE "[0-9]+" | head -1 || echo "0")
passed=$((executed - failures - skipped_line))
echo ""
echo "Executed: $executed | Passed: $passed | Skipped: $skipped_line | Failed: $failures"
if [[ "$executed" == "0" ]]; then
# Zero executed almost always means a build/filter problem, not a green run,
# so the target's full expected count is charged as failed.
echo "🚨 FAIL: 0 tests executed (expected $expected)"
FAILED_TARGETS+=("$name")
TOTAL_FAILED=$((TOTAL_FAILED + expected))
TEST_EXIT_CODE=1
elif [[ "$failures" != "0" ]]; then
echo "🚨 FAIL: $failures tests failed"
FAILED_TARGETS+=("$name")
TOTAL_FAILED=$((TOTAL_FAILED + failures))
TEST_EXIT_CODE=1
else
echo "✓ PASS: All $executed tests passed"
fi
TOTAL_TESTS=$((TOTAL_TESTS + executed))
TOTAL_PASSED=$((TOTAL_PASSED + passed))
else
echo "🚨 FAIL: No log file created"
FAILED_TARGETS+=("$name")
TOTAL_FAILED=$((TOTAL_FAILED + expected))
TEST_EXIT_CODE=1
fi
done < "$TARGETS_FILE"
fi
rm -f "$TARGETS_FILE"
# ============================================================================
# COPY LOG & STOP MONITOR
# ============================================================================
# NOTE(review): TIER_LOG is only written to in the user-filter branch (tee -a
# above); in manifest mode output goes to per-target logs instead, so this
# copy may be empty — confirm whether the dashboard below expects that.
cp "$TIER_LOG" "$LOG_DIR/test_output_${TIMESTAMP}.txt"
cleanup_monitor
# ============================================================================
# FINAL SUMMARY
# ============================================================================
echo ""
echo "=========================================="
echo "FINAL SUMMARY"
echo "=========================================="
echo "Total executed: $TOTAL_TESTS"
echo "Passed: $TOTAL_PASSED"
echo "Failed: $TOTAL_FAILED"
echo "Skipped targets: ${#SKIPPED_TARGETS[@]}"
if [[ ${#SKIPPED_TARGETS[@]} -gt 0 ]]; then
echo ""
echo "Skipped targets:"
for t in "${SKIPPED_TARGETS[@]}"; do echo " - $t"; done
fi
if [[ ${#FAILED_TARGETS[@]} -gt 0 ]]; then
echo ""
echo "Failed targets:"
for t in "${FAILED_TARGETS[@]}"; do echo " - $t"; done
fi
# Write JSON summary
# Render a shell array as a JSON string array: each element individually
# quoted and escaped, elements comma-separated. (The old `IFS=,` + single
# printf fused every element into ONE string — e.g. ["a,b"] — and produced
# [""] instead of [] for an empty array.)
json_array() {
  local joined="" item
  for item in "$@"; do
    item=${item//\\/\\\\}   # escape backslashes first
    item=${item//\"/\\\"}   # then embedded double quotes
    joined+="\"${item}\","
  done
  printf '%s' "${joined%,}"
}
summary_file="$LOG_DIR/summary_${TIMESTAMP}.json"
# ${arr[@]+...} guards the expansion of possibly-empty arrays under `set -u`
# (required for bash < 4.4, e.g. the stock macOS bash 3.2).
cat > "$summary_file" << EOF
{
  "timestamp": "$TIMESTAMP",
  "run_id": "$RUN_ID",
  "total_executed": $TOTAL_TESTS,
  "total_passed": $TOTAL_PASSED,
  "total_failed": $TOTAL_FAILED,
  "skipped_targets": [$(json_array ${SKIPPED_TARGETS[@]+"${SKIPPED_TARGETS[@]}"})],
  "failed_targets": [$(json_array ${FAILED_TARGETS[@]+"${FAILED_TARGETS[@]}"})],
  "status": "$([ $TOTAL_FAILED -eq 0 ] && echo PASS || echo FAIL)"
}
EOF
echo ""
echo "Summary: $summary_file"
# ============================================================================
# RESOURCE TELEMETRY SUMMARY
# ============================================================================
echo ""
echo "=========================================="
echo "Resource Usage"
echo "=========================================="
# Summarize the monitor CSV (only when it exists and is non-empty).
# Assumes column 2 is free memory in MB and the final column is a pressure
# level where 2 = critical — TODO confirm against scripts/resource-monitor.sh.
if [[ -f "$RESOURCE_LOG" ]] && [[ -s "$RESOURCE_LOG" ]]; then
min_free=$(tail -n +2 "$RESOURCE_LOG" | cut -d',' -f2 | sort -n | head -1)
max_free=$(tail -n +2 "$RESOURCE_LOG" | cut -d',' -f2 | sort -n | tail -1)
avg_free=$(tail -n +2 "$RESOURCE_LOG" | cut -d',' -f2 | awk '{sum+=$1} END {printf "%.0f", sum/NR}')
# ",2$" matches rows whose last field is 2 (counted as critical samples).
critical=$(grep ",2$" "$RESOURCE_LOG" | wc -l | tr -d ' ')
total=$(tail -n +2 "$RESOURCE_LOG" | wc -l | tr -d ' ')
echo "Memory (free MB): min=$min_free, max=$max_free, avg=$avg_free"
echo "Critical pressure samples: $critical / $total"
# Flag runs where free memory dipped below 500 MB at any sample point.
if [[ -n "$min_free" ]] && [[ "$min_free" -lt 500 ]]; then
echo "⚠️ WARNING: Low memory detected — failures may be OOM, not logic bugs"
fi
echo "Log: $RESOURCE_LOG"
else
echo "No telemetry data collected"
fi
# ============================================================================
# PERFORMANCE DASHBOARD
# ============================================================================
# Best-effort post-run analysis: skipped silently when python3 or the captured
# output log is missing, and analyzer errors are downgraded to a notice.
if command -v python3 &> /dev/null && [[ -f "$LOG_DIR/test_output_${TIMESTAMP}.txt" ]]; then
    echo ""
    echo "=========================================="
    echo "Performance Dashboard"
    echo "=========================================="
    python3 "$SCRIPT_DIR/scripts/analyze-performance.py" \
        "$LOG_DIR/test_output_${TIMESTAMP}.txt" \
        "$RESOURCE_LOG" \
        --run-id "$RUN_ID" 2>/dev/null || echo "Performance analysis unavailable"
fi
echo ""
echo "Completed: $(date)"
echo "=========================================="
# Exit status mirrors the aggregate failure count so CI can gate on it.
if (( TOTAL_FAILED > 0 )); then
    echo "⚠️ SOME TESTS FAILED ($TOTAL_FAILED)"
    exit 1
else
    echo "✅ ALL $TOTAL_PASSED TESTS PASSED"
    exit 0
fi