diff --git a/.changeset/patch-add-import-schema-uses-with.md b/.changeset/patch-add-import-schema-uses-with.md
new file mode 100644
index 0000000000..45c85d2254
--- /dev/null
+++ b/.changeset/patch-add-import-schema-uses-with.md
@@ -0,0 +1,7 @@
+---
+"gh-aw": patch
+---
+
+Add support for workflow imports using `uses`/`with` syntax with `import-schema` validation, including typed input validation and `github.aw.import-inputs.*` expression support in imported content.
+
+Deprecate `tools.serena` in favor of `mcp-servers.serena` via shared Serena workflows, and migrate bundled workflows to `shared/mcp/serena.md` and `shared/mcp/serena-go.md`.
diff --git a/.github/workflows/archie.lock.yml b/.github/workflows/archie.lock.yml
index f152573229..c88e4b66cb 100644
--- a/.github/workflows/archie.lock.yml
+++ b/.github/workflows/archie.lock.yml
@@ -25,8 +25,9 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"f329d3c08ef93b191f2d36de24b506dafe7791237820e1f316793c37babda6a2","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"4f7e5c41cd2f17c529fb86458709bbce3bf49628d87bd079f9b43be66051fdc3","strict":true,"agent_id":"copilot"}
name: "Archie"
"on":
@@ -183,14 +184,14 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_30a9c6a3242e32e4_EOF'
+ cat << 'GH_AW_PROMPT_83a341c5f65cdb55_EOF'
- GH_AW_PROMPT_30a9c6a3242e32e4_EOF
+ GH_AW_PROMPT_83a341c5f65cdb55_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_30a9c6a3242e32e4_EOF'
+ cat << 'GH_AW_PROMPT_83a341c5f65cdb55_EOF'
Tools: add_comment, missing_tool, missing_data, noop
@@ -222,29 +223,62 @@ jobs:
{{/if}}
- GH_AW_PROMPT_30a9c6a3242e32e4_EOF
+ GH_AW_PROMPT_83a341c5f65cdb55_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then
cat "${RUNNER_TEMP}/gh-aw/prompts/pr_context_prompt.md"
fi
- cat << 'GH_AW_PROMPT_30a9c6a3242e32e4_EOF'
+ cat << 'GH_AW_PROMPT_83a341c5f65cdb55_EOF'
- GH_AW_PROMPT_30a9c6a3242e32e4_EOF
- cat << 'GH_AW_PROMPT_30a9c6a3242e32e4_EOF'
+ GH_AW_PROMPT_83a341c5f65cdb55_EOF
+ cat << 'GH_AW_PROMPT_83a341c5f65cdb55_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_83a341c5f65cdb55_EOF
+ cat << 'GH_AW_PROMPT_83a341c5f65cdb55_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_30a9c6a3242e32e4_EOF
- cat << 'GH_AW_PROMPT_30a9c6a3242e32e4_EOF'
+ GH_AW_PROMPT_83a341c5f65cdb55_EOF
+ cat << 'GH_AW_PROMPT_83a341c5f65cdb55_EOF'
{{#runtime-import .github/workflows/archie.md}}
- GH_AW_PROMPT_30a9c6a3242e32e4_EOF
+ GH_AW_PROMPT_83a341c5f65cdb55_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_ACTOR: ${{ github.actor }}
GH_AW_EXPR_799BE623: ${{ github.event.issue.number || github.event.pull_request.number }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
- GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: ${{ steps.sanitized.outputs.text }}
with:
script: |
@@ -415,12 +449,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_099f1ddd4173164e_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_4624e7d6e465cf69_EOF'
{"add_comment":{"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_099f1ddd4173164e_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_4624e7d6e465cf69_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_939305e6b722e2b7_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_83f194910baacdef_EOF'
{
"description_suffixes": {
"add_comment": " CONSTRAINTS: Maximum 1 comment(s) can be added."
@@ -428,8 +462,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_939305e6b722e2b7_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_2a42759f204cd001_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_83f194910baacdef_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_18ad0b906facb805_EOF'
{
"add_comment": {
"defaultMax": 1,
@@ -507,7 +541,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_2a42759f204cd001_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_18ad0b906facb805_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -575,7 +609,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_8cdecb3d52491267_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_56eaa8772cc3b8fe_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -611,10 +645,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -631,7 +679,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_8cdecb3d52491267_EOF
+ GH_AW_MCP_CONFIG_56eaa8772cc3b8fe_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/cloclo.lock.yml b/.github/workflows/cloclo.lock.yml
index edb2501f5e..7c12eb828f 100644
--- a/.github/workflows/cloclo.lock.yml
+++ b/.github/workflows/cloclo.lock.yml
@@ -25,8 +25,9 @@
# Imports:
# - shared/jqschema.md
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"ad111cf099d958c340ed839a8e88f5053a501ce37fb239bdc2645ee1a872d14a","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"e42b64ea2fb20f9c883549780d089111742a80762ff916976fd105379c709842","strict":true,"agent_id":"claude"}
name: "/cloclo"
"on":
@@ -220,9 +221,9 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_6a693d543d77aba7_EOF'
+ cat << 'GH_AW_PROMPT_bf19b3b49e68a662_EOF'
- GH_AW_PROMPT_6a693d543d77aba7_EOF
+ GH_AW_PROMPT_bf19b3b49e68a662_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -230,12 +231,12 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_6a693d543d77aba7_EOF'
+ cat << 'GH_AW_PROMPT_bf19b3b49e68a662_EOF'
Tools: add_comment, create_pull_request, missing_tool, missing_data, noop
- GH_AW_PROMPT_6a693d543d77aba7_EOF
+ GH_AW_PROMPT_bf19b3b49e68a662_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_create_pull_request.md"
- cat << 'GH_AW_PROMPT_6a693d543d77aba7_EOF'
+ cat << 'GH_AW_PROMPT_bf19b3b49e68a662_EOF'
The following GitHub context information is available for this workflow:
@@ -265,28 +266,62 @@ jobs:
{{/if}}
- GH_AW_PROMPT_6a693d543d77aba7_EOF
+ GH_AW_PROMPT_bf19b3b49e68a662_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then
cat "${RUNNER_TEMP}/gh-aw/prompts/pr_context_prompt.md"
fi
- cat << 'GH_AW_PROMPT_6a693d543d77aba7_EOF'
+ cat << 'GH_AW_PROMPT_bf19b3b49e68a662_EOF'
- GH_AW_PROMPT_6a693d543d77aba7_EOF
- cat << 'GH_AW_PROMPT_6a693d543d77aba7_EOF'
+ GH_AW_PROMPT_bf19b3b49e68a662_EOF
+ cat << 'GH_AW_PROMPT_bf19b3b49e68a662_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_bf19b3b49e68a662_EOF
+ cat << 'GH_AW_PROMPT_bf19b3b49e68a662_EOF'
{{#runtime-import .github/workflows/shared/jqschema.md}}
- GH_AW_PROMPT_6a693d543d77aba7_EOF
- cat << 'GH_AW_PROMPT_6a693d543d77aba7_EOF'
+ GH_AW_PROMPT_bf19b3b49e68a662_EOF
+ cat << 'GH_AW_PROMPT_bf19b3b49e68a662_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_6a693d543d77aba7_EOF
- cat << 'GH_AW_PROMPT_6a693d543d77aba7_EOF'
+ GH_AW_PROMPT_bf19b3b49e68a662_EOF
+ cat << 'GH_AW_PROMPT_bf19b3b49e68a662_EOF'
{{#runtime-import .github/workflows/cloclo.md}}
- GH_AW_PROMPT_6a693d543d77aba7_EOF
+ GH_AW_PROMPT_bf19b3b49e68a662_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_ACTOR: ${{ github.actor }}
GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }}
GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }}
@@ -547,12 +582,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_69437f0f6506cb2c_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_810dc3a4b344eff4_EOF'
{"add_comment":{"max":1},"create_pull_request":{"excluded_files":[".github/workflows/*.lock.yml"],"expires":48,"labels":["automation","cloclo"],"max":1,"max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS"],"protected_files_policy":"fallback-to-issue","protected_path_prefixes":[".github/",".agents/"],"title_prefix":"[cloclo] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_69437f0f6506cb2c_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_810dc3a4b344eff4_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_ccf60b8dad31a5f5_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_407c3353b2fc7ba5_EOF'
{
"description_suffixes": {
"add_comment": " CONSTRAINTS: Maximum 1 comment(s) can be added.",
@@ -561,8 +596,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_ccf60b8dad31a5f5_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_dcda1f4a11de5ab9_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_407c3353b2fc7ba5_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_4c373e5740c7f0bf_EOF'
{
"add_comment": {
"defaultMax": 1,
@@ -676,7 +711,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_dcda1f4a11de5ab9_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_4c373e5740c7f0bf_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -745,7 +780,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat << GH_AW_MCP_CONFIG_fe27378e560ffd91_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_9b523dbe2a703a1c_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"agenticworkflows": {
@@ -820,11 +855,8 @@ jobs:
}
},
"serena": {
+ "type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
"entrypoint": "serena",
"entrypointArgs": [
"start-mcp-server",
@@ -833,7 +865,13 @@ jobs:
"--project",
"\${GITHUB_WORKSPACE}"
],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -850,7 +888,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_fe27378e560ffd91_EOF
+ GH_AW_MCP_CONFIG_9b523dbe2a703a1c_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -976,13 +1014,14 @@ jobs:
# - mcp__playwright__browser_take_screenshot
# - mcp__playwright__browser_type
# - mcp__playwright__browser_wait_for
+ # - mcp__serena
timeout-minutes: 20
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
# shellcheck disable=SC1003
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.1 --skip-pull --enable-api-proxy \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --max-turns 100 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(date),Bash(echo),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(git),Bash(grep),Bash(head),Bash(jq *),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__lis
t_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --max-turns 100 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(date),Bash(echo),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(git),Bash(grep),Bash(head),Bash(jq *),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__lis
t_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for,mcp__serena'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
BASH_DEFAULT_TIMEOUT_MS: 60000
diff --git a/.github/workflows/daily-compiler-quality.lock.yml b/.github/workflows/daily-compiler-quality.lock.yml
index b515b4cfe3..aa5719d894 100644
--- a/.github/workflows/daily-compiler-quality.lock.yml
+++ b/.github/workflows/daily-compiler-quality.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"43b61ccd4023929211202d467bd670da7af659d6869c4582f175822d3493c087","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"5a9f20ae90588d58e44d24939df67e2cac4a5b7f8914793ecca1886e4d62c025","strict":true,"agent_id":"copilot"}
name: "Daily Compiler Quality Check"
"on":
@@ -132,15 +133,15 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_0d4b45a4673705c1_EOF'
+ cat << 'GH_AW_PROMPT_b970e5e6eac6d8c6_EOF'
- GH_AW_PROMPT_0d4b45a4673705c1_EOF
+ GH_AW_PROMPT_b970e5e6eac6d8c6_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_0d4b45a4673705c1_EOF'
+ cat << 'GH_AW_PROMPT_b970e5e6eac6d8c6_EOF'
Tools: create_discussion, missing_tool, missing_data, noop
@@ -172,27 +173,60 @@ jobs:
{{/if}}
- GH_AW_PROMPT_0d4b45a4673705c1_EOF
+ GH_AW_PROMPT_b970e5e6eac6d8c6_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_0d4b45a4673705c1_EOF'
+ cat << 'GH_AW_PROMPT_b970e5e6eac6d8c6_EOF'
- GH_AW_PROMPT_0d4b45a4673705c1_EOF
- cat << 'GH_AW_PROMPT_0d4b45a4673705c1_EOF'
+ GH_AW_PROMPT_b970e5e6eac6d8c6_EOF
+ cat << 'GH_AW_PROMPT_b970e5e6eac6d8c6_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_b970e5e6eac6d8c6_EOF
+ cat << 'GH_AW_PROMPT_b970e5e6eac6d8c6_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_0d4b45a4673705c1_EOF
- cat << 'GH_AW_PROMPT_0d4b45a4673705c1_EOF'
+ GH_AW_PROMPT_b970e5e6eac6d8c6_EOF
+ cat << 'GH_AW_PROMPT_b970e5e6eac6d8c6_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_0d4b45a4673705c1_EOF
- cat << 'GH_AW_PROMPT_0d4b45a4673705c1_EOF'
+ GH_AW_PROMPT_b970e5e6eac6d8c6_EOF
+ cat << 'GH_AW_PROMPT_b970e5e6eac6d8c6_EOF'
{{#runtime-import .github/workflows/daily-compiler-quality.md}}
- GH_AW_PROMPT_0d4b45a4673705c1_EOF
+ GH_AW_PROMPT_b970e5e6eac6d8c6_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -369,12 +403,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_39f07e2db1dd9b70_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_fa8d111c1bf39606_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_39f07e2db1dd9b70_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_fa8d111c1bf39606_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_4a50ea31f575723b_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_70d1271248e2c4de_EOF'
{
"description_suffixes": {
"create_discussion": " CONSTRAINTS: Maximum 1 discussion(s) can be created. Discussions will be created in category \"audits\"."
@@ -382,8 +416,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_4a50ea31f575723b_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_e89b79c038164355_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_70d1271248e2c4de_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_3174de6c519b1020_EOF'
{
"create_discussion": {
"defaultMax": 1,
@@ -469,7 +503,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_e89b79c038164355_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_3174de6c519b1020_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -537,7 +571,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_e40c1268afe8925f_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_877a3804ecde64ab_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -573,10 +607,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -593,7 +641,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_e40c1268afe8925f_EOF
+ GH_AW_MCP_CONFIG_877a3804ecde64ab_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index 91f89dd7b1..d2623348ef 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -26,10 +26,11 @@
# Imports:
# - shared/activation-app.md
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
# - shared/safe-output-app.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"60f4cf6a578b2ab4852691e13e7c9bd5ab13c10366caeeb5dd264e40238e0f8c","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"731a8da7278ff49fd3bdc528122d07be9ab0a38c5d558b1d8b0c9324593dcc20","strict":true,"agent_id":"copilot"}
name: "Daily File Diet"
"on":
@@ -136,14 +137,14 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_b4a9e8ca276b0786_EOF'
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
- GH_AW_PROMPT_b4a9e8ca276b0786_EOF
+ GH_AW_PROMPT_644bf511152f4368_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_b4a9e8ca276b0786_EOF'
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
Tools: create_issue, missing_tool, missing_data, noop
@@ -175,33 +176,66 @@ jobs:
{{/if}}
- GH_AW_PROMPT_b4a9e8ca276b0786_EOF
+ GH_AW_PROMPT_644bf511152f4368_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_b4a9e8ca276b0786_EOF'
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
- GH_AW_PROMPT_b4a9e8ca276b0786_EOF
- cat << 'GH_AW_PROMPT_b4a9e8ca276b0786_EOF'
+ GH_AW_PROMPT_644bf511152f4368_EOF
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_644bf511152f4368_EOF
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
{{#runtime-import .github/workflows/shared/activation-app.md}}
- GH_AW_PROMPT_b4a9e8ca276b0786_EOF
- cat << 'GH_AW_PROMPT_b4a9e8ca276b0786_EOF'
+ GH_AW_PROMPT_644bf511152f4368_EOF
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_b4a9e8ca276b0786_EOF
- cat << 'GH_AW_PROMPT_b4a9e8ca276b0786_EOF'
+ GH_AW_PROMPT_644bf511152f4368_EOF
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
{{#runtime-import .github/workflows/shared/safe-output-app.md}}
- GH_AW_PROMPT_b4a9e8ca276b0786_EOF
- cat << 'GH_AW_PROMPT_b4a9e8ca276b0786_EOF'
+ GH_AW_PROMPT_644bf511152f4368_EOF
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_b4a9e8ca276b0786_EOF
- cat << 'GH_AW_PROMPT_b4a9e8ca276b0786_EOF'
+ GH_AW_PROMPT_644bf511152f4368_EOF
+ cat << 'GH_AW_PROMPT_644bf511152f4368_EOF'
{{#runtime-import .github/workflows/daily-file-diet.md}}
- GH_AW_PROMPT_b4a9e8ca276b0786_EOF
+ GH_AW_PROMPT_644bf511152f4368_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -364,12 +398,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_0ce3f06724b6d964_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_5dc57ba9bf0d98a6_EOF'
{"create_issue":{"expires":48,"labels":["refactoring","code-health","automated-analysis","cookie"],"max":1,"title_prefix":"[file-diet] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_0ce3f06724b6d964_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_5dc57ba9bf0d98a6_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_5a42881a59118bec_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_1eaa342570b4cc57_EOF'
{
"description_suffixes": {
"create_issue": " CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[file-diet] \". Labels [\"refactoring\" \"code-health\" \"automated-analysis\" \"cookie\"] will be automatically added."
@@ -377,8 +411,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_5a42881a59118bec_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_73ff4675147cd80c_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_1eaa342570b4cc57_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_94a11e8ba272b1e0_EOF'
{
"create_issue": {
"defaultMax": 1,
@@ -471,7 +505,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_73ff4675147cd80c_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_94a11e8ba272b1e0_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -539,7 +573,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_5d601dc71f5da81d_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_8ee2a3272550afae_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -575,10 +609,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -595,7 +643,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_5d601dc71f5da81d_EOF
+ GH_AW_MCP_CONFIG_8ee2a3272550afae_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/daily-function-namer.lock.yml b/.github/workflows/daily-function-namer.lock.yml
index e5ebc80922..29bc34c3ae 100644
--- a/.github/workflows/daily-function-namer.lock.yml
+++ b/.github/workflows/daily-function-namer.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"a85f10b6aae5ca037940831669d4eab46429191beb8ffd0ea4dc43d5473e2e41","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"3000e055e2609285d5978a0e5a7929aaa0a2e0349fb8633338acdac9368ec7b0","strict":true,"agent_id":"claude"}
name: "Daily Go Function Namer"
"on":
@@ -138,15 +139,15 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_7276b18a84f004a5_EOF'
+ cat << 'GH_AW_PROMPT_d74f3ecec71ef387_EOF'
- GH_AW_PROMPT_7276b18a84f004a5_EOF
+ GH_AW_PROMPT_d74f3ecec71ef387_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_7276b18a84f004a5_EOF'
+ cat << 'GH_AW_PROMPT_d74f3ecec71ef387_EOF'
Tools: create_issue, missing_tool, missing_data, noop
@@ -178,28 +179,61 @@ jobs:
{{/if}}
- GH_AW_PROMPT_7276b18a84f004a5_EOF
+ GH_AW_PROMPT_d74f3ecec71ef387_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_7276b18a84f004a5_EOF'
+ cat << 'GH_AW_PROMPT_d74f3ecec71ef387_EOF'
- GH_AW_PROMPT_7276b18a84f004a5_EOF
- cat << 'GH_AW_PROMPT_7276b18a84f004a5_EOF'
+ GH_AW_PROMPT_d74f3ecec71ef387_EOF
+ cat << 'GH_AW_PROMPT_d74f3ecec71ef387_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_d74f3ecec71ef387_EOF
+ cat << 'GH_AW_PROMPT_d74f3ecec71ef387_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_7276b18a84f004a5_EOF
- cat << 'GH_AW_PROMPT_7276b18a84f004a5_EOF'
+ GH_AW_PROMPT_d74f3ecec71ef387_EOF
+ cat << 'GH_AW_PROMPT_d74f3ecec71ef387_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_7276b18a84f004a5_EOF
- cat << 'GH_AW_PROMPT_7276b18a84f004a5_EOF'
+ GH_AW_PROMPT_d74f3ecec71ef387_EOF
+ cat << 'GH_AW_PROMPT_d74f3ecec71ef387_EOF'
{{#runtime-import .github/workflows/daily-function-namer.md}}
- GH_AW_PROMPT_7276b18a84f004a5_EOF
+ GH_AW_PROMPT_d74f3ecec71ef387_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
- GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -377,12 +411,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_27997ca4d76a55fe_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_b4262f43d1508282_EOF'
{"create_issue":{"close_older_issues":true,"expires":168,"labels":["refactoring","code-quality","automated-analysis","cookie"],"max":1,"title_prefix":"[function-namer] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_27997ca4d76a55fe_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_b4262f43d1508282_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_9c4266e242a5bea1_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_c995dcc4451d6716_EOF'
{
"description_suffixes": {
"create_issue": " CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[function-namer] \". Labels [\"refactoring\" \"code-quality\" \"automated-analysis\" \"cookie\"] will be automatically added."
@@ -390,8 +424,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_9c4266e242a5bea1_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_4fe343daf943fee7_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_c995dcc4451d6716_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_c2f6895f721f8c42_EOF'
{
"create_issue": {
"defaultMax": 1,
@@ -484,7 +518,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_4fe343daf943fee7_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_c2f6895f721f8c42_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -551,7 +585,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat << GH_AW_MCP_CONFIG_01c46a096a7eed62_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_689ec7840b2984fe_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -584,11 +618,8 @@ jobs:
}
},
"serena": {
+ "type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
"entrypoint": "serena",
"entrypointArgs": [
"start-mcp-server",
@@ -597,7 +628,13 @@ jobs:
"--project",
"\${GITHUB_WORKSPACE}"
],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -614,7 +651,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_01c46a096a7eed62_EOF
+ GH_AW_MCP_CONFIG_689ec7840b2984fe_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -697,13 +734,14 @@ jobs:
# - mcp__github__search_pull_requests
# - mcp__github__search_repositories
# - mcp__github__search_users
+ # - mcp__serena
timeout-minutes: 30
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
# shellcheck disable=SC1003
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.1 --skip-pull --enable-api-proxy \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash,BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_ru
ns,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash,BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_ru
ns,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__serena'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
BASH_DEFAULT_TIMEOUT_MS: 60000
diff --git a/.github/workflows/daily-mcp-concurrency-analysis.lock.yml b/.github/workflows/daily-mcp-concurrency-analysis.lock.yml
index fcd04809c2..79cb622be5 100644
--- a/.github/workflows/daily-mcp-concurrency-analysis.lock.yml
+++ b/.github/workflows/daily-mcp-concurrency-analysis.lock.yml
@@ -24,10 +24,11 @@
#
# Resolved workflow manifest:
# Imports:
+# - shared/mcp/serena.md
# - shared/reporting.md
# - shared/safe-output-app.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"b0c391a57f7bcad63b9ad97d567a945693c8c70fdd52192cd27b595bebd0cfe5","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"abbc78f06760a9f80b792eb99861f1073fb0ce6760c313e9f8de88c1d8819943","strict":true,"agent_id":"copilot"}
name: "Daily MCP Tool Concurrency Analysis"
"on":
@@ -131,15 +132,15 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_8c02d1d8ed68d57d_EOF'
+ cat << 'GH_AW_PROMPT_6be3719fff67b4b2_EOF'
- GH_AW_PROMPT_8c02d1d8ed68d57d_EOF
+ GH_AW_PROMPT_6be3719fff67b4b2_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_8c02d1d8ed68d57d_EOF'
+ cat << 'GH_AW_PROMPT_6be3719fff67b4b2_EOF'
Tools: create_issue(max:5), create_agent_session(max:3), missing_tool, missing_data, noop
@@ -171,27 +172,60 @@ jobs:
{{/if}}
- GH_AW_PROMPT_8c02d1d8ed68d57d_EOF
+ GH_AW_PROMPT_6be3719fff67b4b2_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_8c02d1d8ed68d57d_EOF'
+ cat << 'GH_AW_PROMPT_6be3719fff67b4b2_EOF'
- GH_AW_PROMPT_8c02d1d8ed68d57d_EOF
- cat << 'GH_AW_PROMPT_8c02d1d8ed68d57d_EOF'
+ GH_AW_PROMPT_6be3719fff67b4b2_EOF
+ cat << 'GH_AW_PROMPT_6be3719fff67b4b2_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go","typescript"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_6be3719fff67b4b2_EOF
+ cat << 'GH_AW_PROMPT_6be3719fff67b4b2_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_8c02d1d8ed68d57d_EOF
- cat << 'GH_AW_PROMPT_8c02d1d8ed68d57d_EOF'
+ GH_AW_PROMPT_6be3719fff67b4b2_EOF
+ cat << 'GH_AW_PROMPT_6be3719fff67b4b2_EOF'
{{#runtime-import .github/workflows/shared/safe-output-app.md}}
- GH_AW_PROMPT_8c02d1d8ed68d57d_EOF
- cat << 'GH_AW_PROMPT_8c02d1d8ed68d57d_EOF'
+ GH_AW_PROMPT_6be3719fff67b4b2_EOF
+ cat << 'GH_AW_PROMPT_6be3719fff67b4b2_EOF'
{{#runtime-import .github/workflows/daily-mcp-concurrency-analysis.md}}
- GH_AW_PROMPT_8c02d1d8ed68d57d_EOF
+ GH_AW_PROMPT_6be3719fff67b4b2_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -368,12 +402,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_8e051da124fb13fa_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_ad543985ac33b593_EOF'
{"create_agent_session":{"max":3},"create_issue":{"expires":168,"labels":["bug","concurrency","thread-safety","automated-analysis","cookie"],"max":5,"title_prefix":"[concurrency] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_8e051da124fb13fa_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_ad543985ac33b593_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_050a3d403709e22e_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_d4d4af26e253ed4c_EOF'
{
"description_suffixes": {
"create_agent_session": " CONSTRAINTS: Maximum 3 agent task(s) can be created.",
@@ -382,8 +416,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_050a3d403709e22e_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_73308452e8e2094c_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_d4d4af26e253ed4c_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_32243ff055d8bc26_EOF'
{
"create_agent_session": {
"defaultMax": 1,
@@ -491,7 +525,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_73308452e8e2094c_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_32243ff055d8bc26_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -559,7 +593,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_adead433492b9e92_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_2f4a72e87e1742ff_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -595,10 +629,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -615,7 +663,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_adead433492b9e92_EOF
+ GH_AW_MCP_CONFIG_2f4a72e87e1742ff_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/daily-mcp-concurrency-analysis.md b/.github/workflows/daily-mcp-concurrency-analysis.md
index e10d28cca8..303c9b16d3 100644
--- a/.github/workflows/daily-mcp-concurrency-analysis.md
+++ b/.github/workflows/daily-mcp-concurrency-analysis.md
@@ -17,6 +17,9 @@ engine: copilot
imports:
- shared/reporting.md
- shared/safe-output-app.md
+ - uses: shared/mcp/serena.md
+ with:
+ languages: ["go", "typescript"]
safe-outputs:
create-issue:
@@ -28,7 +31,6 @@ safe-outputs:
max: 3
tools:
- serena: ["go", "typescript"]
cache-memory: true
github:
toolsets: [default]
diff --git a/.github/workflows/daily-testify-uber-super-expert.lock.yml b/.github/workflows/daily-testify-uber-super-expert.lock.yml
index 9f4ee880d2..f6741cf5b7 100644
--- a/.github/workflows/daily-testify-uber-super-expert.lock.yml
+++ b/.github/workflows/daily-testify-uber-super-expert.lock.yml
@@ -26,10 +26,11 @@
# Imports:
# - shared/activation-app.md
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
# - shared/safe-output-app.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"a3ed12dda9bb93205831424a7912c78407da6aca5c47a7439ee05fa24b302b25","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"65c5280d82bc63e4a8c6d43a6dff00ad369d05be73f059245c9abbffbccf4b29","strict":true,"agent_id":"copilot"}
name: "Daily Testify Uber Super Expert"
"on":
@@ -138,15 +139,15 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_9cd0d08d488cf960_EOF'
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
- GH_AW_PROMPT_9cd0d08d488cf960_EOF
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/repo_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_9cd0d08d488cf960_EOF'
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
Tools: create_issue, missing_tool, missing_data, noop
@@ -178,33 +179,66 @@ jobs:
{{/if}}
- GH_AW_PROMPT_9cd0d08d488cf960_EOF
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_9cd0d08d488cf960_EOF'
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
- GH_AW_PROMPT_9cd0d08d488cf960_EOF
- cat << 'GH_AW_PROMPT_9cd0d08d488cf960_EOF'
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
{{#runtime-import .github/workflows/shared/activation-app.md}}
- GH_AW_PROMPT_9cd0d08d488cf960_EOF
- cat << 'GH_AW_PROMPT_9cd0d08d488cf960_EOF'
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_9cd0d08d488cf960_EOF
- cat << 'GH_AW_PROMPT_9cd0d08d488cf960_EOF'
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
{{#runtime-import .github/workflows/shared/safe-output-app.md}}
- GH_AW_PROMPT_9cd0d08d488cf960_EOF
- cat << 'GH_AW_PROMPT_9cd0d08d488cf960_EOF'
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_9cd0d08d488cf960_EOF
- cat << 'GH_AW_PROMPT_9cd0d08d488cf960_EOF'
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
+ cat << 'GH_AW_PROMPT_64b2e8da352883ea_EOF'
{{#runtime-import .github/workflows/daily-testify-uber-super-expert.md}}
- GH_AW_PROMPT_9cd0d08d488cf960_EOF
+ GH_AW_PROMPT_64b2e8da352883ea_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -389,12 +423,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_db7e6f9248d6f6a9_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_fec3ebe73938ee5e_EOF'
{"create_issue":{"expires":48,"labels":["testing","code-quality","automated-analysis","cookie"],"max":1,"title_prefix":"[testify-expert] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_repo_memory":{"memories":[{"dir":"/tmp/gh-aw/repo-memory/default","id":"default","max_file_count":100,"max_file_size":51200,"max_patch_size":10240}]}}
- GH_AW_SAFE_OUTPUTS_CONFIG_db7e6f9248d6f6a9_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_fec3ebe73938ee5e_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_9c3a13645ac3b914_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_157de227a9c2d849_EOF'
{
"description_suffixes": {
"create_issue": " CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[testify-expert] \". Labels [\"testing\" \"code-quality\" \"automated-analysis\" \"cookie\"] will be automatically added."
@@ -402,8 +436,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_9c3a13645ac3b914_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_9fde0b12988c80b3_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_157de227a9c2d849_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_4054bcac36c5b0e8_EOF'
{
"create_issue": {
"defaultMax": 1,
@@ -496,7 +530,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_9fde0b12988c80b3_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_4054bcac36c5b0e8_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -564,7 +598,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_9736c2d517dff6f2_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_a1dc2f73fb56e0e6_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -600,10 +634,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -620,7 +668,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_9736c2d517dff6f2_EOF
+ GH_AW_MCP_CONFIG_a1dc2f73fb56e0e6_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/developer-docs-consolidator.lock.yml b/.github/workflows/developer-docs-consolidator.lock.yml
index ae4342fe6c..37ddae0d57 100644
--- a/.github/workflows/developer-docs-consolidator.lock.yml
+++ b/.github/workflows/developer-docs-consolidator.lock.yml
@@ -26,9 +26,10 @@
# Imports:
# - shared/mcp/qmd-docs.md
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"c1e650c04e96de0cd691406ef377ad4c56a26c4bf5ebf0a9bf5bb0eab220cb1e","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"53b37729746a7d9dcabae428065d8cf9b72719da05ac547606213ba7a56108e3","strict":true,"agent_id":"claude"}
name: "Developer Documentation Consolidator"
"on":
@@ -139,9 +140,9 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_1fc3b90762b9701a_EOF'
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
- GH_AW_PROMPT_1fc3b90762b9701a_EOF
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -149,12 +150,12 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/repo_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_1fc3b90762b9701a_EOF'
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
Tools: create_discussion, create_pull_request, missing_tool, missing_data, noop
- GH_AW_PROMPT_1fc3b90762b9701a_EOF
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_create_pull_request.md"
- cat << 'GH_AW_PROMPT_1fc3b90762b9701a_EOF'
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
The following GitHub context information is available for this workflow:
@@ -184,30 +185,63 @@ jobs:
{{/if}}
- GH_AW_PROMPT_1fc3b90762b9701a_EOF
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_1fc3b90762b9701a_EOF'
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
- GH_AW_PROMPT_1fc3b90762b9701a_EOF
- cat << 'GH_AW_PROMPT_1fc3b90762b9701a_EOF'
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_1fc3b90762b9701a_EOF
- cat << 'GH_AW_PROMPT_1fc3b90762b9701a_EOF'
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_1fc3b90762b9701a_EOF
- cat << 'GH_AW_PROMPT_1fc3b90762b9701a_EOF'
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
{{#runtime-import .github/workflows/shared/mcp/qmd-docs.md}}
- GH_AW_PROMPT_1fc3b90762b9701a_EOF
- cat << 'GH_AW_PROMPT_1fc3b90762b9701a_EOF'
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
+ cat << 'GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF'
{{#runtime-import .github/workflows/developer-docs-consolidator.md}}
- GH_AW_PROMPT_1fc3b90762b9701a_EOF
+ GH_AW_PROMPT_bdcc92b3fa4f82a5_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -420,12 +454,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_384fe9daa892b04d_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_38785d45ee4b9450_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":168,"fallback_to_issue":true,"max":1},"create_pull_request":{"draft":false,"expires":48,"labels":["documentation","automation"],"max":1,"max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS"],"protected_path_prefixes":[".github/",".agents/"],"title_prefix":"[docs] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_repo_memory":{"memories":[{"dir":"/tmp/gh-aw/repo-memory/default","id":"default","max_file_count":100,"max_file_size":10240,"max_patch_size":10240}]}}
- GH_AW_SAFE_OUTPUTS_CONFIG_384fe9daa892b04d_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_38785d45ee4b9450_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_d7604fa103708eb8_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_d50c0c7569e1056c_EOF'
{
"description_suffixes": {
"create_discussion": " CONSTRAINTS: Maximum 1 discussion(s) can be created. Discussions will be created in category \"audits\".",
@@ -434,8 +468,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_d7604fa103708eb8_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_314ba2e0beb13251_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_d50c0c7569e1056c_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_35850464d31c5a13_EOF'
{
"create_discussion": {
"defaultMax": 1,
@@ -557,7 +591,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_314ba2e0beb13251_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_35850464d31c5a13_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -658,7 +692,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat << GH_AW_MCP_CONFIG_363204b3e9244476_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_60450871e3bbdb16_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -702,11 +736,8 @@ jobs:
}
},
"serena": {
+ "type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
"entrypoint": "serena",
"entrypointArgs": [
"start-mcp-server",
@@ -715,7 +746,13 @@ jobs:
"--project",
"\${GITHUB_WORKSPACE}"
],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -732,7 +769,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_363204b3e9244476_EOF
+ GH_AW_MCP_CONFIG_60450871e3bbdb16_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -841,13 +878,14 @@ jobs:
# - mcp__github__search_repositories
# - mcp__github__search_users
# - mcp__qmd
+ # - mcp__serena
timeout-minutes: 30
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
# shellcheck disable=SC1003
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,docs.github.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.1 --skip-pull --enable-api-proxy \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat scratchpad/*.md),Bash(cat),Bash(date),Bash(echo),Bash(find specs -maxdepth 1 -ls),Bash(find specs -name '\''\'\'''\''*.md'\''\'\'''\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(git),Bash(grep -r '\''\'\'''\''*'\''\'\'''\'' specs),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l scratchpad/*.md),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_d
ependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__qmd'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat scratchpad/*.md),Bash(cat),Bash(date),Bash(echo),Bash(find specs -maxdepth 1 -ls),Bash(find specs -name '\''\'\'''\''*.md'\''\'\'''\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(git),Bash(grep -r '\''\'\'''\''*'\''\'\'''\'' specs),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l scratchpad/*.md),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_d
ependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__qmd,mcp__serena'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
BASH_DEFAULT_TIMEOUT_MS: 60000
diff --git a/.github/workflows/duplicate-code-detector.lock.yml b/.github/workflows/duplicate-code-detector.lock.yml
index a3d0a2089f..785b1e4a8d 100644
--- a/.github/workflows/duplicate-code-detector.lock.yml
+++ b/.github/workflows/duplicate-code-detector.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"1704c3b6efe3516eb9e0d969d46884c2d0d78eb116c9ea0ee2c02886a2e0c025","strict":true,"agent_id":"codex"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"45a3b3d6fa12808929677c56136017f7369ac7cb1886c88501279a51a7b6500a","strict":true,"agent_id":"codex"}
name: "Duplicate Code Detector"
"on":
@@ -140,14 +141,14 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_6482a98809493715_EOF'
+ cat << 'GH_AW_PROMPT_da0684b6b37876e5_EOF'
- GH_AW_PROMPT_6482a98809493715_EOF
+ GH_AW_PROMPT_da0684b6b37876e5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_6482a98809493715_EOF'
+ cat << 'GH_AW_PROMPT_da0684b6b37876e5_EOF'
Tools: create_issue, missing_tool, missing_data, noop
@@ -179,29 +180,62 @@ jobs:
{{/if}}
- GH_AW_PROMPT_6482a98809493715_EOF
+ GH_AW_PROMPT_da0684b6b37876e5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_6482a98809493715_EOF'
+ cat << 'GH_AW_PROMPT_da0684b6b37876e5_EOF'
- GH_AW_PROMPT_6482a98809493715_EOF
- cat << 'GH_AW_PROMPT_6482a98809493715_EOF'
+ GH_AW_PROMPT_da0684b6b37876e5_EOF
+ cat << 'GH_AW_PROMPT_da0684b6b37876e5_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_da0684b6b37876e5_EOF
+ cat << 'GH_AW_PROMPT_da0684b6b37876e5_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_6482a98809493715_EOF
- cat << 'GH_AW_PROMPT_6482a98809493715_EOF'
+ GH_AW_PROMPT_da0684b6b37876e5_EOF
+ cat << 'GH_AW_PROMPT_da0684b6b37876e5_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_6482a98809493715_EOF
- cat << 'GH_AW_PROMPT_6482a98809493715_EOF'
+ GH_AW_PROMPT_da0684b6b37876e5_EOF
+ cat << 'GH_AW_PROMPT_da0684b6b37876e5_EOF'
{{#runtime-import .github/workflows/duplicate-code-detector.md}}
- GH_AW_PROMPT_6482a98809493715_EOF
+ GH_AW_PROMPT_da0684b6b37876e5_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_ACTOR: ${{ github.actor }}
GH_AW_GITHUB_EVENT_HEAD_COMMIT_ID: ${{ github.event.head_commit.id }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
- GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -365,12 +399,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_1b0465e915229ea4_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_782907c9eb8996c1_EOF'
{"create_issue":{"assignees":["copilot"],"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_1b0465e915229ea4_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_782907c9eb8996c1_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_cd54dff48c846a07_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_bd5fd174faec6ee9_EOF'
{
"description_suffixes": {
"create_issue": " CONSTRAINTS: Maximum 1 issue(s) can be created. Assignees [\"copilot\"] will be automatically assigned."
@@ -378,8 +412,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_cd54dff48c846a07_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_0cf6cf7fdf91fb57_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_bd5fd174faec6ee9_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_063a0fcac3505b2c_EOF'
{
"create_issue": {
"defaultMax": 1,
@@ -472,7 +506,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_0cf6cf7fdf91fb57_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_063a0fcac3505b2c_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -539,7 +573,7 @@ jobs:
export GH_AW_ENGINE="codex"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_fed356ec37333618_EOF
+ cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_a54e5b2868dcaa13_EOF
[history]
persistence = "none"
@@ -569,28 +603,22 @@ jobs:
[mcp_servers.serena]
container = "ghcr.io/github/serena-mcp-server:latest"
+ entrypoint = "serena"
+ entrypointArgs = ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"]
+ mounts = ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"]
args = [
"--network",
"host",
]
- entrypoint = "serena"
- entrypointArgs = [
- "start-mcp-server",
- "--context",
- "codex",
- "--project",
- "${GITHUB_WORKSPACE}"
- ]
- mounts = ["${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw"]
[mcp_servers.serena."guard-policies"]
[mcp_servers.serena."guard-policies".write-sink]
accept = ["*"]
- GH_AW_MCP_CONFIG_fed356ec37333618_EOF
+ GH_AW_MCP_CONFIG_a54e5b2868dcaa13_EOF
# Generate JSON config for MCP gateway
- cat << GH_AW_MCP_CONFIG_fed356ec37333618_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_a54e5b2868dcaa13_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -623,28 +651,31 @@ jobs:
}
},
"serena": {
- "container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
- "entrypoint": "serena",
- "entrypointArgs": [
- "start-mcp-server",
- "--context",
- "codex",
- "--project",
- "\${GITHUB_WORKSPACE}"
- ],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
- "guard-policies": {
- "write-sink": {
- "accept": [
- "*"
- ]
- }
+ "type": "stdio",
+ "container": "ghcr.io/github/serena-mcp-server:latest",
+ "entrypoint": "serena",
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "guard-policies": {
+ "write-sink": {
+ "accept": [
+ "*"
+ ]
}
}
+ }
},
"gateway": {
"port": $MCP_GATEWAY_PORT,
@@ -653,7 +684,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_fed356ec37333618_EOF
+ GH_AW_MCP_CONFIG_a54e5b2868dcaa13_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/glossary-maintainer.lock.yml b/.github/workflows/glossary-maintainer.lock.yml
index 38b65db75b..39d8ac66aa 100644
--- a/.github/workflows/glossary-maintainer.lock.yml
+++ b/.github/workflows/glossary-maintainer.lock.yml
@@ -28,8 +28,9 @@
# - ../skills/documentation/SKILL.md
# - shared/mcp/qmd-docs.md
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"8af546f974f5fa00a35c467729e68b9337f67f77c54f180891f1b434e6a7fd1e","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"eca7d351932b1292599e078d9d2ba0be9423450d6f00bd7dc0573f715e2b7f7b","strict":true,"agent_id":"copilot"}
name: "Glossary Maintainer"
"on":
@@ -139,9 +140,9 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
- GH_AW_PROMPT_6c0408faa0c49913_EOF
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -149,12 +150,12 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/repo_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
Tools: create_pull_request, missing_tool, missing_data, noop
- GH_AW_PROMPT_6c0408faa0c49913_EOF
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_create_pull_request.md"
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
The following GitHub context information is available for this workflow:
@@ -184,26 +185,59 @@ jobs:
{{/if}}
- GH_AW_PROMPT_6c0408faa0c49913_EOF
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
- GH_AW_PROMPT_6c0408faa0c49913_EOF
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
{{#runtime-import .github/skills/documentation/SKILL.md}}
- GH_AW_PROMPT_6c0408faa0c49913_EOF
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
{{#runtime-import .github/agents/technical-doc-writer.agent.md}}
- GH_AW_PROMPT_6c0408faa0c49913_EOF
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_6c0408faa0c49913_EOF
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
{{#runtime-import .github/workflows/shared/mcp/qmd-docs.md}}
- GH_AW_PROMPT_6c0408faa0c49913_EOF
- cat << 'GH_AW_PROMPT_6c0408faa0c49913_EOF'
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
+ cat << 'GH_AW_PROMPT_46ef56a9e5df78e3_EOF'
{{#runtime-import .github/workflows/glossary-maintainer.md}}
- GH_AW_PROMPT_6c0408faa0c49913_EOF
+ GH_AW_PROMPT_46ef56a9e5df78e3_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
@@ -431,12 +465,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_473595ab462c5a89_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_de03f03deb126789_EOF'
{"create_pull_request":{"draft":false,"expires":48,"labels":["documentation","glossary"],"max":1,"max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS"],"protected_path_prefixes":[".github/",".agents/"],"title_prefix":"[docs] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_repo_memory":{"memories":[{"dir":"/tmp/gh-aw/repo-memory/default","id":"default","max_file_count":100,"max_file_size":10240,"max_patch_size":10240}]}}
- GH_AW_SAFE_OUTPUTS_CONFIG_473595ab462c5a89_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_de03f03deb126789_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_7d6b442c65049ea7_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_1e12d904f599aa11_EOF'
{
"description_suffixes": {
"create_pull_request": " CONSTRAINTS: Maximum 1 pull request(s) can be created. Title will be prefixed with \"[docs] \". Labels [\"documentation\" \"glossary\"] will be automatically added."
@@ -444,8 +478,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_7d6b442c65049ea7_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_94ee931f21436a63_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_1e12d904f599aa11_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_3cece48557b30fd7_EOF'
{
"create_pull_request": {
"defaultMax": 1,
@@ -541,7 +575,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_94ee931f21436a63_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_3cece48557b30fd7_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -643,7 +677,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_7c7953d959177882_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_61e9af9c2220cfb4_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -690,10 +724,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -710,7 +758,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_7c7953d959177882_EOF
+ GH_AW_MCP_CONFIG_61e9af9c2220cfb4_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/go-fan.lock.yml b/.github/workflows/go-fan.lock.yml
index a89634dd3d..82428fbc26 100644
--- a/.github/workflows/go-fan.lock.yml
+++ b/.github/workflows/go-fan.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"3ca391548ba08c8a271413f4cf5a5ec319865e7da8f0a921a2d070743534688d","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"a0cfeca82bc472d0f9c93528021e5c3fec79201341ef88f80027cae3fcd0b580","strict":true,"agent_id":"claude"}
name: "Go Fan"
"on":
@@ -137,15 +138,15 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_03ae4af2fc279179_EOF'
+ cat << 'GH_AW_PROMPT_10cc16000f42ce03_EOF'
- GH_AW_PROMPT_03ae4af2fc279179_EOF
+ GH_AW_PROMPT_10cc16000f42ce03_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_03ae4af2fc279179_EOF'
+ cat << 'GH_AW_PROMPT_10cc16000f42ce03_EOF'
Tools: create_discussion, missing_tool, missing_data, noop
@@ -177,28 +178,61 @@ jobs:
{{/if}}
- GH_AW_PROMPT_03ae4af2fc279179_EOF
+ GH_AW_PROMPT_10cc16000f42ce03_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_03ae4af2fc279179_EOF'
+ cat << 'GH_AW_PROMPT_10cc16000f42ce03_EOF'
- GH_AW_PROMPT_03ae4af2fc279179_EOF
- cat << 'GH_AW_PROMPT_03ae4af2fc279179_EOF'
+ GH_AW_PROMPT_10cc16000f42ce03_EOF
+ cat << 'GH_AW_PROMPT_10cc16000f42ce03_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_10cc16000f42ce03_EOF
+ cat << 'GH_AW_PROMPT_10cc16000f42ce03_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_03ae4af2fc279179_EOF
- cat << 'GH_AW_PROMPT_03ae4af2fc279179_EOF'
+ GH_AW_PROMPT_10cc16000f42ce03_EOF
+ cat << 'GH_AW_PROMPT_10cc16000f42ce03_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_03ae4af2fc279179_EOF
- cat << 'GH_AW_PROMPT_03ae4af2fc279179_EOF'
+ GH_AW_PROMPT_10cc16000f42ce03_EOF
+ cat << 'GH_AW_PROMPT_10cc16000f42ce03_EOF'
{{#runtime-import .github/workflows/go-fan.md}}
- GH_AW_PROMPT_03ae4af2fc279179_EOF
+ GH_AW_PROMPT_10cc16000f42ce03_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
- GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -377,12 +411,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_b509dbcc1087d2fd_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_d187e70f299a323d_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1,"title_prefix":"[go-fan] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_b509dbcc1087d2fd_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_d187e70f299a323d_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_aae6cab5dd200833_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_30735c022c1c7683_EOF'
{
"description_suffixes": {
"create_discussion": " CONSTRAINTS: Maximum 1 discussion(s) can be created. Title will be prefixed with \"[go-fan] \". Discussions will be created in category \"audits\"."
@@ -390,8 +424,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_aae6cab5dd200833_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_b4ef947c2fb6dd42_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_30735c022c1c7683_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_d8ecf417956850d1_EOF'
{
"create_discussion": {
"defaultMax": 1,
@@ -477,7 +511,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_b4ef947c2fb6dd42_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_d8ecf417956850d1_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -544,7 +578,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat << GH_AW_MCP_CONFIG_8f054b0401443d1f_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_eb27b7f850ecef79_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -577,11 +611,8 @@ jobs:
}
},
"serena": {
+ "type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
"entrypoint": "serena",
"entrypointArgs": [
"start-mcp-server",
@@ -590,7 +621,13 @@ jobs:
"--project",
"\${GITHUB_WORKSPACE}"
],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -607,7 +644,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_8f054b0401443d1f_EOF
+ GH_AW_MCP_CONFIG_eb27b7f850ecef79_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -708,13 +745,14 @@ jobs:
# - mcp__github__search_pull_requests
# - mcp__github__search_repositories
# - mcp__github__search_users
+ # - mcp__serena
timeout-minutes: 30
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
# shellcheck disable=SC1003
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,docs.github.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,go.dev,golang.org,goproxy.io,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pkg.go.dev,playwright.download.prss.microsoft.com,ppa.launchpad.net,proxy.golang.org,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,storage.googleapis.com,sum.golang.org,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.1 --skip-pull --enable-api-proxy \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat go.mod),Bash(cat go.sum),Bash(cat scratchpad/mods/*),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''\'\'''\''*.go'\''\'\'''\''),Bash(find scratchpad/mods/ -maxdepth 1 -ls),Bash(go list -m all),Bash(grep -r '\''\'\'''\''import'\''\'\'''\'' --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_d
iscussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat go.mod),Bash(cat go.sum),Bash(cat scratchpad/mods/*),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''\'\'''\''*.go'\''\'\'''\''),Bash(find scratchpad/mods/ -maxdepth 1 -ls),Bash(go list -m all),Bash(grep -r '\''\'\'''\''import'\''\'\'''\'' --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_d
iscussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__serena'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
BASH_DEFAULT_TIMEOUT_MS: 60000
diff --git a/.github/workflows/jsweep.lock.yml b/.github/workflows/jsweep.lock.yml
index a0fc3094ba..9a2c063982 100644
--- a/.github/workflows/jsweep.lock.yml
+++ b/.github/workflows/jsweep.lock.yml
@@ -22,7 +22,11 @@
#
# Daily JavaScript unbloater that cleans one .cjs file per day, prioritizing files with @ts-nocheck to enable type checking
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"dc09f089e83a388addcd6b2edd270cdc33c2d32a65b5a030235a202c1381aba4","strict":true,"agent_id":"copilot"}
+# Resolved workflow manifest:
+# Imports:
+# - shared/mcp/serena.md
+#
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"dd86446c86ebaaeba36e9e964665af7108fbb2b5fad335f400c2378bbf43fc68","strict":true,"agent_id":"copilot"}
name: "jsweep - JavaScript Unbloater"
"on":
@@ -133,20 +137,20 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_2fde2e55431a54da_EOF'
+ cat << 'GH_AW_PROMPT_eac1fdf49c0156e5_EOF'
- GH_AW_PROMPT_2fde2e55431a54da_EOF
+ GH_AW_PROMPT_eac1fdf49c0156e5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_2fde2e55431a54da_EOF'
+ cat << 'GH_AW_PROMPT_eac1fdf49c0156e5_EOF'
Tools: create_pull_request, missing_tool, missing_data, noop
- GH_AW_PROMPT_2fde2e55431a54da_EOF
+ GH_AW_PROMPT_eac1fdf49c0156e5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_create_pull_request.md"
- cat << 'GH_AW_PROMPT_2fde2e55431a54da_EOF'
+ cat << 'GH_AW_PROMPT_eac1fdf49c0156e5_EOF'
The following GitHub context information is available for this workflow:
@@ -176,19 +180,53 @@ jobs:
{{/if}}
- GH_AW_PROMPT_2fde2e55431a54da_EOF
+ GH_AW_PROMPT_eac1fdf49c0156e5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_2fde2e55431a54da_EOF'
+ cat << 'GH_AW_PROMPT_eac1fdf49c0156e5_EOF'
- GH_AW_PROMPT_2fde2e55431a54da_EOF
- cat << 'GH_AW_PROMPT_2fde2e55431a54da_EOF'
+ GH_AW_PROMPT_eac1fdf49c0156e5_EOF
+ cat << 'GH_AW_PROMPT_eac1fdf49c0156e5_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["typescript"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_eac1fdf49c0156e5_EOF
+ cat << 'GH_AW_PROMPT_eac1fdf49c0156e5_EOF'
{{#runtime-import .github/workflows/jsweep.md}}
- GH_AW_PROMPT_2fde2e55431a54da_EOF
+ GH_AW_PROMPT_eac1fdf49c0156e5_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
with:
@@ -376,12 +414,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_b59d5fd264bdffa8_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_8510f70dd7d25452_EOF'
{"create_pull_request":{"draft":true,"expires":48,"if_no_changes":"ignore","labels":["unbloat","automation"],"max":1,"max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS"],"protected_path_prefixes":[".github/",".agents/"],"title_prefix":"[jsweep] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_b59d5fd264bdffa8_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_8510f70dd7d25452_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_4004bd9b5187f83b_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_29d0a9219bd724e0_EOF'
{
"description_suffixes": {
"create_pull_request": " CONSTRAINTS: Maximum 1 pull request(s) can be created. Title will be prefixed with \"[jsweep] \". Labels [\"unbloat\" \"automation\"] will be automatically added. PRs will be created as drafts."
@@ -389,8 +427,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_4004bd9b5187f83b_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_1fa81577b0ed70d3_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_29d0a9219bd724e0_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_49ddd9ce11c35cbf_EOF'
{
"create_pull_request": {
"defaultMax": 1,
@@ -486,7 +524,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_1fa81577b0ed70d3_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_49ddd9ce11c35cbf_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -554,7 +592,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_dd01f600fc9ae92c_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_5bef3d8af077c803_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -590,10 +628,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -610,7 +662,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_dd01f600fc9ae92c_EOF
+ GH_AW_MCP_CONFIG_5bef3d8af077c803_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/jsweep.md b/.github/workflows/jsweep.md
index 43a94b1e66..045c635808 100644
--- a/.github/workflows/jsweep.md
+++ b/.github/workflows/jsweep.md
@@ -13,8 +13,11 @@ engine: copilot
runtimes:
node:
version: "20"
+imports:
+ - uses: shared/mcp/serena.md
+ with:
+ languages: ["typescript"]
tools:
- serena: ["typescript"]
github:
toolsets: [default]
edit:
diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml
index 37fcfd40ad..f331f30403 100644
--- a/.github/workflows/mcp-inspector.lock.yml
+++ b/.github/workflows/mcp-inspector.lock.yml
@@ -36,12 +36,13 @@
# - shared/mcp/notion.md
# - shared/mcp/sentry.md
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/mcp/server-memory.md
# - shared/mcp/slack.md
# - shared/mcp/tavily.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"e92fc7f19a13329f2f521f2c3ade949e9a30c1bd31c9752c012a660be935c8a8","agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"bc87959fa3f38fe5f9ecc86014949784ba0d27396b04c0ce1cfd700db863e173","agent_id":"copilot"}
name: "MCP Inspector Agent"
"on":
@@ -152,16 +153,16 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
- GH_AW_PROMPT_3d8605826642c81d_EOF
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
Tools: create_discussion, missing_tool, missing_data, noop
@@ -193,67 +194,101 @@ jobs:
{{/if}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/arxiv.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/ast-grep.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/brave.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/context7.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/datadog.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/deepwiki.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/fabric-rti.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/markitdown.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/microsoft-docs.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/notion.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/sentry.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/server-memory.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/slack.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/tavily.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
- cat << 'GH_AW_PROMPT_3d8605826642c81d_EOF'
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
+ cat << 'GH_AW_PROMPT_64acc2c4c76a6b47_EOF'
{{#runtime-import .github/workflows/mcp-inspector.md}}
- GH_AW_PROMPT_3d8605826642c81d_EOF
+ GH_AW_PROMPT_64acc2c4c76a6b47_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -495,12 +530,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_575449c39d4a0db6_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_8c1bc9c5e8b1168b_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"notion-add-comment":{"description":"Add a comment to a Notion page","inputs":{"comment":{"default":null,"description":"The comment text to add","required":true,"type":"string"}},"output":"Comment added to Notion successfully!"},"post-to-slack-channel":{"description":"Post a message to a Slack channel. Message must be 200 characters or less. Supports basic Slack markdown: *bold*, _italic_, ~strike~, `code`, ```code block```, \u003equote, and links \u003curl|text\u003e. Requires GH_AW_SLACK_CHANNEL_ID environment variable to be set.","inputs":{"message":{"default":null,"description":"The message to post (max 200 characters, supports Slack markdown)","required":true,"type":"string"}},"output":"Message posted to Slack successfully!"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_575449c39d4a0db6_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_8c1bc9c5e8b1168b_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_3f81a901d13d3222_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_d46755b0b109493c_EOF'
{
"description_suffixes": {
"create_discussion": " CONSTRAINTS: Maximum 1 discussion(s) can be created. Discussions will be created in category \"audits\"."
@@ -543,8 +578,8 @@ jobs:
}
]
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_3f81a901d13d3222_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_e0aba99a36b7352c_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_d46755b0b109493c_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_419f6ce4cbde1c5a_EOF'
{
"create_discussion": {
"defaultMax": 1,
@@ -630,7 +665,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_e0aba99a36b7352c_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_419f6ce4cbde1c5a_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -712,7 +747,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e AZURE_CLIENT_ID -e AZURE_CLIENT_SECRET -e AZURE_TENANT_ID -e BRAVE_API_KEY -e CONTEXT7_API_KEY -e DD_API_KEY -e DD_APPLICATION_KEY -e DD_SITE -e NOTION_API_TOKEN -e SENTRY_ACCESS_TOKEN -e SENTRY_HOST -e SENTRY_OPENAI_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_a19e960e89f53e18_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_31f7cae84f3da191_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"agenticworkflows": {
@@ -1017,10 +1052,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -1057,7 +1106,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_a19e960e89f53e18_EOF
+ GH_AW_MCP_CONFIG_31f7cae84f3da191_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/q.lock.yml b/.github/workflows/q.lock.yml
index 48dc06aefd..3eb648a4a0 100644
--- a/.github/workflows/q.lock.yml
+++ b/.github/workflows/q.lock.yml
@@ -25,8 +25,9 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"609515be5110d6a17b7ad026f4d597ae062e31fd08db93c60194846614f83f2f","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"414f4c7ae0fd2681b02648ca35945a676d382a50fc17081df6cbca04288663bc","strict":true,"agent_id":"copilot"}
name: "Q"
"on":
@@ -205,21 +206,21 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_e431ebb7cc39ad6f_EOF'
+ cat << 'GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF'
- GH_AW_PROMPT_e431ebb7cc39ad6f_EOF
+ GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_e431ebb7cc39ad6f_EOF'
+ cat << 'GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF'
Tools: add_comment, create_pull_request, add_labels, missing_tool, missing_data, noop
- GH_AW_PROMPT_e431ebb7cc39ad6f_EOF
+ GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_create_pull_request.md"
- cat << 'GH_AW_PROMPT_e431ebb7cc39ad6f_EOF'
+ cat << 'GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF'
The following GitHub context information is available for this workflow:
@@ -249,25 +250,59 @@ jobs:
{{/if}}
- GH_AW_PROMPT_e431ebb7cc39ad6f_EOF
+ GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then
cat "${RUNNER_TEMP}/gh-aw/prompts/pr_context_prompt.md"
fi
- cat << 'GH_AW_PROMPT_e431ebb7cc39ad6f_EOF'
+ cat << 'GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF'
- GH_AW_PROMPT_e431ebb7cc39ad6f_EOF
- cat << 'GH_AW_PROMPT_e431ebb7cc39ad6f_EOF'
+ GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF
+ cat << 'GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF
+ cat << 'GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_e431ebb7cc39ad6f_EOF
- cat << 'GH_AW_PROMPT_e431ebb7cc39ad6f_EOF'
+ GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF
+ cat << 'GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF'
{{#runtime-import .github/workflows/q.md}}
- GH_AW_PROMPT_e431ebb7cc39ad6f_EOF
+ GH_AW_PROMPT_2f5c35e0c2fdd0d5_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_ACTOR: ${{ github.actor }}
GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }}
GH_AW_EXPR_799BE623: ${{ github.event.issue.number || github.event.pull_request.number }}
@@ -510,12 +545,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_1524572f3edb2feb_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_571e0d712d82a510_EOF'
{"add_comment":{"max":1},"add_labels":{"allowed":["spam"]},"create_pull_request":{"draft":false,"expires":48,"if_no_changes":"ignore","labels":["automation","workflow-optimization"],"max":1,"max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS"],"protected_files_policy":"fallback-to-issue","protected_path_prefixes":[".github/",".agents/"],"reviewers":["copilot"],"title_prefix":"[q] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_1524572f3edb2feb_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_571e0d712d82a510_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_983c1bdaae5a9f67_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_f801a602906e4d7c_EOF'
{
"description_suffixes": {
"add_comment": " CONSTRAINTS: Maximum 1 comment(s) can be added.",
@@ -525,8 +560,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_983c1bdaae5a9f67_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_5d130506476f98a2_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_f801a602906e4d7c_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_a78a9c422dba3682_EOF'
{
"add_comment": {
"defaultMax": 1,
@@ -659,7 +694,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_5d130506476f98a2_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_a78a9c422dba3682_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -726,7 +761,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_087402ebcdc3402e_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_09148f3e7eec75e2_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"agenticworkflows": {
@@ -783,10 +818,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -803,7 +852,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_087402ebcdc3402e_EOF
+ GH_AW_MCP_CONFIG_09148f3e7eec75e2_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/repository-quality-improver.lock.yml b/.github/workflows/repository-quality-improver.lock.yml
index 16719303e9..bd6cd6ea37 100644
--- a/.github/workflows/repository-quality-improver.lock.yml
+++ b/.github/workflows/repository-quality-improver.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"6bb1a0d4a4a46eebdfbddfe915d6345a17575dd0489817be13e208c9921460da","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"22c747872e9dd8339a43a9fe92e851adb398bed72690aeba5d22fb96e1b62b78","strict":true,"agent_id":"copilot"}
name: "Repository Quality Improvement Agent"
"on":
@@ -137,15 +138,15 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_3fb8e53f2574faa5_EOF'
+ cat << 'GH_AW_PROMPT_85d0ce5671dbc2a7_EOF'
- GH_AW_PROMPT_3fb8e53f2574faa5_EOF
+ GH_AW_PROMPT_85d0ce5671dbc2a7_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt_multi.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_3fb8e53f2574faa5_EOF'
+ cat << 'GH_AW_PROMPT_85d0ce5671dbc2a7_EOF'
Tools: create_discussion, missing_tool, missing_data, noop
@@ -177,25 +178,59 @@ jobs:
{{/if}}
- GH_AW_PROMPT_3fb8e53f2574faa5_EOF
+ GH_AW_PROMPT_85d0ce5671dbc2a7_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_3fb8e53f2574faa5_EOF'
+ cat << 'GH_AW_PROMPT_85d0ce5671dbc2a7_EOF'
- GH_AW_PROMPT_3fb8e53f2574faa5_EOF
- cat << 'GH_AW_PROMPT_3fb8e53f2574faa5_EOF'
+ GH_AW_PROMPT_85d0ce5671dbc2a7_EOF
+ cat << 'GH_AW_PROMPT_85d0ce5671dbc2a7_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_85d0ce5671dbc2a7_EOF
+ cat << 'GH_AW_PROMPT_85d0ce5671dbc2a7_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_3fb8e53f2574faa5_EOF
- cat << 'GH_AW_PROMPT_3fb8e53f2574faa5_EOF'
+ GH_AW_PROMPT_85d0ce5671dbc2a7_EOF
+ cat << 'GH_AW_PROMPT_85d0ce5671dbc2a7_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_3fb8e53f2574faa5_EOF
- cat << 'GH_AW_PROMPT_3fb8e53f2574faa5_EOF'
+ GH_AW_PROMPT_85d0ce5671dbc2a7_EOF
+ cat << 'GH_AW_PROMPT_85d0ce5671dbc2a7_EOF'
{{#runtime-import .github/workflows/repository-quality-improver.md}}
- GH_AW_PROMPT_3fb8e53f2574faa5_EOF
+ GH_AW_PROMPT_85d0ce5671dbc2a7_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
@@ -374,12 +409,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_d5ea1e3bf145e43c_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_e2d4975556414d20_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_d5ea1e3bf145e43c_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_e2d4975556414d20_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_72d77390da1f4e41_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_8740b736f711cf2d_EOF'
{
"description_suffixes": {
"create_discussion": " CONSTRAINTS: Maximum 1 discussion(s) can be created. Discussions will be created in category \"audits\"."
@@ -387,8 +422,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_72d77390da1f4e41_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_c1d76e4147c117f5_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_8740b736f711cf2d_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_64b504c66344b51e_EOF'
{
"create_discussion": {
"defaultMax": 1,
@@ -474,7 +509,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_c1d76e4147c117f5_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_64b504c66344b51e_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -542,7 +577,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_702f79030a5842c7_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_7d1539ffd79d8d6f_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -578,10 +613,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -598,7 +647,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_702f79030a5842c7_EOF
+ GH_AW_MCP_CONFIG_7d1539ffd79d8d6f_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/semantic-function-refactor.lock.yml b/.github/workflows/semantic-function-refactor.lock.yml
index 109be24f83..51e42dfcd2 100644
--- a/.github/workflows/semantic-function-refactor.lock.yml
+++ b/.github/workflows/semantic-function-refactor.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"58b9c667bf6db3acec9c27027d1346b4cd6bb700b508ed533569938971852631","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"8a68127b1888587b0445b5b055dc37c7d3f24a62e752d4978ae7ce624f15e646","strict":true,"agent_id":"claude"}
name: "Semantic Function Refactoring"
"on":
@@ -138,14 +139,14 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_a1d3498867db9642_EOF'
+ cat << 'GH_AW_PROMPT_536364e5be7123c1_EOF'
- GH_AW_PROMPT_a1d3498867db9642_EOF
+ GH_AW_PROMPT_536364e5be7123c1_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_a1d3498867db9642_EOF'
+ cat << 'GH_AW_PROMPT_536364e5be7123c1_EOF'
Tools: create_issue, close_issue(max:10), missing_tool, missing_data, noop
@@ -177,27 +178,60 @@ jobs:
{{/if}}
- GH_AW_PROMPT_a1d3498867db9642_EOF
+ GH_AW_PROMPT_536364e5be7123c1_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_a1d3498867db9642_EOF'
+ cat << 'GH_AW_PROMPT_536364e5be7123c1_EOF'
- GH_AW_PROMPT_a1d3498867db9642_EOF
- cat << 'GH_AW_PROMPT_a1d3498867db9642_EOF'
+ GH_AW_PROMPT_536364e5be7123c1_EOF
+ cat << 'GH_AW_PROMPT_536364e5be7123c1_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_536364e5be7123c1_EOF
+ cat << 'GH_AW_PROMPT_536364e5be7123c1_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_a1d3498867db9642_EOF
- cat << 'GH_AW_PROMPT_a1d3498867db9642_EOF'
+ GH_AW_PROMPT_536364e5be7123c1_EOF
+ cat << 'GH_AW_PROMPT_536364e5be7123c1_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_a1d3498867db9642_EOF
- cat << 'GH_AW_PROMPT_a1d3498867db9642_EOF'
+ GH_AW_PROMPT_536364e5be7123c1_EOF
+ cat << 'GH_AW_PROMPT_536364e5be7123c1_EOF'
{{#runtime-import .github/workflows/semantic-function-refactor.md}}
- GH_AW_PROMPT_a1d3498867db9642_EOF
+ GH_AW_PROMPT_536364e5be7123c1_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -359,12 +393,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_fe04f7e6a642c2b9_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_b3f4a9d9742d8359_EOF'
{"close_issue":{"max":10,"required_title_prefix":"[refactor] ","target":"*"},"create_issue":{"expires":48,"labels":["refactoring","code-quality","automated-analysis","cookie"],"max":1,"title_prefix":"[refactor] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_fe04f7e6a642c2b9_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_b3f4a9d9742d8359_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_7deb80b8814fd86e_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_d31c203c6a3784c5_EOF'
{
"description_suffixes": {
"close_issue": " CONSTRAINTS: Maximum 10 issue(s) can be closed. Target: *. Only issues with title prefix \"[refactor] \" can be closed.",
@@ -373,8 +407,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_7deb80b8814fd86e_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_cac13f66ec9e07ec_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_d31c203c6a3784c5_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_6c95e676fc09688d_EOF'
{
"close_issue": {
"defaultMax": 1,
@@ -485,7 +519,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_cac13f66ec9e07ec_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_6c95e676fc09688d_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -552,7 +586,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat << GH_AW_MCP_CONFIG_d8ec3b7b468994d5_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_b48daadd1b2295df_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -585,11 +619,8 @@ jobs:
}
},
"serena": {
+ "type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
"entrypoint": "serena",
"entrypointArgs": [
"start-mcp-server",
@@ -598,7 +629,13 @@ jobs:
"--project",
"\${GITHUB_WORKSPACE}"
],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -615,7 +652,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_d8ec3b7b468994d5_EOF
+ GH_AW_MCP_CONFIG_b48daadd1b2295df_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -713,13 +750,14 @@ jobs:
# - mcp__github__search_pull_requests
# - mcp__github__search_repositories
# - mcp__github__search_users
+ # - mcp__serena
timeout-minutes: 20
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
# shellcheck disable=SC1003
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.1 --skip-pull --enable-api-proxy \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat pkg/**/*.go),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''\'\'''\''*.go'\''\'\'''\'' ! -name '\''\'\'''\''*_test.go'\''\'\'''\'' -type f),Bash(find pkg -type f -name '\''\'\'''\''*.go'\''\'\'''\'' ! -name '\''\'\'''\''*_test.go'\''\'\'''\''),Bash(find pkg/ -maxdepth 1 -ls),Bash(find pkg/workflow/ -maxdepth 1 -ls),Bash(grep -r '\''\'\'''\''func '\''\'\'''\'' pkg --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep),Bash(head -n * pkg/**/*.go),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__l
ist_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat pkg/**/*.go),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''\'\'''\''*.go'\''\'\'''\'' ! -name '\''\'\'''\''*_test.go'\''\'\'''\'' -type f),Bash(find pkg -type f -name '\''\'\'''\''*.go'\''\'\'''\'' ! -name '\''\'\'''\''*_test.go'\''\'\'''\''),Bash(find pkg/ -maxdepth 1 -ls),Bash(find pkg/workflow/ -maxdepth 1 -ls),Bash(grep -r '\''\'\'''\''func '\''\'\'''\'' pkg --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep),Bash(head -n * pkg/**/*.go),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__l
ist_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__serena'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
BASH_DEFAULT_TIMEOUT_MS: 60000
diff --git a/.github/workflows/sergo.lock.yml b/.github/workflows/sergo.lock.yml
index de79d829db..c70c85e1f5 100644
--- a/.github/workflows/sergo.lock.yml
+++ b/.github/workflows/sergo.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"02bf772df769e1b8dcee8170592780e7f1203860768210b7bf353632372b9d86","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"5aee3d458efb1579e66dd4f560c29dc760203ab1c9195ef48ad0251472b6a524","strict":true,"agent_id":"claude"}
name: "Sergo - Serena Go Expert"
"on":
@@ -138,15 +139,15 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_6d2f3b06e9709fad_EOF'
+ cat << 'GH_AW_PROMPT_352c17e092b55108_EOF'
- GH_AW_PROMPT_6d2f3b06e9709fad_EOF
+ GH_AW_PROMPT_352c17e092b55108_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_6d2f3b06e9709fad_EOF'
+ cat << 'GH_AW_PROMPT_352c17e092b55108_EOF'
Tools: create_discussion, missing_tool, missing_data, noop
@@ -178,25 +179,59 @@ jobs:
{{/if}}
- GH_AW_PROMPT_6d2f3b06e9709fad_EOF
+ GH_AW_PROMPT_352c17e092b55108_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_6d2f3b06e9709fad_EOF'
+ cat << 'GH_AW_PROMPT_352c17e092b55108_EOF'
- GH_AW_PROMPT_6d2f3b06e9709fad_EOF
- cat << 'GH_AW_PROMPT_6d2f3b06e9709fad_EOF'
+ GH_AW_PROMPT_352c17e092b55108_EOF
+ cat << 'GH_AW_PROMPT_352c17e092b55108_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_352c17e092b55108_EOF
+ cat << 'GH_AW_PROMPT_352c17e092b55108_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_6d2f3b06e9709fad_EOF
- cat << 'GH_AW_PROMPT_6d2f3b06e9709fad_EOF'
+ GH_AW_PROMPT_352c17e092b55108_EOF
+ cat << 'GH_AW_PROMPT_352c17e092b55108_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_6d2f3b06e9709fad_EOF
- cat << 'GH_AW_PROMPT_6d2f3b06e9709fad_EOF'
+ GH_AW_PROMPT_352c17e092b55108_EOF
+ cat << 'GH_AW_PROMPT_352c17e092b55108_EOF'
{{#runtime-import .github/workflows/sergo.md}}
- GH_AW_PROMPT_6d2f3b06e9709fad_EOF
+ GH_AW_PROMPT_352c17e092b55108_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
with:
@@ -377,12 +412,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_ad8c651a14af7b8c_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_2c3f4557be74f075_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1,"title_prefix":"[sergo] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_ad8c651a14af7b8c_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_2c3f4557be74f075_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_8c77fc764e2d4d84_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_1405630b13fc1ea4_EOF'
{
"description_suffixes": {
"create_discussion": " CONSTRAINTS: Maximum 1 discussion(s) can be created. Title will be prefixed with \"[sergo] \". Discussions will be created in category \"audits\"."
@@ -390,8 +425,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_8c77fc764e2d4d84_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_b065898035d24446_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_1405630b13fc1ea4_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_3a3a31529010dfe9_EOF'
{
"create_discussion": {
"defaultMax": 1,
@@ -477,7 +512,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_b065898035d24446_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_3a3a31529010dfe9_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -544,7 +579,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat << GH_AW_MCP_CONFIG_a7ade21eedbddadb_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_0eb4f4cd9e93dd6f_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -577,11 +612,8 @@ jobs:
}
},
"serena": {
+ "type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
"entrypoint": "serena",
"entrypointArgs": [
"start-mcp-server",
@@ -590,7 +622,13 @@ jobs:
"--project",
"\${GITHUB_WORKSPACE}"
],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -607,7 +645,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_a7ade21eedbddadb_EOF
+ GH_AW_MCP_CONFIG_0eb4f4cd9e93dd6f_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -707,13 +745,14 @@ jobs:
# - mcp__github__search_pull_requests
# - mcp__github__search_repositories
# - mcp__github__search_users
+ # - mcp__serena
timeout-minutes: 45
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
# shellcheck disable=SC1003
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,docs.github.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,go.dev,golang.org,goproxy.io,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pkg.go.dev,playwright.download.prss.microsoft.com,ppa.launchpad.net,proxy.golang.org,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,storage.googleapis.com,sum.golang.org,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.1 --skip-pull --enable-api-proxy \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat go.mod),Bash(cat go.sum),Bash(cat),Bash(date),Bash(echo),Bash(find . -name '\''\'\'''\''*.go'\''\'\'''\'' -type f),Bash(go list -m all),Bash(grep -r '\''\'\'''\''func '\''\'\'''\'' --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__l
ist_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat go.mod),Bash(cat go.sum),Bash(cat),Bash(date),Bash(echo),Bash(find . -name '\''\'\'''\''*.go'\''\'\'''\'' -type f),Bash(go list -m all),Bash(grep -r '\''\'\'''\''func '\''\'\'''\'' --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__l
ist_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__serena'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
BASH_DEFAULT_TIMEOUT_MS: 60000
diff --git a/.github/workflows/shared/mcp/serena-go.md b/.github/workflows/shared/mcp/serena-go.md
index 9eec240306..f6c656c9d6 100644
--- a/.github/workflows/shared/mcp/serena-go.md
+++ b/.github/workflows/shared/mcp/serena-go.md
@@ -14,8 +14,10 @@
# imports:
# - shared/mcp/serena-go.md
-tools:
- serena: ["go"]
+imports:
+ - uses: shared/mcp/serena.md
+ with:
+ languages: ["go"]
---
## Serena Go Code Analysis
diff --git a/.github/workflows/shared/mcp/serena.md b/.github/workflows/shared/mcp/serena.md
new file mode 100644
index 0000000000..1ba26a09c2
--- /dev/null
+++ b/.github/workflows/shared/mcp/serena.md
@@ -0,0 +1,74 @@
+---
+# Serena MCP Server - Multi-Language Code Analysis
+# Language Server Protocol (LSP)-based tool for deep semantic code analysis.
+# Supports 30+ languages through per-language LSP integration.
+#
+# Documentation: https://github.com/oraios/serena
+#
+# Usage:
+# imports:
+# - uses: shared/mcp/serena.md
+# with:
+# languages: ["go"] # one language
+# languages: ["go", "typescript"] # multiple languages
+# languages: ["typescript", "python"] # with node/python runtimes
+#
+# The 'languages' input configures the Serena MCP server language list.
+
+import-schema:
+ languages:
+ type: array
+ items:
+ type: string
+ required: true
+ description: >
+ List of programming language identifiers to enable for Serena LSP analysis.
+ Supported values include: go, typescript, javascript, python, rust, java,
+ ruby, csharp, cpp, c, kotlin, scala, swift, php, and more.
+
+mcp-servers:
+ serena:
+ container: "ghcr.io/github/serena-mcp-server:latest"
+ args:
+ - "--network"
+ - "host"
+ entrypoint: "serena"
+ entrypointArgs:
+ - "start-mcp-server"
+ - "--context"
+ - "codex"
+ - "--project"
+ - \${GITHUB_WORKSPACE}
+ mounts:
+ - \${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw
+---
+
+## Serena Code Analysis
+
+The Serena MCP server is configured for **${{ github.aw.import-inputs.languages }}** analysis in this workspace:
+- **Workspace**: `${{ github.workspace }}`
+- **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+### Project Activation
+
+Before analyzing code, activate the Serena project:
+```
+Tool: activate_project
+Args: { "path": "${{ github.workspace }}" }
+```
+
+### Available Capabilities
+
+Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+- **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+- **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+- **Type info**: `get_symbol_documentation` — hover-level type and doc information
+- **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+- **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+### Analysis Guidelines
+
+1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+2. **Activate project first** — always call `activate_project` before other tools
+3. **Cross-reference findings** — validate with multiple tools for accuracy
+4. **Focus on the relevant language files** — ignore unrelated file types
diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml
index 85dae88ecc..0fda835452 100644
--- a/.github/workflows/smoke-claude.lock.yml
+++ b/.github/workflows/smoke-claude.lock.yml
@@ -29,12 +29,14 @@
# - shared/github-queries-mcp-script.md
# - shared/go-make.md
# - shared/mcp-pagination.md
+# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/mcp/tavily.md
# - shared/reporting.md
#
# inlined-imports: true
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"dc941381265e093b00d8275efca5b636272772d659e2caf2cea881b5362f4678","agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"2ef75d6200432160e65a96d4e00d7e0cbaccf18c52517c7998aa8194b4dbc57a","agent_id":"claude"}
name: "Smoke Claude"
"on":
@@ -198,9 +200,9 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
+ GH_AW_PROMPT_bebbbce83b966562_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -208,12 +210,12 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
Tools: add_comment(max:2), create_issue, close_pull_request, update_pull_request, create_pull_request_review_comment(max:5), submit_pull_request_review, resolve_pull_request_review_thread(max:5), add_labels, add_reviewer(max:2), push_to_pull_request_branch, missing_tool, missing_data, noop
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
+ GH_AW_PROMPT_bebbbce83b966562_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_push_to_pr_branch.md"
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
The following GitHub context information is available for this workflow:
@@ -243,12 +245,45 @@ jobs:
{{/if}}
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
+ GH_AW_PROMPT_bebbbce83b966562_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
## MCP Response Size Limits
MCP tool responses have a **25,000 token limit**. When GitHub API responses exceed this limit, workflows must retry with pagination parameters, wasting turns and tokens.
@@ -360,8 +395,8 @@ jobs:
This proactive approach eliminates retry loops and reduces token consumption.
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
**IMPORTANT**: Always use the `mcpscripts-gh` tool for GitHub CLI commands instead of running `gh` directly via bash. The `mcpscripts-gh` tool has proper authentication configured with `GITHUB_TOKEN`, while bash commands do not have GitHub CLI authentication by default.
**Correct**:
@@ -378,11 +413,11 @@ jobs:
```
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
## Report Structure Guidelines
### 1. Header Levels
@@ -452,11 +487,11 @@ jobs:
- Format run IDs as links: `[§12345](https://github.com/owner/repo/actions/runs/12345)`
- Include up to 3 most relevant run URLs at end under `**References:**`
- Do NOT add footer attribution (system adds automatically)
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
**IMPORTANT**: Always use the `mcpscripts-go` and `mcpscripts-make` tools for Go and Make commands instead of running them directly via bash. These mcp-script tools provide consistent execution and proper logging.
**Correct**:
@@ -475,11 +510,33 @@ jobs:
```
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
+
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
+ ## Serena Go Code Analysis
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
- cat << 'GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF'
+ The Serena MCP server is configured for Go code analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Analysis Constraints
+
+ 1. **Only analyze `.go` files** — Ignore all other file types
+ 2. **Skip test files** — Never analyze files ending in `_test.go`
+ 3. **Focus on `pkg/` directory** — Primary analysis area
+ 4. **Use Serena for semantic analysis** — Leverage LSP capabilities for deeper insights
+ GH_AW_PROMPT_bebbbce83b966562_EOF
+ cat << 'GH_AW_PROMPT_bebbbce83b966562_EOF'
# Smoke Test: Claude Engine Validation.
**IMPORTANT: Keep all outputs extremely short and concise. Use single-line responses where possible. No verbose explanations.**
@@ -587,16 +644,16 @@ jobs:
{"noop": {"message": "No action needed: [brief explanation of what was analyzed and why]"}}
```
- GH_AW_PROMPT_5b5cf8c4ea949d9a_EOF
+ GH_AW_PROMPT_bebbbce83b966562_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }}
- GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -857,12 +914,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_b21107ea4b46bc70_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_35162a556554808d_EOF'
{"add_comment":{"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-claude"]},"add_reviewer":{"max":2,"target":"*"},"close_pull_request":{"max":1,"staged":true},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-claude","expires":2,"group":true,"labels":["automation","testing"],"max":1},"create_pull_request_review_comment":{"max":5,"side":"RIGHT","target":"*"},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"post_slack_message":{"description":"Post a message to a fictitious Slack channel (smoke test only — no real Slack integration)","inputs":{"channel":{"default":"#general","description":"Slack channel name to post to","required":false,"type":"string"},"message":{"description":"Message text to post","required":false,"type":"string"}}},"push_to_pull_request_branch":{"if_no_changes":"warn","max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS"],"protected_path_prefixes":[".github/",".agents/"],"staged":true,"target":"*"},"resolve_pull_request_review_thread":{"max":5},"submit_pull_request_review":{"footer":"always","max":1},"update_pull_request":{"allow_body":true,"allow_title":true,"max":1,"target":"*"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_b21107ea4b46bc70_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_35162a556554808d_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_78850d57a6f3ee6b_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_ad94a1430dea5ccb_EOF'
{
"description_suffixes": {
"add_comment": " CONSTRAINTS: Maximum 2 comment(s) can be added.",
@@ -899,8 +956,8 @@ jobs:
}
]
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_78850d57a6f3ee6b_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_c02dc5a35725a415_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_ad94a1430dea5ccb_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_07d7f0b79e09a963_EOF'
{
"add_comment": {
"defaultMax": 1,
@@ -1185,7 +1242,7 @@ jobs:
"customValidation": "requiresOneOf:title,body"
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_c02dc5a35725a415_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_07d7f0b79e09a963_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -1228,7 +1285,7 @@ jobs:
- name: Setup MCP Scripts Config
run: |
mkdir -p ${RUNNER_TEMP}/gh-aw/mcp-scripts/logs
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_ccd0fc6b0f6e80e9_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_5eb6dd086563f7c7_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -1380,8 +1437,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_ccd0fc6b0f6e80e9_EOF
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_d2d62f86d7364b9a_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_5eb6dd086563f7c7_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_443d3c54d4ae10b7_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -1395,12 +1452,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_d2d62f86d7364b9a_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_443d3c54d4ae10b7_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs
- name: Setup MCP Scripts Tool Files
run: |
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_f67559598c935571_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_0fd7056ae522c7a9_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -1411,9 +1468,9 @@ jobs:
echo " token: ${GH_AW_GH_TOKEN:0:6}..."
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_f67559598c935571_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_0fd7056ae522c7a9_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_e102945e6b71afcb_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_4abb3e0f5a9b2a37_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-discussion-query
# Query GitHub discussions with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1548,9 +1605,9 @@ jobs:
EOF
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_e102945e6b71afcb_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_4abb3e0f5a9b2a37_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_b8c7d61ac9725809_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_49dfcb4ad1c2c0d2_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-issue-query
# Query GitHub issues with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1629,9 +1686,9 @@ jobs:
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_b8c7d61ac9725809_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_49dfcb4ad1c2c0d2_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_a408417e70dfc282_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_fdc998f2fadcb164_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-pr-query
# Query GitHub pull requests with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1716,9 +1773,9 @@ jobs:
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_a408417e70dfc282_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_fdc998f2fadcb164_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/go.sh << 'GH_AW_MCP_SCRIPTS_SH_GO_262cebed7ecac346_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/go.sh << 'GH_AW_MCP_SCRIPTS_SH_GO_f6edbeb1ebc0ab2e_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: go
# Execute any Go command. This tool is accessible as 'mcpscripts-go'. Provide the full command after 'go' (e.g., args: 'test ./...'). The tool will run: go . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -1729,9 +1786,9 @@ jobs:
go $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GO_262cebed7ecac346_EOF
+ GH_AW_MCP_SCRIPTS_SH_GO_f6edbeb1ebc0ab2e_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/go.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/make.sh << 'GH_AW_MCP_SCRIPTS_SH_MAKE_b3a40908f9536124_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/make.sh << 'GH_AW_MCP_SCRIPTS_SH_MAKE_7cb4a9938463451c_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: make
# Execute any Make target. This tool is accessible as 'mcpscripts-make'. Provide the target name(s) (e.g., args: 'build'). The tool will run: make . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -1741,7 +1798,7 @@ jobs:
echo "make $INPUT_ARGS"
make $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_MAKE_b3a40908f9536124_EOF
+ GH_AW_MCP_SCRIPTS_SH_MAKE_7cb4a9938463451c_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/make.sh
- name: Generate MCP Scripts Server Config
@@ -1814,7 +1871,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_MCP_SCRIPTS_PORT -e GH_AW_MCP_SCRIPTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat << GH_AW_MCP_CONFIG_ffcfdd19b250b5b1_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_3246806fca034e9a_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"agenticworkflows": {
@@ -1903,11 +1960,8 @@ jobs:
}
},
"serena": {
+ "type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
"entrypoint": "serena",
"entrypointArgs": [
"start-mcp-server",
@@ -1916,7 +1970,13 @@ jobs:
"--project",
"\${GITHUB_WORKSPACE}"
],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -1950,7 +2010,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_ffcfdd19b250b5b1_EOF
+ GH_AW_MCP_CONFIG_3246806fca034e9a_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -2664,7 +2724,7 @@ jobs:
echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV"
- name: Setup Safe Outputs Custom Scripts
run: |
- cat > ${RUNNER_TEMP}/gh-aw/actions/safe_output_script_post_slack_message.cjs << 'GH_AW_SAFE_OUTPUT_SCRIPT_POST_SLACK_MESSAGE_c30eabcd41f1ac22_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/actions/safe_output_script_post_slack_message.cjs << 'GH_AW_SAFE_OUTPUT_SCRIPT_POST_SLACK_MESSAGE_f4ec5cd65796fe0e_EOF'
// @ts-check
///
// Auto-generated safe-output script handler: post-slack-message
@@ -2684,7 +2744,7 @@ jobs:
}
module.exports = { main };
- GH_AW_SAFE_OUTPUT_SCRIPT_POST_SLACK_MESSAGE_c30eabcd41f1ac22_EOF
+ GH_AW_SAFE_OUTPUT_SCRIPT_POST_SLACK_MESSAGE_f4ec5cd65796fe0e_EOF
- name: Process Safe Outputs
id: process_safe_outputs
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
diff --git a/.github/workflows/smoke-claude.md b/.github/workflows/smoke-claude.md
index 39f661a3c1..b69c46994f 100644
--- a/.github/workflows/smoke-claude.md
+++ b/.github/workflows/smoke-claude.md
@@ -29,6 +29,7 @@ imports:
- shared/github-queries-mcp-script.md
- shared/go-make.md
- shared/github-mcp-app.md
+ - shared/mcp/serena-go.md
network:
allowed:
- defaults
@@ -43,9 +44,6 @@ tools:
edit:
bash:
- "*"
- serena:
- languages:
- go: {}
dependencies:
packages:
- microsoft/apm-sample-package
diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml
index b314801b5b..1bde958e80 100644
--- a/.github/workflows/smoke-codex.lock.yml
+++ b/.github/workflows/smoke-codex.lock.yml
@@ -25,9 +25,11 @@
# Resolved workflow manifest:
# Imports:
# - shared/gh.md
+# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"c118db08aa154b8479cc8c61096fb83e65847d51436d12d3de06731702b72a27","agent_id":"codex"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"f44e72272050fdefc2eaef02af775f7d2b49c900c4c06d1907982cc7a01f4cef","agent_id":"codex"}
name: "Smoke Codex"
"on":
@@ -191,9 +193,9 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_b40bd4653efaa0e8_EOF'
+ cat << 'GH_AW_PROMPT_874458f22920114c_EOF'
- GH_AW_PROMPT_b40bd4653efaa0e8_EOF
+ GH_AW_PROMPT_874458f22920114c_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -201,7 +203,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/qmd_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_b40bd4653efaa0e8_EOF'
+ cat << 'GH_AW_PROMPT_874458f22920114c_EOF'
Tools: add_comment(max:2), create_issue, add_labels, remove_labels, unassign_from_user, hide_comment(max:5), missing_tool, missing_data, noop
@@ -236,30 +238,66 @@ jobs:
- **Note**: If a branch you need is not in the list above and is not listed as an additional fetched ref, it has NOT been checked out. For private repositories you cannot fetch it without proper authentication. If the branch is required and not available, exit with an error and ask the user to add it to the `fetch:` option of the `checkout:` configuration (e.g., `fetch: ["refs/pulls/open/*"]` for all open PR refs, or `fetch: ["main", "feature/my-branch"]` for specific branches).
- GH_AW_PROMPT_b40bd4653efaa0e8_EOF
+ GH_AW_PROMPT_874458f22920114c_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_b40bd4653efaa0e8_EOF'
+ cat << 'GH_AW_PROMPT_874458f22920114c_EOF'
- GH_AW_PROMPT_b40bd4653efaa0e8_EOF
- cat << 'GH_AW_PROMPT_b40bd4653efaa0e8_EOF'
+ GH_AW_PROMPT_874458f22920114c_EOF
+ cat << 'GH_AW_PROMPT_874458f22920114c_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_874458f22920114c_EOF
+ cat << 'GH_AW_PROMPT_874458f22920114c_EOF'
{{#runtime-import .github/workflows/shared/gh.md}}
- GH_AW_PROMPT_b40bd4653efaa0e8_EOF
- cat << 'GH_AW_PROMPT_b40bd4653efaa0e8_EOF'
+ GH_AW_PROMPT_874458f22920114c_EOF
+ cat << 'GH_AW_PROMPT_874458f22920114c_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_b40bd4653efaa0e8_EOF
- cat << 'GH_AW_PROMPT_b40bd4653efaa0e8_EOF'
+ GH_AW_PROMPT_874458f22920114c_EOF
+ cat << 'GH_AW_PROMPT_874458f22920114c_EOF'
+ {{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
+ GH_AW_PROMPT_874458f22920114c_EOF
+ cat << 'GH_AW_PROMPT_874458f22920114c_EOF'
{{#runtime-import .github/workflows/smoke-codex.md}}
- GH_AW_PROMPT_b40bd4653efaa0e8_EOF
+ GH_AW_PROMPT_874458f22920114c_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }}
- GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -459,12 +497,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_425879ec7be2cda8_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_615d15b4b2d7570c_EOF'
{"add_comment":{"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-codex"]},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-codex","expires":2,"labels":["automation","testing"],"max":1},"hide_comment":{"max":5},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"unassign_from_user":{"allowed":["githubactionagent"],"max":1}}
- GH_AW_SAFE_OUTPUTS_CONFIG_425879ec7be2cda8_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_615d15b4b2d7570c_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_7c88f456c6a82c2a_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_b4c7657d8a6271bc_EOF'
{
"description_suffixes": {
"add_comment": " CONSTRAINTS: Maximum 2 comment(s) can be added.",
@@ -497,8 +535,8 @@ jobs:
}
]
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_7c88f456c6a82c2a_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_1e9e6012f03eb790_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_b4c7657d8a6271bc_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_6753d269a2ae138c_EOF'
{
"add_comment": {
"defaultMax": 1,
@@ -694,7 +732,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_1e9e6012f03eb790_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_6753d269a2ae138c_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -737,7 +775,7 @@ jobs:
- name: Setup MCP Scripts Config
run: |
mkdir -p ${RUNNER_TEMP}/gh-aw/mcp-scripts/logs
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_b4987cc2bce0ea57_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_0b8dd47d724f414e_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -767,8 +805,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_b4987cc2bce0ea57_EOF
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_a6b2e537f4dd62d8_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_0b8dd47d724f414e_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_c89b26d219d3c447_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -782,12 +820,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_a6b2e537f4dd62d8_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_c89b26d219d3c447_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs
- name: Setup MCP Scripts Tool Files
run: |
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_4f8336ce68b54ac3_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_c9a0a5f099e05dc5_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -798,7 +836,7 @@ jobs:
echo " token: ${GH_AW_GH_TOKEN:0:6}..."
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_4f8336ce68b54ac3_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_c9a0a5f099e05dc5_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh
- name: Generate MCP Scripts Server Config
@@ -902,7 +940,7 @@ jobs:
export GH_AW_ENGINE="codex"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_MCP_SCRIPTS_PORT -e GH_AW_MCP_SCRIPTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_5640afe34ae747d6_EOF
+ cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_472c3b5e0a01d67e_EOF
[history]
persistence = "none"
@@ -972,19 +1010,13 @@ jobs:
[mcp_servers.serena]
container = "ghcr.io/github/serena-mcp-server:latest"
+ entrypoint = "serena"
+ entrypointArgs = ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"]
+ mounts = ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"]
args = [
"--network",
"host",
]
- entrypoint = "serena"
- entrypointArgs = [
- "start-mcp-server",
- "--context",
- "codex",
- "--project",
- "${GITHUB_WORKSPACE}"
- ]
- mounts = ["${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw"]
[mcp_servers.serena."guard-policies"]
@@ -998,10 +1030,10 @@ jobs:
[mcp_servers.web-fetch."guard-policies".write-sink]
accept = ["*"]
- GH_AW_MCP_CONFIG_5640afe34ae747d6_EOF
+ GH_AW_MCP_CONFIG_472c3b5e0a01d67e_EOF
# Generate JSON config for MCP gateway
- cat << GH_AW_MCP_CONFIG_5640afe34ae747d6_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_472c3b5e0a01d67e_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -1083,27 +1115,30 @@ jobs:
}
},
"serena": {
- "container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
- "entrypoint": "serena",
- "entrypointArgs": [
- "start-mcp-server",
- "--context",
- "codex",
- "--project",
- "\${GITHUB_WORKSPACE}"
- ],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
- "guard-policies": {
- "write-sink": {
- "accept": [
- "*"
- ]
- }
+ "type": "stdio",
+ "container": "ghcr.io/github/serena-mcp-server:latest",
+ "entrypoint": "serena",
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "guard-policies": {
+ "write-sink": {
+ "accept": [
+ "*"
+ ]
}
+ }
},
"web-fetch": {
"container": "mcp/fetch",
@@ -1123,7 +1158,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_5640afe34ae747d6_EOF
+ GH_AW_MCP_CONFIG_472c3b5e0a01d67e_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/smoke-codex.md b/.github/workflows/smoke-codex.md
index 6a01ace431..55b82d739b 100644
--- a/.github/workflows/smoke-codex.md
+++ b/.github/workflows/smoke-codex.md
@@ -18,6 +18,7 @@ strict: false
imports:
- shared/gh.md
- shared/reporting.md
+ - shared/mcp/serena-go.md
network:
allowed:
- defaults
@@ -30,9 +31,6 @@ tools:
edit:
bash:
- "*"
- serena:
- languages:
- go: {}
web-fetch:
qmd:
checkouts:
diff --git a/.github/workflows/smoke-copilot-arm.lock.yml b/.github/workflows/smoke-copilot-arm.lock.yml
index 8965afa2df..5a99410501 100644
--- a/.github/workflows/smoke-copilot-arm.lock.yml
+++ b/.github/workflows/smoke-copilot-arm.lock.yml
@@ -26,9 +26,11 @@
# Imports:
# - shared/gh.md
# - shared/github-queries-mcp-script.md
+# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"1e3fa0d1967816c9f6c6d61c507bcc4f03a9b48d3127cec21d34dcc8c26ce367","agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"2e0f56dd848e004d1382bd59038ab27e5bee664d9e065d8ff1ac2c0a2822cb5c","agent_id":"copilot"}
name: "Smoke Copilot ARM64"
"on":
@@ -189,9 +191,9 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_80f2c0387fc2e91c_EOF'
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
- GH_AW_PROMPT_80f2c0387fc2e91c_EOF
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -199,7 +201,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_80f2c0387fc2e91c_EOF'
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
Tools: add_comment(max:2), create_issue, create_discussion, create_pull_request_review_comment(max:5), submit_pull_request_review, add_labels, remove_labels, dispatch_workflow, missing_tool, missing_data, noop
@@ -231,32 +233,68 @@ jobs:
{{/if}}
- GH_AW_PROMPT_80f2c0387fc2e91c_EOF
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_80f2c0387fc2e91c_EOF'
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
- GH_AW_PROMPT_80f2c0387fc2e91c_EOF
- cat << 'GH_AW_PROMPT_80f2c0387fc2e91c_EOF'
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
{{#runtime-import .github/workflows/shared/gh.md}}
- GH_AW_PROMPT_80f2c0387fc2e91c_EOF
- cat << 'GH_AW_PROMPT_80f2c0387fc2e91c_EOF'
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_80f2c0387fc2e91c_EOF
- cat << 'GH_AW_PROMPT_80f2c0387fc2e91c_EOF'
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
{{#runtime-import .github/workflows/shared/github-queries-mcp-script.md}}
- GH_AW_PROMPT_80f2c0387fc2e91c_EOF
- cat << 'GH_AW_PROMPT_80f2c0387fc2e91c_EOF'
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
+ {{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
+ cat << 'GH_AW_PROMPT_92d90d05eb285c1a_EOF'
{{#runtime-import .github/workflows/smoke-copilot-arm.md}}
- GH_AW_PROMPT_80f2c0387fc2e91c_EOF
+ GH_AW_PROMPT_92d90d05eb285c1a_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }}
- GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -497,12 +535,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_ff356b30883bf4e1_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_85529be811f3d6c3_EOF'
{"add_comment":{"allowed_repos":["github/gh-aw"],"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-copilot-arm"],"allowed_repos":["github/gh-aw"]},"create_discussion":{"category":"announcements","close_older_discussions":true,"expires":2,"fallback_to_issue":true,"labels":["ai-generated"],"max":1},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-copilot-arm","expires":2,"group":true,"labels":["automation","testing"],"max":1},"create_pull_request_review_comment":{"max":5,"side":"RIGHT"},"dispatch_workflow":{"max":1,"workflow_files":{"haiku-printer":".yml"},"workflows":["haiku-printer"]},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"send-slack-message":{"description":"Send a message to Slack (stub for testing)","inputs":{"message":{"description":"The message to send","required":false,"type":"string"}},"output":"Slack message stub executed!"},"submit_pull_request_review":{"max":1}}
- GH_AW_SAFE_OUTPUTS_CONFIG_ff356b30883bf4e1_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_85529be811f3d6c3_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_fe8745adcc6e117b_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_9f734eca2f53b9b6_EOF'
{
"description_suffixes": {
"add_comment": " CONSTRAINTS: Maximum 2 comment(s) can be added.",
@@ -559,8 +597,8 @@ jobs:
}
]
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_fe8745adcc6e117b_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_56f618f5533299b0_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_9f734eca2f53b9b6_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_644adfb7470314b6_EOF'
{
"add_comment": {
"defaultMax": 1,
@@ -790,7 +828,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_56f618f5533299b0_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_644adfb7470314b6_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -833,7 +871,7 @@ jobs:
- name: Setup MCP Scripts Config
run: |
mkdir -p ${RUNNER_TEMP}/gh-aw/mcp-scripts/logs
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_478911be5f8bc942_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_a46742b0121eb7a8_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -949,8 +987,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_478911be5f8bc942_EOF
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_88fb0ea78a834d03_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_a46742b0121eb7a8_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_d2be99050e3d093f_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -964,12 +1002,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_88fb0ea78a834d03_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_d2be99050e3d093f_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs
- name: Setup MCP Scripts Tool Files
run: |
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_450b846535cfab0a_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_ab5aec3ed605bb88_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -980,9 +1018,9 @@ jobs:
echo " token: ${GH_AW_GH_TOKEN:0:6}..."
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_450b846535cfab0a_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_ab5aec3ed605bb88_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_b0680bad683b6951_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_7e7cb1d40a9b56d1_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-discussion-query
# Query GitHub discussions with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1117,9 +1155,9 @@ jobs:
EOF
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_b0680bad683b6951_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_7e7cb1d40a9b56d1_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_6715064ed6769336_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_09c73e930ece2e59_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-issue-query
# Query GitHub issues with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1198,9 +1236,9 @@ jobs:
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_6715064ed6769336_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_09c73e930ece2e59_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_8ec5361963561511_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_54d9d9ebed873f30_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-pr-query
# Query GitHub pull requests with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1285,7 +1323,7 @@ jobs:
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_8ec5361963561511_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_54d9d9ebed873f30_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh
- name: Generate MCP Scripts Server Config
@@ -1358,7 +1396,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_MCP_SCRIPTS_PORT -e GH_AW_MCP_SCRIPTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_11360a702a194984_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_044aa7bc4fb54af5_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"agenticworkflows": {
@@ -1441,10 +1479,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -1461,7 +1513,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_11360a702a194984_EOF
+ GH_AW_MCP_CONFIG_044aa7bc4fb54af5_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/smoke-copilot-arm.md b/.github/workflows/smoke-copilot-arm.md
index b461654abf..b6072b8ddd 100644
--- a/.github/workflows/smoke-copilot-arm.md
+++ b/.github/workflows/smoke-copilot-arm.md
@@ -20,6 +20,7 @@ imports:
- shared/gh.md
- shared/reporting.md
- shared/github-queries-mcp-script.md
+ - shared/mcp/serena-go.md
network:
allowed:
- defaults
@@ -34,9 +35,6 @@ tools:
- "*"
github:
playwright:
- serena:
- languages:
- go: {}
web-fetch:
runtimes:
go:
diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml
index 5963bd34c2..36715e27b7 100644
--- a/.github/workflows/smoke-copilot.lock.yml
+++ b/.github/workflows/smoke-copilot.lock.yml
@@ -26,9 +26,11 @@
# Imports:
# - shared/gh.md
# - shared/github-queries-mcp-script.md
+# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"e61f4503d40b9de053152c2ae68538ce50b287317fefc7567d935a4d1499560e","agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"38b177fbe41846384d9b7a2f66fe7f3f344216b074fae0e00c12e299dbc2ad47","agent_id":"copilot"}
name: "Smoke Copilot"
"on":
@@ -195,9 +197,9 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_911691aa67144f92_EOF'
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
- GH_AW_PROMPT_911691aa67144f92_EOF
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -205,7 +207,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_911691aa67144f92_EOF'
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
Tools: add_comment(max:2), create_issue, create_discussion, create_pull_request_review_comment(max:5), submit_pull_request_review, reply_to_pull_request_review_comment(max:5), add_labels, remove_labels, set_issue_type, dispatch_workflow, missing_tool, missing_data, noop
@@ -237,33 +239,69 @@ jobs:
{{/if}}
- GH_AW_PROMPT_911691aa67144f92_EOF
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_911691aa67144f92_EOF'
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
- GH_AW_PROMPT_911691aa67144f92_EOF
- cat << 'GH_AW_PROMPT_911691aa67144f92_EOF'
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
+ ## Serena Code Analysis
+
+ The Serena MCP server is configured for **["go"]** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
{{#runtime-import .github/workflows/shared/gh.md}}
- GH_AW_PROMPT_911691aa67144f92_EOF
- cat << 'GH_AW_PROMPT_911691aa67144f92_EOF'
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_911691aa67144f92_EOF
- cat << 'GH_AW_PROMPT_911691aa67144f92_EOF'
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
{{#runtime-import .github/workflows/shared/github-queries-mcp-script.md}}
- GH_AW_PROMPT_911691aa67144f92_EOF
- cat << 'GH_AW_PROMPT_911691aa67144f92_EOF'
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
+ {{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
+ cat << 'GH_AW_PROMPT_06264f48f8a682ee_EOF'
{{#runtime-import .github/workflows/smoke-copilot.md}}
- GH_AW_PROMPT_911691aa67144f92_EOF
+ GH_AW_PROMPT_06264f48f8a682ee_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number || inputs.item_number }}
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }}
- GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -504,12 +542,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_15f92489b5b6b844_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_b0eb0cdecf7d74dc_EOF'
{"add_comment":{"allowed_repos":["github/gh-aw"],"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-copilot"],"allowed_repos":["github/gh-aw"]},"create_discussion":{"category":"announcements","close_older_discussions":true,"close_older_key":"smoke-copilot","expires":2,"fallback_to_issue":true,"labels":["ai-generated"],"max":1},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-copilot","expires":2,"group":true,"labels":["automation","testing"],"max":1},"create_pull_request_review_comment":{"max":5,"side":"RIGHT"},"dispatch_workflow":{"max":1,"workflow_files":{"haiku-printer":".yml"},"workflows":["haiku-printer"]},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"reply_to_pull_request_review_comment":{"max":5},"send-slack-message":{"description":"Send a message to Slack (stub for testing)","inputs":{"message":{"description":"The message to send","required":false,"type":"string"}},"output":"Slack message stub executed!"},"set_issue_type":{},"submit_pull_request_review":{"max":1}}
- GH_AW_SAFE_OUTPUTS_CONFIG_15f92489b5b6b844_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_b0eb0cdecf7d74dc_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_b61b183d0813d040_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_3cf00183d52c7f59_EOF'
{
"description_suffixes": {
"add_comment": " CONSTRAINTS: Maximum 2 comment(s) can be added.",
@@ -567,8 +605,8 @@ jobs:
}
]
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_b61b183d0813d040_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_3a0130848e06a2fe_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_3cf00183d52c7f59_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_4185823aa5848c19_EOF'
{
"add_comment": {
"defaultMax": 1,
@@ -838,7 +876,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_3a0130848e06a2fe_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_4185823aa5848c19_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -881,7 +919,7 @@ jobs:
- name: Setup MCP Scripts Config
run: |
mkdir -p ${RUNNER_TEMP}/gh-aw/mcp-scripts/logs
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_a4566bfabe45aa45_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_9d7f4412456c42c4_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -997,8 +1035,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_a4566bfabe45aa45_EOF
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_c87b8c860d1974c4_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_9d7f4412456c42c4_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_04cbc2a57eb491b4_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -1012,12 +1050,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_c87b8c860d1974c4_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_04cbc2a57eb491b4_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs
- name: Setup MCP Scripts Tool Files
run: |
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_3ad540edaeb4d971_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_af522135b2e14108_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -1028,9 +1066,9 @@ jobs:
echo " token: ${GH_AW_GH_TOKEN:0:6}..."
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_3ad540edaeb4d971_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_af522135b2e14108_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_7b464ebdab7456b0_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_ff483f3ef5b00ef9_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-discussion-query
# Query GitHub discussions with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1165,9 +1203,9 @@ jobs:
EOF
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_7b464ebdab7456b0_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_ff483f3ef5b00ef9_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_4b5afc8f32276710_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_049cfb21543999d5_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-issue-query
# Query GitHub issues with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1246,9 +1284,9 @@ jobs:
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_4b5afc8f32276710_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_049cfb21543999d5_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_7afa50763df778fe_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_7c351e2efe83583d_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-pr-query
# Query GitHub pull requests with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1333,7 +1371,7 @@ jobs:
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_7afa50763df778fe_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_7c351e2efe83583d_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh
- name: Generate MCP Scripts Server Config
@@ -1406,7 +1444,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_MCP_SCRIPTS_PORT -e GH_AW_MCP_SCRIPTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_1de5c4745e0b2994_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_5be18c0306cfd71e_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"agenticworkflows": {
@@ -1489,10 +1527,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -1509,7 +1561,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_1de5c4745e0b2994_EOF
+ GH_AW_MCP_CONFIG_5be18c0306cfd71e_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/smoke-copilot.md b/.github/workflows/smoke-copilot.md
index 78db17a288..03052ea2a3 100644
--- a/.github/workflows/smoke-copilot.md
+++ b/.github/workflows/smoke-copilot.md
@@ -23,6 +23,7 @@ imports:
- shared/gh.md
- shared/reporting.md
- shared/github-queries-mcp-script.md
+ - shared/mcp/serena-go.md
network:
allowed:
- defaults
@@ -37,9 +38,6 @@ tools:
- "*"
github:
playwright:
- serena:
- languages:
- go: {}
web-fetch:
runtimes:
go:
diff --git a/.github/workflows/terminal-stylist.lock.yml b/.github/workflows/terminal-stylist.lock.yml
index 29b3070af6..27399fc555 100644
--- a/.github/workflows/terminal-stylist.lock.yml
+++ b/.github/workflows/terminal-stylist.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"da1780268adb86447e56695671e9c4099f9e4f3cfbba2ad1ea6299110b676880","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"f0344a3841cec3dc22183e17b976c85e95a74fafbc989116453459a6418da6a3","strict":true,"agent_id":"copilot"}
name: "Terminal Stylist"
"on":
@@ -138,14 +139,14 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_7da1d5bf070bfb56_EOF'
+ cat << 'GH_AW_PROMPT_96b3acca85b37755_EOF'
- GH_AW_PROMPT_7da1d5bf070bfb56_EOF
+ GH_AW_PROMPT_96b3acca85b37755_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_7da1d5bf070bfb56_EOF'
+ cat << 'GH_AW_PROMPT_96b3acca85b37755_EOF'
Tools: create_discussion, missing_tool, missing_data, noop
@@ -177,27 +178,60 @@ jobs:
{{/if}}
- GH_AW_PROMPT_7da1d5bf070bfb56_EOF
+ GH_AW_PROMPT_96b3acca85b37755_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_7da1d5bf070bfb56_EOF'
+ cat << 'GH_AW_PROMPT_96b3acca85b37755_EOF'
- GH_AW_PROMPT_7da1d5bf070bfb56_EOF
- cat << 'GH_AW_PROMPT_7da1d5bf070bfb56_EOF'
+ GH_AW_PROMPT_96b3acca85b37755_EOF
+ cat << 'GH_AW_PROMPT_96b3acca85b37755_EOF'
+ ## Serena Code Analysis
+
+          The Serena MCP server is configured for **Go** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_96b3acca85b37755_EOF
+ cat << 'GH_AW_PROMPT_96b3acca85b37755_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_7da1d5bf070bfb56_EOF
- cat << 'GH_AW_PROMPT_7da1d5bf070bfb56_EOF'
+ GH_AW_PROMPT_96b3acca85b37755_EOF
+ cat << 'GH_AW_PROMPT_96b3acca85b37755_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_7da1d5bf070bfb56_EOF
- cat << 'GH_AW_PROMPT_7da1d5bf070bfb56_EOF'
+ GH_AW_PROMPT_96b3acca85b37755_EOF
+ cat << 'GH_AW_PROMPT_96b3acca85b37755_EOF'
{{#runtime-import .github/workflows/terminal-stylist.md}}
- GH_AW_PROMPT_7da1d5bf070bfb56_EOF
+ GH_AW_PROMPT_96b3acca85b37755_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -355,12 +389,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_f0b68779f50eb37a_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_686f25c08ea02af8_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_f0b68779f50eb37a_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_686f25c08ea02af8_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_11d2086560060736_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_24c386b0ff4977da_EOF'
{
"description_suffixes": {
"create_discussion": " CONSTRAINTS: Maximum 1 discussion(s) can be created. Discussions will be created in category \"audits\"."
@@ -368,8 +402,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_11d2086560060736_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_2ac2890ccd1338a0_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_24c386b0ff4977da_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_1df089d2eb0534f9_EOF'
{
"create_discussion": {
"defaultMax": 1,
@@ -455,7 +489,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_2ac2890ccd1338a0_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_1df089d2eb0534f9_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -523,7 +557,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_4231edfc8f8117da_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_9ba275aa3861ddf7_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -559,10 +593,24 @@ jobs:
"serena": {
"type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": ["--network", "host"],
"entrypoint": "serena",
- "entrypointArgs": ["start-mcp-server", "--context", "codex", "--project", "\${GITHUB_WORKSPACE}"],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "entrypointArgs": [
+ "start-mcp-server",
+ "--context",
+ "codex",
+ "--project",
+ "\${GITHUB_WORKSPACE}"
+ ],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
+ "tools": [
+ "*"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -579,7 +627,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_4231edfc8f8117da_EOF
+ GH_AW_MCP_CONFIG_9ba275aa3861ddf7_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/typist.lock.yml b/.github/workflows/typist.lock.yml
index 528280ce98..f50ae5e476 100644
--- a/.github/workflows/typist.lock.yml
+++ b/.github/workflows/typist.lock.yml
@@ -25,9 +25,10 @@
# Resolved workflow manifest:
# Imports:
# - shared/mcp/serena-go.md
+# - shared/mcp/serena.md
# - shared/reporting.md
#
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"a84f1d31ef089afc1ac4110815e97c8509584a617fb3384a318fe01d5e960c67","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"49fffa022552509cf47cc4d542b676a8cfe275c2c68a72bc8280c657fd059878","strict":true,"agent_id":"claude"}
name: "Typist - Go Type Analysis"
"on":
@@ -137,14 +138,14 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_d9db74ef422deab0_EOF'
+ cat << 'GH_AW_PROMPT_914ce2ef884d1b6b_EOF'
- GH_AW_PROMPT_d9db74ef422deab0_EOF
+ GH_AW_PROMPT_914ce2ef884d1b6b_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_d9db74ef422deab0_EOF'
+ cat << 'GH_AW_PROMPT_914ce2ef884d1b6b_EOF'
Tools: create_discussion, missing_tool, missing_data, noop
@@ -176,27 +177,60 @@ jobs:
{{/if}}
- GH_AW_PROMPT_d9db74ef422deab0_EOF
+ GH_AW_PROMPT_914ce2ef884d1b6b_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_d9db74ef422deab0_EOF'
+ cat << 'GH_AW_PROMPT_914ce2ef884d1b6b_EOF'
- GH_AW_PROMPT_d9db74ef422deab0_EOF
- cat << 'GH_AW_PROMPT_d9db74ef422deab0_EOF'
+ GH_AW_PROMPT_914ce2ef884d1b6b_EOF
+ cat << 'GH_AW_PROMPT_914ce2ef884d1b6b_EOF'
+ ## Serena Code Analysis
+
+          The Serena MCP server is configured for **Go** analysis in this workspace:
+ - **Workspace**: `__GH_AW_GITHUB_WORKSPACE__`
+ - **Memory**: `/tmp/gh-aw/cache-memory/serena/`
+
+ ### Project Activation
+
+ Before analyzing code, activate the Serena project:
+ ```
+ Tool: activate_project
+ Args: { "path": "__GH_AW_GITHUB_WORKSPACE__" }
+ ```
+
+ ### Available Capabilities
+
+ Serena provides IDE-grade Language Server Protocol (LSP) tools including:
+ - **Symbol search**: `find_symbol` — locate functions, types, interfaces by name
+ - **Navigation**: `find_referencing_symbols` — find all callers/usages of a symbol
+ - **Type info**: `get_symbol_documentation` — hover-level type and doc information
+ - **Code editing**: `replace_symbol_body`, `insert_after_symbol` — symbol-level edits
+ - **Diagnostics**: `get_diagnostics` — compiler errors and linter warnings
+
+ ### Analysis Guidelines
+
+ 1. **Use semantic tools over text search** — prefer Serena's LSP tools over `grep`
+ 2. **Activate project first** — always call `activate_project` before other tools
+ 3. **Cross-reference findings** — validate with multiple tools for accuracy
+ 4. **Focus on the relevant language files** — ignore unrelated file types
+
+
+ GH_AW_PROMPT_914ce2ef884d1b6b_EOF
+ cat << 'GH_AW_PROMPT_914ce2ef884d1b6b_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
- GH_AW_PROMPT_d9db74ef422deab0_EOF
- cat << 'GH_AW_PROMPT_d9db74ef422deab0_EOF'
+ GH_AW_PROMPT_914ce2ef884d1b6b_EOF
+ cat << 'GH_AW_PROMPT_914ce2ef884d1b6b_EOF'
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
- GH_AW_PROMPT_d9db74ef422deab0_EOF
- cat << 'GH_AW_PROMPT_d9db74ef422deab0_EOF'
+ GH_AW_PROMPT_914ce2ef884d1b6b_EOF
+ cat << 'GH_AW_PROMPT_914ce2ef884d1b6b_EOF'
{{#runtime-import .github/workflows/typist.md}}
- GH_AW_PROMPT_d9db74ef422deab0_EOF
+ GH_AW_PROMPT_914ce2ef884d1b6b_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -358,12 +392,12 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_8366025ead98cc32_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_a66c2a5f4a203af9_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
- GH_AW_SAFE_OUTPUTS_CONFIG_8366025ead98cc32_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_a66c2a5f4a203af9_EOF
- name: Write Safe Outputs Tools
run: |
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_f1035f55f928e329_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_ff82ac30e842e242_EOF'
{
"description_suffixes": {
"create_discussion": " CONSTRAINTS: Maximum 1 discussion(s) can be created. Discussions will be created in category \"audits\"."
@@ -371,8 +405,8 @@ jobs:
"repo_params": {},
"dynamic_tools": []
}
- GH_AW_SAFE_OUTPUTS_TOOLS_META_f1035f55f928e329_EOF
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_04f68f9130114afa_EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_ff82ac30e842e242_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_c1252e580a094e6c_EOF'
{
"create_discussion": {
"defaultMax": 1,
@@ -458,7 +492,7 @@ jobs:
}
}
}
- GH_AW_SAFE_OUTPUTS_VALIDATION_04f68f9130114afa_EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_c1252e580a094e6c_EOF
node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
@@ -525,7 +559,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6'
- cat << GH_AW_MCP_CONFIG_50721f9211161101_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_c23ca653fe6f1ecb_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -558,11 +592,8 @@ jobs:
}
},
"serena": {
+ "type": "stdio",
"container": "ghcr.io/github/serena-mcp-server:latest",
- "args": [
- "--network",
- "host"
- ],
"entrypoint": "serena",
"entrypointArgs": [
"start-mcp-server",
@@ -571,7 +602,13 @@ jobs:
"--project",
"\${GITHUB_WORKSPACE}"
],
- "mounts": ["\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"],
+ "mounts": [
+ "\${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw"
+ ],
+ "args": [
+ "--network",
+ "host"
+ ],
"guard-policies": {
"write-sink": {
"accept": [
@@ -588,7 +625,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_50721f9211161101_EOF
+ GH_AW_MCP_CONFIG_c23ca653fe6f1ecb_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -686,13 +723,14 @@ jobs:
# - mcp__github__search_pull_requests
# - mcp__github__search_repositories
# - mcp__github__search_users
+ # - mcp__serena
timeout-minutes: 20
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
# shellcheck disable=SC1003
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.1 --skip-pull --enable-api-proxy \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat pkg/**/*.go),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''\'\'''\''*.go'\''\'\'''\'' ! -name '\''\'\'''\''*_test.go'\''\'\'''\'' -type f),Bash(find pkg -type f -name '\''\'\'''\''*.go'\''\'\'''\'' ! -name '\''\'\'''\''*_test.go'\''\'\'''\''),Bash(find pkg/ -maxdepth 1 -ls),Bash(grep -r '\''\'\'''\''\bany\b'\''\'\'''\'' pkg --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep -r '\''\'\'''\''interface{}'\''\'\'''\'' pkg --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep -r '\''\'\'''\''type '\''\'\'''\'' pkg --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_bra
nches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash(cat pkg/**/*.go),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''\'\'''\''*.go'\''\'\'''\'' ! -name '\''\'\'''\''*_test.go'\''\'\'''\'' -type f),Bash(find pkg -type f -name '\''\'\'''\''*.go'\''\'\'''\'' ! -name '\''\'\'''\''*_test.go'\''\'\'''\''),Bash(find pkg/ -maxdepth 1 -ls),Bash(grep -r '\''\'\'''\''\bany\b'\''\'\'''\'' pkg --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep -r '\''\'\'''\''interface{}'\''\'\'''\'' pkg --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep -r '\''\'\'''\''type '\''\'\'''\'' pkg --include='\''\'\'''\''*.go'\''\'\'''\''),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_bra
nches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__serena'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
BASH_DEFAULT_TIMEOUT_MS: 60000
diff --git a/pkg/parser/import_bfs.go b/pkg/parser/import_bfs.go
index 1436fbd17e..548e70228a 100644
--- a/pkg/parser/import_bfs.go
+++ b/pkg/parser/import_bfs.go
@@ -291,104 +291,129 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a
log.Printf("Failed to extract frontmatter from %s: %v", item.fullPath, err)
} else if result.Frontmatter != nil {
// Check for nested imports field
+ type nestedImportEntry struct {
+ path string
+ inputs map[string]any
+ }
+ var nestedImports []nestedImportEntry
if nestedImportsField, hasImports := result.Frontmatter["imports"]; hasImports {
- var nestedImports []string
switch v := nestedImportsField.(type) {
case []any:
for _, nestedItem := range v {
if str, ok := nestedItem.(string); ok {
- nestedImports = append(nestedImports, str)
+ nestedImports = append(nestedImports, nestedImportEntry{path: str})
+ } else if nestedMap, ok := nestedItem.(map[string]any); ok {
+ // Handle uses/with or path/inputs syntax
+ var nestedPath string
+ if usesPath, ok := nestedMap["uses"].(string); ok {
+ nestedPath = usesPath
+ } else if pathVal, ok := nestedMap["path"].(string); ok {
+ nestedPath = pathVal
+ }
+ if nestedPath != "" {
+ var nestedInputs map[string]any
+ if withVal, ok := nestedMap["with"].(map[string]any); ok {
+ nestedInputs = withVal
+ } else if inputsVal, ok := nestedMap["inputs"].(map[string]any); ok {
+ nestedInputs = inputsVal
+ }
+ nestedImports = append(nestedImports, nestedImportEntry{path: nestedPath, inputs: nestedInputs})
+ }
}
}
case []string:
- nestedImports = v
+ for _, str := range v {
+ nestedImports = append(nestedImports, nestedImportEntry{path: str})
+ }
}
+ }
- // Add nested imports to queue (BFS: append to end)
- // For local imports: resolve relative to the workflows directory (baseDir)
- // For remote imports: resolve relative to .github/workflows/ in the remote repo
- for _, nestedImportPath := range nestedImports {
- // Handle section references
- var nestedFilePath, nestedSectionName string
- if strings.Contains(nestedImportPath, "#") {
- parts := strings.SplitN(nestedImportPath, "#", 2)
- nestedFilePath = parts[0]
- nestedSectionName = parts[1]
- } else {
- nestedFilePath = nestedImportPath
- }
+ // Add nested imports to queue (BFS: append to end)
+ // For local imports: resolve relative to the workflows directory (baseDir)
+ // For remote imports: resolve relative to .github/workflows/ in the remote repo
+ for _, nestedEntry := range nestedImports {
+ nestedImportPath := nestedEntry.path
+ // Handle section references
+ var nestedFilePath, nestedSectionName string
+ if strings.Contains(nestedImportPath, "#") {
+ parts := strings.SplitN(nestedImportPath, "#", 2)
+ nestedFilePath = parts[0]
+ nestedSectionName = parts[1]
+ } else {
+ nestedFilePath = nestedImportPath
+ }
- // Determine the resolution path and propagate remote origin context
- resolvedPath := nestedFilePath
- var nestedRemoteOrigin *remoteImportOrigin
+ // Determine the resolution path and propagate remote origin context
+ resolvedPath := nestedFilePath
+ var nestedRemoteOrigin *remoteImportOrigin
- if item.remoteOrigin != nil && !isWorkflowSpec(nestedFilePath) {
- // Parent was fetched from a remote repo and nested path is relative.
- // Convert to a workflowspec that resolves against the parent workflowspec's
- // base directory (e.g., gh-agent-workflows for gh-agent-workflows/gh-aw-workflows/file.md).
- cleanPath := path.Clean(strings.TrimPrefix(nestedFilePath, "./"))
+ if item.remoteOrigin != nil && !isWorkflowSpec(nestedFilePath) {
+ // Parent was fetched from a remote repo and nested path is relative.
+ // Convert to a workflowspec that resolves against the parent workflowspec's
+ // base directory (e.g., gh-agent-workflows for gh-agent-workflows/gh-aw-workflows/file.md).
+ cleanPath := path.Clean(strings.TrimPrefix(nestedFilePath, "./"))
- // Reject paths that escape the base directory (e.g., ../../../etc/passwd)
- if cleanPath == ".." || strings.HasPrefix(cleanPath, "../") || path.IsAbs(cleanPath) {
- return nil, fmt.Errorf("nested import '%s' from remote file '%s' escapes base directory", nestedFilePath, item.importPath)
- }
+ // Reject paths that escape the base directory (e.g., ../../../etc/passwd)
+ if cleanPath == ".." || strings.HasPrefix(cleanPath, "../") || path.IsAbs(cleanPath) {
+ return nil, fmt.Errorf("nested import '%s' from remote file '%s' escapes base directory", nestedFilePath, item.importPath)
+ }
- // Use the parent's BasePath if available, otherwise default to .github/workflows
- basePath := item.remoteOrigin.BasePath
- if basePath == "" {
- basePath = ".github/workflows"
- }
- // Clean the basePath to ensure it's normalized
- basePath = path.Clean(basePath)
-
- resolvedPath = fmt.Sprintf("%s/%s/%s/%s@%s",
- item.remoteOrigin.Owner, item.remoteOrigin.Repo, basePath, cleanPath, item.remoteOrigin.Ref)
- // Parse a new remoteOrigin from resolvedPath to get the correct BasePath
- // for THIS file's nested imports, not the parent's BasePath
- nestedRemoteOrigin = parseRemoteOrigin(resolvedPath)
- importLog.Printf("Resolving nested import as remote workflowspec: %s -> %s (basePath=%s)", nestedFilePath, resolvedPath, basePath)
- } else if isWorkflowSpec(nestedFilePath) {
- // Nested import is itself a workflowspec - parse its remote origin
- nestedRemoteOrigin = parseRemoteOrigin(nestedFilePath)
- if nestedRemoteOrigin != nil {
- importLog.Printf("Nested workflowspec import detected: %s (origin: %s/%s@%s)", nestedFilePath, nestedRemoteOrigin.Owner, nestedRemoteOrigin.Repo, nestedRemoteOrigin.Ref)
- }
+ // Use the parent's BasePath if available, otherwise default to .github/workflows
+ basePath := item.remoteOrigin.BasePath
+ if basePath == "" {
+ basePath = ".github/workflows"
+ }
+ // Clean the basePath to ensure it's normalized
+ basePath = path.Clean(basePath)
+
+ resolvedPath = fmt.Sprintf("%s/%s/%s/%s@%s",
+ item.remoteOrigin.Owner, item.remoteOrigin.Repo, basePath, cleanPath, item.remoteOrigin.Ref)
+ // Parse a new remoteOrigin from resolvedPath to get the correct BasePath
+ // for THIS file's nested imports, not the parent's BasePath
+ nestedRemoteOrigin = parseRemoteOrigin(resolvedPath)
+ importLog.Printf("Resolving nested import as remote workflowspec: %s -> %s (basePath=%s)", nestedFilePath, resolvedPath, basePath)
+ } else if isWorkflowSpec(nestedFilePath) {
+ // Nested import is itself a workflowspec - parse its remote origin
+ nestedRemoteOrigin = parseRemoteOrigin(nestedFilePath)
+ if nestedRemoteOrigin != nil {
+ importLog.Printf("Nested workflowspec import detected: %s (origin: %s/%s@%s)", nestedFilePath, nestedRemoteOrigin.Owner, nestedRemoteOrigin.Repo, nestedRemoteOrigin.Ref)
}
+ }
- nestedFullPath, err := ResolveIncludePath(resolvedPath, baseDir, cache)
- if err != nil {
- // If we have source information for the parent workflow, create a structured error
- if workflowFilePath != "" && yamlContent != "" {
- // For nested imports, we should report the error at the location where the parent import is defined
- // since the nested import file itself might not have source location
- line, column := findImportItemLocation(yamlContent, item.importPath)
- importErr := &ImportError{
- ImportPath: nestedImportPath,
- FilePath: workflowFilePath,
- Line: line,
- Column: column,
- Cause: err,
- }
- return nil, FormatImportError(importErr, yamlContent)
+ nestedFullPath, err := ResolveIncludePath(resolvedPath, baseDir, cache)
+ if err != nil {
+ // If we have source information for the parent workflow, create a structured error
+ if workflowFilePath != "" && yamlContent != "" {
+ // For nested imports, we should report the error at the location where the parent import is defined
+ // since the nested import file itself might not have source location
+ line, column := findImportItemLocation(yamlContent, item.importPath)
+ importErr := &ImportError{
+ ImportPath: nestedImportPath,
+ FilePath: workflowFilePath,
+ Line: line,
+ Column: column,
+ Cause: err,
}
- // Fallback to generic error
- return nil, fmt.Errorf("failed to resolve nested import '%s' from '%s': %w", nestedFilePath, item.fullPath, err)
+ return nil, FormatImportError(importErr, yamlContent)
}
+ // Fallback to generic error
+ return nil, fmt.Errorf("failed to resolve nested import '%s' from '%s': %w", nestedFilePath, item.fullPath, err)
+ }
- // Check for cycles - skip if already visited
- if !visited[nestedFullPath] {
- visited[nestedFullPath] = true
- queue = append(queue, importQueueItem{
- importPath: nestedImportPath,
- fullPath: nestedFullPath,
- sectionName: nestedSectionName,
- baseDir: baseDir, // Use original baseDir, not nestedBaseDir
- remoteOrigin: nestedRemoteOrigin,
- })
- log.Printf("Discovered nested import: %s -> %s (queued)", item.fullPath, nestedFullPath)
- } else {
- log.Printf("Skipping already visited nested import: %s (cycle detected)", nestedFullPath)
- }
+ // Check for cycles - skip if already visited
+ if !visited[nestedFullPath] {
+ visited[nestedFullPath] = true
+ queue = append(queue, importQueueItem{
+ importPath: nestedImportPath,
+ fullPath: nestedFullPath,
+ sectionName: nestedSectionName,
+ baseDir: baseDir, // Use original baseDir, not nestedBaseDir
+ inputs: nestedEntry.inputs,
+ remoteOrigin: nestedRemoteOrigin,
+ })
+ log.Printf("Discovered nested import: %s -> %s (queued)", item.fullPath, nestedFullPath)
+ } else {
+ log.Printf("Skipping already visited nested import: %s (cycle detected)", nestedFullPath)
}
}
}
@@ -413,8 +438,9 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a
}
// parseImportSpecsFromArray parses an []any slice into a list of ImportSpec values.
-// Each element must be a string (simple path) or a map with a required "path" key
-// and an optional "inputs" map.
+// Each element must be a string (simple path) or a map with a required "path" or "uses"
+// key and an optional "inputs" or "with" map. The "uses"/"with" form mirrors GitHub Actions
+// reusable workflow syntax and is an alias for "path"/"inputs".
func parseImportSpecsFromArray(items []any) ([]ImportSpec, error) {
var specs []ImportSpec
for _, item := range items {
@@ -422,25 +448,34 @@ func parseImportSpecsFromArray(items []any) ([]ImportSpec, error) {
case string:
specs = append(specs, ImportSpec{Path: importItem})
case map[string]any:
+ // Accept "uses" as an alias for "path"
pathValue, hasPath := importItem["path"]
if !hasPath {
- return nil, errors.New("import object must have a 'path' field")
+ pathValue, hasPath = importItem["uses"]
+ }
+ if !hasPath {
+ return nil, errors.New("import object must have a 'path' or 'uses' field")
}
pathStr, ok := pathValue.(string)
if !ok {
- return nil, errors.New("import 'path' must be a string")
+ return nil, errors.New("import 'path'/'uses' must be a string")
}
+ // Accept "with" as an alias for "inputs"
var inputs map[string]any
- if inputsValue, hasInputs := importItem["inputs"]; hasInputs {
+ inputsValue, hasInputs := importItem["inputs"]
+ if !hasInputs {
+ inputsValue, hasInputs = importItem["with"]
+ }
+ if hasInputs {
if inputsMap, ok := inputsValue.(map[string]any); ok {
inputs = inputsMap
} else {
- return nil, errors.New("import 'inputs' must be an object")
+ return nil, errors.New("import 'inputs'/'with' must be an object")
}
}
specs = append(specs, ImportSpec{Path: pathStr, Inputs: inputs})
default:
- return nil, errors.New("import item must be a string or an object with 'path' field")
+ return nil, errors.New("import item must be a string or an object with 'path'/'uses' field")
}
}
return specs, nil
diff --git a/pkg/parser/import_field_extractor.go b/pkg/parser/import_field_extractor.go
index a253aaad50..2eff5c24be 100644
--- a/pkg/parser/import_field_extractor.go
+++ b/pkg/parser/import_field_extractor.go
@@ -8,6 +8,7 @@ import (
"encoding/json"
"fmt"
"path/filepath"
+ "regexp"
"strings"
)
@@ -71,10 +72,34 @@ func newImportAccumulator() *importAccumulator {
// skip-roles, skip-bots, post-steps, labels, cache, and features.
func (acc *importAccumulator) extractAllImportFields(content []byte, item importQueueItem, visited map[string]bool) error {
log.Printf("Extracting all import fields: path=%s, section=%s, inputs=%d, content_size=%d bytes", item.fullPath, item.sectionName, len(item.inputs), len(content))
- // Extract tools from imported file
- toolsContent, err := processIncludedFileWithVisited(item.fullPath, item.sectionName, true, visited)
- if err != nil {
- return fmt.Errorf("failed to process imported file '%s': %w", item.fullPath, err)
+
+ // When the import provides 'with' inputs, apply expression substitution to the raw
+ // content before any YAML or markdown processing. This enables ${{ github.aw.import-inputs.* }}
+ // expressions in the imported workflow's frontmatter fields (tools, runtimes, etc.)
+ // as well as in the markdown body. Array and map values are serialized as JSON so they
+ // produce valid YAML inline syntax (e.g. ["go","typescript"]).
+ rawContent := string(content)
+ if len(item.inputs) > 0 {
+ rawContent = substituteImportInputsInContent(rawContent, item.inputs)
+ }
+
+ // Extract tools from imported file.
+ // When inputs are present we use the already-substituted content (to pick up any
+ // ${{ github.aw.import-inputs.* }} expressions in the tools/mcp-servers frontmatter)
+ // rather than re-reading the original file from disk.
+ var toolsContent string
+ if len(item.inputs) > 0 {
+ var err error
+ toolsContent, err = extractToolsFromContent(rawContent)
+ if err != nil {
+ return fmt.Errorf("failed to extract tools from '%s': %w", item.fullPath, err)
+ }
+ } else {
+ var err error
+ toolsContent, err = processIncludedFileWithVisited(item.fullPath, item.sectionName, true, visited)
+ if err != nil {
+ return fmt.Errorf("failed to process imported file '%s': %w", item.fullPath, err)
+ }
}
acc.toolsBuilder.WriteString(toolsContent + "\n")
@@ -89,11 +114,13 @@ func (acc *importAccumulator) extractAllImportFields(content []byte, item import
acc.importPaths = append(acc.importPaths, importRelPath)
log.Printf("Added import path for runtime-import: %s", importRelPath)
} else if len(item.inputs) > 0 {
- // Has inputs - must inline for compile-time substitution
+ // Has inputs - must inline for compile-time substitution.
+ // Extract markdown from the already-substituted content so that import-inputs
+ // expressions embedded in the markdown body are resolved here.
log.Printf("Import %s has inputs - will be inlined for compile-time substitution", importRelPath)
- markdownContent, err := processIncludedFileWithVisited(item.fullPath, item.sectionName, false, visited)
+ markdownContent, err := ExtractMarkdownContent(rawContent)
if err != nil {
- return fmt.Errorf("failed to process markdown from imported file '%s': %w", item.fullPath, err)
+ return fmt.Errorf("failed to extract markdown from imported file '%s': %w", item.fullPath, err)
}
if markdownContent != "" {
acc.markdownBuilder.WriteString(markdownContent)
@@ -110,7 +137,9 @@ func (acc *importAccumulator) extractAllImportFields(content []byte, item import
// Parse frontmatter once to avoid redundant YAML parsing for each field extraction.
// All subsequent field extractions use the pre-parsed result.
- parsed, err := ExtractFrontmatterFromContent(string(content))
+ // When inputs are present we parse the already-substituted content so that all
+ // frontmatter fields (runtimes, mcp-servers, etc.) reflect the resolved values.
+ parsed, err := ExtractFrontmatterFromContent(rawContent)
var fm map[string]any
if err == nil {
fm = parsed.Frontmatter
@@ -118,6 +147,28 @@ func (acc *importAccumulator) extractAllImportFields(content []byte, item import
fm = make(map[string]any)
}
+ // Validate 'with'/'inputs' values against the imported workflow's 'import-schema' (if present).
+ // Run validation even when inputs is nil/empty so required fields can be detected.
+ // Use the ORIGINAL (unsubstituted) frontmatter for schema lookup so the import-schema
+ // declaration itself is not affected by expression substitution.
+ if len(item.inputs) > 0 || string(content) != rawContent {
+ // When substitution happened, reload the original frontmatter for schema validation.
+ origParsed, origErr := ExtractFrontmatterFromContent(string(content))
+ if origErr == nil {
+ if _, hasSchema := origParsed.Frontmatter["import-schema"]; hasSchema {
+ if err := validateWithImportSchema(item.inputs, origParsed.Frontmatter, item.importPath); err != nil {
+ return err
+ }
+ }
+ }
+ } else {
+ if _, hasSchema := fm["import-schema"]; hasSchema {
+ if err := validateWithImportSchema(item.inputs, fm, item.importPath); err != nil {
+ return err
+ }
+ }
+ }
+
// Extract engines from imported file
engineContent, err := extractFieldJSONFromMap(fm, "engine", "")
if err == nil && engineContent != "" {
@@ -370,3 +421,240 @@ func validateGitHubAppJSON(appJSON string) string {
}
return appJSON
}
+
+// validateWithImportSchema validates the provided 'with'/'inputs' values against
+// the 'import-schema' declared in the imported workflow's frontmatter.
+// It checks that:
+// - all required parameters declared in import-schema are present in 'with'
+// - no unknown parameters are provided (i.e., not declared in import-schema)
+// - provided values match the declared type (string, number, boolean, choice)
+// - choice values are within the allowed options list
+//
+// If the imported workflow has no 'import-schema', all provided 'with' values are
+// accepted without validation (backward compatibility with 'inputs' form).
+func validateWithImportSchema(inputs map[string]any, fm map[string]any, importPath string) error {
+ rawSchema, hasSchema := fm["import-schema"]
+ if !hasSchema {
+ return nil
+ }
+ schemaMap, ok := rawSchema.(map[string]any)
+ if !ok {
+ return nil
+ }
+ if len(schemaMap) == 0 {
+ return nil
+ }
+
+ // Check for unknown keys not declared in import-schema
+ for key := range inputs {
+ if _, declared := schemaMap[key]; !declared {
+ return fmt.Errorf("import '%s': unknown 'with' input %q is not declared in the import-schema", importPath, key)
+ }
+ }
+
+ // Check each declared schema field
+ for paramName, paramDefRaw := range schemaMap {
+ paramDef, _ := paramDefRaw.(map[string]any)
+
+ // Check required parameters
+ if req, _ := paramDef["required"].(bool); req {
+ if _, provided := inputs[paramName]; !provided {
+ return fmt.Errorf("import '%s': required 'with' input %q is missing (declared in import-schema)", importPath, paramName)
+ }
+ }
+
+ value, provided := inputs[paramName]
+ if !provided {
+ continue
+ }
+
+ // Skip type validation when type is not specified
+ declaredType, _ := paramDef["type"].(string)
+ if declaredType == "" {
+ continue
+ }
+
+ // Validate type
+ if err := validateImportInputType(paramName, value, declaredType, paramDef, importPath); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// validateObjectInput validates a 'with' value of type object against the
+// one-level deep 'properties' declared in the import-schema.
+func validateObjectInput(name string, value any, paramDef map[string]any, importPath string) error {
+ objMap, ok := value.(map[string]any)
+ if !ok {
+ return fmt.Errorf("import '%s': 'with' input %q must be an object (got %T)", importPath, name, value)
+ }
+ propsAny, hasProps := paramDef["properties"]
+ if !hasProps {
+ return nil // no schema for properties - accept any object
+ }
+ propsMap, ok := propsAny.(map[string]any)
+ if !ok {
+ return nil
+ }
+ // Check for unknown sub-keys
+ for subKey := range objMap {
+ if _, declared := propsMap[subKey]; !declared {
+ return fmt.Errorf("import '%s': 'with' input %q has unknown property %q (not in import-schema)", importPath, name, subKey)
+ }
+ }
+ // Validate each declared property
+ for propName, propDefRaw := range propsMap {
+ propDef, _ := propDefRaw.(map[string]any)
+ // Check required sub-fields
+ if req, _ := propDef["required"].(bool); req {
+ if _, provided := objMap[propName]; !provided {
+ return fmt.Errorf("import '%s': required property %q of 'with' input %q is missing", importPath, propName, name)
+ }
+ }
+ subValue, provided := objMap[propName]
+ if !provided {
+ continue
+ }
+ propType, _ := propDef["type"].(string)
+ if propType == "" {
+ continue
+ }
+ qualifiedName := name + "." + propName
+ if err := validateImportInputType(qualifiedName, subValue, propType, propDef, importPath); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// validateImportInputType checks that a single 'with' value matches the declared type.
+func validateImportInputType(name string, value any, declaredType string, paramDef map[string]any, importPath string) error {
+ switch declaredType {
+ case "string":
+ if _, ok := value.(string); !ok {
+ return fmt.Errorf("import '%s': 'with' input %q must be a string (got %T)", importPath, name, value)
+ }
+ case "number":
+ // Accept all numeric types that YAML parsers may produce
+ switch value.(type) {
+ case int, int8, int16, int32, int64,
+ uint, uint8, uint16, uint32, uint64,
+ float32, float64:
+ // OK
+ default:
+ return fmt.Errorf("import '%s': 'with' input %q must be a number (got %T)", importPath, name, value)
+ }
+ case "boolean":
+ if _, ok := value.(bool); !ok {
+ return fmt.Errorf("import '%s': 'with' input %q must be a boolean (got %T)", importPath, name, value)
+ }
+ case "choice":
+ strVal, ok := value.(string)
+ if !ok {
+ return fmt.Errorf("import '%s': 'with' input %q must be a string for choice type (got %T)", importPath, name, value)
+ }
+ if opts, hasOpts := paramDef["options"]; hasOpts {
+ if optsList, ok := opts.([]any); ok {
+ for _, opt := range optsList {
+ if optStr, ok := opt.(string); ok && optStr == strVal {
+ return nil
+ }
+ }
+ return fmt.Errorf("import '%s': 'with' input %q value %q is not in the allowed options", importPath, name, strVal)
+ }
+ }
+ case "array":
+ arr, ok := value.([]any)
+ if !ok {
+ return fmt.Errorf("import '%s': 'with' input %q must be an array (got %T)", importPath, name, value)
+ }
+ // Validate item types if an 'items' schema is declared
+ itemsDefRaw, hasItems := paramDef["items"]
+ if !hasItems {
+ return nil
+ }
+ itemsDef, _ := itemsDefRaw.(map[string]any)
+ itemType, _ := itemsDef["type"].(string)
+ if itemType == "" {
+ return nil
+ }
+ for i, item := range arr {
+ itemName := fmt.Sprintf("%s[%d]", name, i)
+ if err := validateImportInputType(itemName, item, itemType, itemsDef, importPath); err != nil {
+ return err
+ }
+ }
+ case "object":
+ return validateObjectInput(name, value, paramDef, importPath)
+ }
+ return nil
+}
+
+// importInputsExprRegex matches ${{ github.aw.import-inputs.<name> }} and
+// ${{ github.aw.import-inputs.<name>.<sub> }} expressions in raw content.
+var importInputsExprRegex = regexp.MustCompile(`\$\{\{\s*github\.aw\.import-inputs\.([a-zA-Z0-9_-]+(?:\.[a-zA-Z0-9_-]+)?)\s*\}\}`)
+
+// legacyInputsExprRegex matches ${{ github.aw.inputs.<name> }} (legacy form) in raw content.
+var legacyInputsExprRegex = regexp.MustCompile(`\$\{\{\s*github\.aw\.inputs\.([a-zA-Z0-9_-]+)\s*\}\}`)
+
+// substituteImportInputsInContent performs text-level substitution of
+// ${{ github.aw.import-inputs.* }} and ${{ github.aw.inputs.* }} expressions
+// in raw file content (including YAML frontmatter). This is called before YAML
+// parsing so that array/object values serialised as JSON produce valid YAML.
+func substituteImportInputsInContent(content string, inputs map[string]any) string {
+ if len(inputs) == 0 {
+ return content
+ }
+
+ resolve := func(path string) (string, bool) {
+ top, sub, hasDot := strings.Cut(path, ".")
+ var value any
+ var ok bool
+ if !hasDot {
+ value, ok = inputs[top]
+ } else {
+ // one-level deep: "obj.sub"
+ topVal, topOK := inputs[top]
+ if !topOK {
+ return "", false
+ }
+ if obj, isMap := topVal.(map[string]any); isMap {
+ value, ok = obj[sub]
+ }
+ }
+ if !ok {
+ return "", false
+ }
+ // Serialize the value: arrays and maps as JSON (valid YAML inline syntax),
+ // scalars with fmt.Sprintf.
+ switch v := value.(type) {
+ case []any:
+ if b, err := json.Marshal(v); err == nil {
+ return string(b), true
+ }
+ case map[string]any:
+ if b, err := json.Marshal(v); err == nil {
+ return string(b), true
+ }
+ }
+ return fmt.Sprintf("%v", value), true
+ }
+
+ replaceFunc := func(regex *regexp.Regexp) func(string) string {
+ return func(match string) string {
+ m := regex.FindStringSubmatch(match)
+ if len(m) < 2 {
+ return match
+ }
+ if strVal, found := resolve(m[1]); found {
+ return strVal
+ }
+ return match
+ }
+ }
+
+ result := legacyInputsExprRegex.ReplaceAllStringFunc(content, replaceFunc(legacyInputsExprRegex))
+ result = importInputsExprRegex.ReplaceAllStringFunc(result, replaceFunc(importInputsExprRegex))
+ return result
+}
diff --git a/pkg/parser/include_processor.go b/pkg/parser/include_processor.go
index 0643401706..37f8ddeac6 100644
--- a/pkg/parser/include_processor.go
+++ b/pkg/parser/include_processor.go
@@ -7,6 +7,7 @@ import (
"fmt"
"os"
"path/filepath"
+ "slices"
"strings"
"github.com/github/gh-aw/pkg/console"
@@ -165,6 +166,7 @@ func processIncludedFileWithVisited(filePath, sectionName string, extractTools b
"secret-masking": true,
"applyTo": true,
"inputs": true,
+ "import-schema": true, // Declares parameter schema for 'uses'/'with' import syntax
"infer": true, // Custom agent format field (Copilot) - deprecated, use disable-model-invocation
"disable-model-invocation": true, // Custom agent format field (Copilot)
"features": true,
@@ -186,9 +188,13 @@ func processIncludedFileWithVisited(filePath, sectionName string, extractTools b
}
// Validate the tools, engine, network, and mcp-servers sections if present
- // Skip tools validation for custom agent files as they use a different format (array vs object)
+ // Skip tools/mcp-servers validation for custom agent files as they use a different format (array vs object)
+ // Skip validation entirely if any frontmatter values contain unsubstituted ${{ }}
+ // expressions — these are import-schema parameterised fields whose actual values
+ // are provided by the importing workflow; schema validation happens after substitution.
+ hasExpressions := frontmatterContainsExpressions(result.Frontmatter)
filteredFrontmatter := map[string]any{}
- if !isAgentFile {
+ if !isAgentFile && !hasExpressions {
if tools, hasTools := result.Frontmatter["tools"]; hasTools {
filteredFrontmatter["tools"] = tools
}
@@ -199,8 +205,10 @@ func processIncludedFileWithVisited(filePath, sectionName string, extractTools b
if network, hasNetwork := result.Frontmatter["network"]; hasNetwork {
filteredFrontmatter["network"] = network
}
- if mcpServers, hasMCPServers := result.Frontmatter["mcp-servers"]; hasMCPServers {
- filteredFrontmatter["mcp-servers"] = mcpServers
+ if !hasExpressions {
+ if mcpServers, hasMCPServers := result.Frontmatter["mcp-servers"]; hasMCPServers {
+ filteredFrontmatter["mcp-servers"] = mcpServers
+ }
}
// Note: we don't validate imports field as it's handled separately
if len(filteredFrontmatter) > 0 {
@@ -261,3 +269,29 @@ func processIncludedFileWithVisited(filePath, sectionName string, extractTools b
return strings.Trim(markdownContent, "\n") + "\n", nil
}
+
+// frontmatterContainsExpressions reports whether any string value in the frontmatter map
+// (recursively) contains an unsubstituted ${{ }} expression. Shared workflows that use
+// import-schema parameterisation may have ${{ github.aw.import-inputs.* }} expressions in
+// their frontmatter fields (e.g. tools.serena) that are only resolved at import time.
+// Validation of such files is deferred to avoid false-positive schema warnings.
+func frontmatterContainsExpressions(m map[string]any) bool {
+ for _, v := range m {
+ if containsExpression(v) {
+ return true
+ }
+ }
+ return false
+}
+
+func containsExpression(v any) bool {
+ switch val := v.(type) {
+ case string:
+ return strings.Contains(val, "${{")
+ case map[string]any:
+ return frontmatterContainsExpressions(val)
+ case []any:
+ return slices.ContainsFunc(val, containsExpression)
+ }
+ return false
+}
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index 48f19ae8b5..46a2b06829 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -100,6 +100,85 @@
},
{
"type": "boolean"
+ },
+ {
+ "type": "object",
+ "description": "Object value for import-schema parameters of type 'object' (one level deep).",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "type": "object",
+ "description": "Import specification with 'uses'/'with' syntax (mirrors GitHub Actions reusable workflow syntax). 'uses' references the workflow path and 'with' provides input values.",
+ "required": ["uses"],
+ "additionalProperties": false,
+ "properties": {
+ "uses": {
+ "type": "string",
+ "description": "Workflow specification in format owner/repo/path@ref. Alias for 'path'."
+ },
+ "with": {
+ "type": "object",
+ "description": "Input values to pass to the imported workflow, validated against the imported workflow's 'import-schema'. Alias for 'inputs'.",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object",
+ "description": "Object value for import-schema parameters of type 'object' (one level deep).",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ },
+ {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
}
]
}
@@ -137,7 +216,96 @@
"type": "object",
"description": "Input values to pass to the imported workflow.",
"additionalProperties": {
- "oneOf": [{ "type": "string" }, { "type": "number" }, { "type": "boolean" }]
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object",
+ "description": "Object value for import-schema parameters of type 'object' (one level deep).",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "type": "object",
+ "description": "Import specification with 'uses'/'with' syntax.",
+ "required": ["uses"],
+ "additionalProperties": false,
+ "properties": {
+ "uses": {
+ "type": "string",
+ "description": "Workflow specification in format owner/repo/path@ref."
+ },
+ "with": {
+ "type": "object",
+ "description": "Input values to pass to the imported workflow.",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object",
+ "description": "Object value for import-schema parameters of type 'object' (one level deep).",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ },
+ {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ }
+ ]
}
}
}
@@ -1582,7 +1750,7 @@
"oneOf": [
{
"type": "null",
- "description": "Bare key with no value — equivalent to true. Skips workflow execution if any CI checks on the target branch are currently failing."
+ "description": "Bare key with no value \u2014 equivalent to true. Skips workflow execution if any CI checks on the target branch are currently failing."
},
{
"type": "boolean",
@@ -1751,7 +1919,52 @@
"with": {
"type": "object",
"description": "Input parameters for the action",
- "additionalProperties": true
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object",
+ "description": "Object value for import-schema parameters of type 'object' (one level deep).",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ },
+ {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ }
+ ]
+ }
},
"env": {
"type": "object",
@@ -2119,7 +2332,52 @@
"with": {
"type": "object",
"description": "A map of the input parameters defined by the action. Each input parameter is a key/value pair.",
- "additionalProperties": true
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object",
+ "description": "Object value for import-schema parameters of type 'object' (one level deep).",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ },
+ {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ }
+ ]
+ }
},
"env": {
"type": "object",
@@ -2263,6 +2521,39 @@
},
{
"type": "boolean"
+ },
+ {
+ "type": "object",
+ "description": "Object value for import-schema parameters of type 'object' (one level deep).",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ },
+ {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
}
]
}
@@ -3815,7 +4106,9 @@
"description": "Timeout in seconds for MCP server startup. Applies to MCP server initialization if supported by the engine. Default: 120 seconds."
},
"serena": {
- "description": "Serena MCP server for AI-powered code intelligence with language service integration",
+ "description": "DEPRECATED: Use 'mcp-servers.serena' with the shared/mcp/serena.md workflow instead. Serena MCP server for AI-powered code intelligence with language service integration.",
+ "deprecated": true,
+ "x-deprecation-message": "tools.serena is deprecated. Import shared/mcp/serena.md and use mcp-servers.serena instead:\n imports:\n - uses: shared/mcp/serena.md\n with:\n languages: [\"go\", \"typescript\"]",
"oneOf": [
{
"type": "null",
@@ -8636,6 +8929,143 @@
"github-app": {
"$ref": "#/$defs/github_app",
"description": "Top-level GitHub App configuration used as a fallback for all nested github-app token minting operations (on, safe-outputs, checkout, tools.github, dependencies). When a nested section does not define its own github-app, this top-level configuration is used automatically."
+ },
+ "import-schema": {
+ "type": "object",
+ "description": "Schema for validating 'with' input values when this workflow is imported by another workflow using the 'uses'/'with' syntax. Defines the expected parameters, their types, and whether they are required. Scalar inputs are accessible via '${{ github.aw.import-inputs. }}' expressions. Object inputs (type: object) allow one-level deep sub-fields accessible via '${{ github.aw.import-inputs.. }}' expressions.",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "object",
+ "description": "Input parameter definition for scalar types (string, number, boolean, choice).",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "Human-readable description of the input parameter."
+ },
+ "required": {
+ "type": "boolean",
+ "description": "Whether this input is required when using 'with' in an import.",
+ "default": false
+ },
+ "default": {
+ "description": "Default value used when 'with' does not provide this input."
+ },
+ "type": {
+ "type": "string",
+ "enum": ["string", "number", "boolean", "choice", "array"],
+ "description": "The type of the input value."
+ },
+ "options": {
+ "type": "array",
+ "description": "List of allowed values for choice type inputs.",
+ "items": {
+ "type": "string"
+ }
+ },
+ "items": {
+ "type": "object",
+ "description": "Schema for individual array elements. Typically {\"type\": \"string\"}.",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["string", "number", "boolean"],
+ "description": "Type of each array item."
+ }
+ },
+ "additionalProperties": false
+ }
+ },
+ "additionalProperties": false
+ },
+ {
+ "type": "object",
+ "description": "Input parameter definition for object type (one level deep). Use 'properties' to declare the expected sub-fields.",
+ "required": ["type"],
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "Human-readable description of the input parameter."
+ },
+ "required": {
+ "type": "boolean",
+ "description": "Whether this input is required when using 'with' in an import.",
+ "default": false
+ },
+ "type": {
+ "type": "string",
+ "enum": ["object"],
+ "description": "The type 'object' enables structured sub-fields accessible via 'github.aw.import-inputs..'."
+ },
+ "properties": {
+ "type": "object",
+ "description": "One-level deep property definitions for this object input.",
+ "additionalProperties": {
+ "type": "object",
+ "description": "Sub-property definition.",
+ "properties": {
+ "description": {
+ "type": "string"
+ },
+ "required": {
+ "type": "boolean",
+ "default": false
+ },
+ "default": {},
+ "type": {
+ "type": "string",
+ "enum": ["string", "number", "boolean", "choice"],
+ "description": "Type of the sub-property."
+ },
+ "options": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "additionalProperties": false
+ }
+ }
+ },
+ "additionalProperties": false
+ }
+ ]
+ },
+ "examples": [
+ {
+ "count": {
+ "type": "number",
+ "description": "Number of items to process",
+ "required": true
+ },
+ "category": {
+ "type": "string",
+ "description": "Category filter",
+ "default": "general"
+ },
+ "languages": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "List of language identifiers"
+ },
+ "config": {
+ "type": "object",
+ "properties": {
+ "apiKey": {
+ "type": "string",
+ "required": true
+ },
+ "timeout": {
+ "type": "number",
+ "default": 30
+ }
+ }
+ }
+ }
+ ]
}
},
"additionalProperties": false,
@@ -9488,7 +9918,52 @@
"with": {
"type": "object",
"description": "Input parameters defined by the action",
- "additionalProperties": true
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object",
+ "description": "Object value for import-schema parameters of type 'object' (one level deep).",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ },
+ {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ }
+ }
+ ]
+ }
},
"env": {
"type": "object",
diff --git a/pkg/workflow/codex_mcp.go b/pkg/workflow/codex_mcp.go
index f49eb82440..3f3ec730ca 100644
--- a/pkg/workflow/codex_mcp.go
+++ b/pkg/workflow/codex_mcp.go
@@ -56,8 +56,14 @@ func (e *CodexEngine) RenderMCPConfig(yaml *strings.Builder, tools map[string]an
qmdTool := expandedTools["qmd"]
renderer.RenderQmdMCP(yaml, qmdTool, workflowData)
case "serena":
- serenaTool := expandedTools["serena"]
- renderer.RenderSerenaMCP(yaml, serenaTool)
+ // If serena has an explicit MCP server config (with container field), use the
+ // custom renderer instead of the built-in hardcoded serena renderer.
+ if handled := HandleCustomMCPToolInSwitch(yaml, toolName, expandedTools, false, func(yaml *strings.Builder, toolName string, toolConfig map[string]any, isLast bool) error {
+ return e.renderCodexMCPConfigWithContext(yaml, toolName, toolConfig, workflowData)
+ }); !handled {
+ serenaTool := expandedTools["serena"]
+ renderer.RenderSerenaMCP(yaml, serenaTool)
+ }
case "agentic-workflows":
renderer.RenderAgenticWorkflowsMCP(yaml)
case "safe-outputs":
diff --git a/pkg/workflow/compiler_orchestrator_tools.go b/pkg/workflow/compiler_orchestrator_tools.go
index 5e6db849ad..9313cee757 100644
--- a/pkg/workflow/compiler_orchestrator_tools.go
+++ b/pkg/workflow/compiler_orchestrator_tools.go
@@ -71,6 +71,20 @@ func (c *Compiler) processToolsAndMarkdown(result *parser.FrontmatterResult, cle
// Extract tools from the main file
topTools := extractToolsFromFrontmatter(result.Frontmatter)
+ // Emit deprecation warning if tools.serena is used directly in frontmatter.
+ // The check is done here, before merging with mcp-servers, so that the warning
+ // fires only for explicit tools.serena usage and not when serena is configured
+ // via the recommended mcp-servers.serena approach (shared/mcp/serena.md import).
+ if _, exists := topTools["serena"]; exists {
+ fmt.Fprintln(os.Stderr, console.FormatWarningMessage(
+ "'tools.serena' is deprecated. Use 'mcp-servers.serena' with the shared/mcp/serena.md workflow instead:\n"+
+ " imports:\n"+
+ " - uses: shared/mcp/serena.md\n"+
+ " with:\n"+
+ " languages: [\"go\", \"typescript\"]",
+ ))
+ }
+
// Extract mcp-servers from the main file and merge them into tools
mcpServers := extractMCPServersFromFrontmatter(result.Frontmatter)
diff --git a/pkg/workflow/docker.go b/pkg/workflow/docker.go
index c1a3ae10bd..41158d7e52 100644
--- a/pkg/workflow/docker.go
+++ b/pkg/workflow/docker.go
@@ -41,11 +41,18 @@ func collectDockerImages(tools map[string]any, workflowData *WorkflowData, actio
// Check for Serena tool (uses Docker image)
if serenaTool, hasSerena := tools["serena"]; hasSerena {
- // Select the appropriate Serena container image based on configured languages
- // selectSerenaContainer() returns the base image path (e.g., "ghcr.io/github/serena-mcp-server")
- // which we then tag with ":latest" to match the MCP config renderer
- containerImage := selectSerenaContainer(serenaTool)
- image := containerImage + ":latest"
+ var image string
+ // Check for explicit container config first (shared/mcp/serena.md style)
+ if serenaMap, ok := serenaTool.(map[string]any); ok {
+ if container, hasContainer := serenaMap["container"].(string); hasContainer {
+ image = container // Already includes :latest from YAML config
+ }
+ }
+ if image == "" {
+ // Fall back to language-based container selection (legacy tools.serena style)
+ containerImage := selectSerenaContainer(serenaTool)
+ image = containerImage + ":latest"
+ }
if !imageSet[image] {
images = append(images, image)
imageSet[image] = true
diff --git a/pkg/workflow/expression_extraction.go b/pkg/workflow/expression_extraction.go
index 8b2d822bbb..900dbc87bd 100644
--- a/pkg/workflow/expression_extraction.go
+++ b/pkg/workflow/expression_extraction.go
@@ -3,6 +3,7 @@ package workflow
import (
"crypto/sha256"
"encoding/hex"
+ "encoding/json"
"fmt"
"os"
"regexp"
@@ -247,6 +248,11 @@ func (e *ExpressionExtractor) ReplaceExpressionsWithEnvVars(markdown string) str
// awInputsExprRegex matches ${{ github.aw.inputs.<key> }} expressions
var awInputsExprRegex = regexp.MustCompile(`\$\{\{\s*github\.aw\.inputs\.([a-zA-Z0-9_-]+)\s*\}\}`)
+// awImportInputsExprRegex matches ${{ github.aw.import-inputs.<name> }} and
+// ${{ github.aw.import-inputs.<name>.<subkey> }} expressions (import-schema form).
+// Captures the full dotted path (e.g. "count" or "config.apiKey").
+var awImportInputsExprRegex = regexp.MustCompile(`\$\{\{\s*github\.aw\.import-inputs\.([a-zA-Z0-9_-]+(?:\.[a-zA-Z0-9_-]+)?)\s*\}\}`)
+
// applyWorkflowDispatchFallbacks enhances entity number expressions with an
// "|| inputs.item_number" fallback when the workflow has a workflow_dispatch
// trigger that includes the item_number input (generated by the label trigger
@@ -280,8 +286,9 @@ func applyWorkflowDispatchFallbacks(mappings []*ExpressionMapping, hasItemNumber
}
}
-// SubstituteImportInputs replaces ${{ github.aw.inputs.<key> }} expressions
-// with the corresponding values from the importInputs map.
+// SubstituteImportInputs replaces ${{ github.aw.inputs.<key> }} and
+// ${{ github.aw.import-inputs.<name> }} expressions with the corresponding
+// values from the importInputs map.
// This is called before expression extraction to inject import input values.
func SubstituteImportInputs(content string, importInputs map[string]any) string {
if len(importInputs) == 0 {
@@ -290,25 +297,70 @@ func SubstituteImportInputs(content string, importInputs map[string]any) string
expressionExtractionLog.Printf("Substituting import inputs: %d inputs available", len(importInputs))
- result := awInputsExprRegex.ReplaceAllStringFunc(content, func(match string) string {
- // Extract the key name from the expression
- matches := awInputsExprRegex.FindStringSubmatch(match)
- if len(matches) < 2 {
+ substituteFunc := func(regex *regexp.Regexp, inputCategory string) func(string) string {
+ return func(match string) string {
+ matches := regex.FindStringSubmatch(match)
+ if len(matches) < 2 {
+ return match
+ }
+ path := matches[1]
+ // Resolve potentially dotted path (e.g. "config.apiKey" for object inputs)
+ if value, found := resolveImportInputPath(importInputs, path); found {
+ strValue := marshalImportInputValue(value)
+ expressionExtractionLog.Printf("Substituting github.aw.%s.%s with value: %s", inputCategory, path, strValue)
+ return strValue
+ }
+ expressionExtractionLog.Printf("Import input path not found: %s", path)
return match
}
+ }
- key := matches[1]
- if value, exists := importInputs[key]; exists {
- // Convert value to string
- strValue := fmt.Sprintf("%v", value)
- expressionExtractionLog.Printf("Substituting github.aw.inputs.%s with value: %s", key, strValue)
- return strValue
- }
-
- // If the key doesn't exist in importInputs, keep the original expression
- expressionExtractionLog.Printf("Import input key not found: %s", key)
- return match
- })
+	// Substitute ${{ github.aw.inputs.<key> }} (legacy form)
+ result := awInputsExprRegex.ReplaceAllStringFunc(content, substituteFunc(awInputsExprRegex, "inputs"))
+	// Substitute ${{ github.aw.import-inputs.<name> }} (import-schema form)
+ result = awImportInputsExprRegex.ReplaceAllStringFunc(result, substituteFunc(awImportInputsExprRegex, "import-inputs"))
return result
}
+
+// marshalImportInputValue serializes an import input value to a string suitable for
+// substitution into both YAML frontmatter and markdown prose.
+// Arrays and maps are serialized as JSON (which is valid YAML inline syntax).
+// Scalar values use Go's default string formatting.
+func marshalImportInputValue(value any) string {
+ switch v := value.(type) {
+ case []any:
+ if b, err := json.Marshal(v); err == nil {
+ return string(b)
+ }
+ case map[string]any:
+ if b, err := json.Marshal(v); err == nil {
+ return string(b)
+ }
+ }
+ return fmt.Sprintf("%v", value)
+}
+
+// resolveImportInputPath resolves a potentially dotted key path from the importInputs map.
+// For scalar inputs ("count"), it looks up importInputs["count"] directly.
+// For object sub-key paths ("config.apiKey"), it looks up importInputs["config"]["apiKey"],
+// supporting one level of nesting as defined by import-schema object types.
+// Returns the resolved value and true on success, or nil and false when the path is not found.
+func resolveImportInputPath(importInputs map[string]any, path string) (any, bool) {
+ topKey, subKey, hasDot := strings.Cut(path, ".")
+ if !hasDot {
+ // Scalar: direct lookup
+ value, ok := importInputs[topKey]
+ return value, ok
+ }
+ // Object sub-key: one-level deep lookup
+ topValue, ok := importInputs[topKey]
+ if !ok {
+ return nil, false
+ }
+ if obj, ok := topValue.(map[string]any); ok {
+ value, ok := obj[subKey]
+ return value, ok
+ }
+ return nil, false
+}
diff --git a/pkg/workflow/expression_patterns.go b/pkg/workflow/expression_patterns.go
index 303a8bb458..20385fe3fe 100644
--- a/pkg/workflow/expression_patterns.go
+++ b/pkg/workflow/expression_patterns.go
@@ -101,6 +101,17 @@ var (
// Used for extraction rather than validation
AWInputsExpressionPattern = regexp.MustCompile(`\$\{\{\s*github\.aw\.inputs\.([a-zA-Z0-9_-]+)\s*\}\}`)
+ // AWImportInputsPattern matches github.aw.import-inputs.* patterns for import-schema form.
+ // Supports both scalar inputs and one-level deep object sub-keys:
+ // github.aw.import-inputs.count
+ // github.aw.import-inputs.config.apiKey
+ AWImportInputsPattern = regexp.MustCompile(`^github\.aw\.import-inputs\.[a-zA-Z0-9_-]+(?:\.[a-zA-Z0-9_-]+)?$`)
+
+ // AWImportInputsExpressionPattern matches full ${{ github.aw.import-inputs.* }} expressions.
+ // Captures the full dotted path after "import-inputs." (e.g. "count" or "config.apiKey").
+ // Used for substitution of values provided via the 'with' key in import specifications.
+ AWImportInputsExpressionPattern = regexp.MustCompile(`\$\{\{\s*github\.aw\.import-inputs\.([a-zA-Z0-9_-]+(?:\.[a-zA-Z0-9_-]+)?)\s*\}\}`)
+
// EnvPattern matches env.* patterns
// Example: env.NODE_VERSION
EnvPattern = regexp.MustCompile(`^env\.[a-zA-Z0-9_-]+$`)
diff --git a/pkg/workflow/expression_safety_validation.go b/pkg/workflow/expression_safety_validation.go
index 9a9f7cf162..ba6771196f 100644
--- a/pkg/workflow/expression_safety_validation.go
+++ b/pkg/workflow/expression_safety_validation.go
@@ -28,6 +28,7 @@ var (
inputsRegex = regexp.MustCompile(`^github\.event\.inputs\.[a-zA-Z0-9_-]+$`)
workflowCallInputsRegex = regexp.MustCompile(`^inputs\.[a-zA-Z0-9_-]+$`)
awInputsRegex = regexp.MustCompile(`^github\.aw\.inputs\.[a-zA-Z0-9_-]+$`)
+ awImportInputsRegex = regexp.MustCompile(`^github\.aw\.import-inputs\.[a-zA-Z0-9_-]+(?:\.[a-zA-Z0-9_-]+)?$`)
envRegex = regexp.MustCompile(`^env\.[a-zA-Z0-9_-]+$`)
// comparisonExtractionRegex extracts property accesses from comparison expressions
// Matches patterns like "github.workflow == 'value'" and extracts "github.workflow"
@@ -70,6 +71,7 @@ func validateExpressionSafety(markdownContent string) error {
InputsRe: inputsRegex,
WorkflowCallInputsRe: workflowCallInputsRegex,
AwInputsRe: awInputsRegex,
+ AwImportInputsRe: awImportInputsRegex,
EnvRe: envRegex,
UnauthorizedExpressions: &unauthorizedExpressions,
})
@@ -84,6 +86,7 @@ func validateExpressionSafety(markdownContent string) error {
InputsRe: inputsRegex,
WorkflowCallInputsRe: workflowCallInputsRegex,
AwInputsRe: awInputsRegex,
+ AwImportInputsRe: awImportInputsRegex,
EnvRe: envRegex,
UnauthorizedExpressions: &unauthorizedExpressions,
})
@@ -123,6 +126,7 @@ func validateExpressionSafety(markdownContent string) error {
allowedList.WriteString(" - steps.*\n")
allowedList.WriteString(" - github.event.inputs.*\n")
allowedList.WriteString(" - github.aw.inputs.* (shared workflow inputs)\n")
+ allowedList.WriteString(" - github.aw.import-inputs.* (import-schema inputs)\n")
allowedList.WriteString(" - inputs.* (workflow_call)\n")
allowedList.WriteString(" - env.*\n")
@@ -144,6 +148,7 @@ type ExpressionValidationOptions struct {
InputsRe *regexp.Regexp
WorkflowCallInputsRe *regexp.Regexp
AwInputsRe *regexp.Regexp
+ AwImportInputsRe *regexp.Regexp
EnvRe *regexp.Regexp
UnauthorizedExpressions *[]string
}
@@ -207,6 +212,8 @@ func validateSingleExpression(expression string, opts ExpressionValidationOption
allowed = true
} else if opts.AwInputsRe.MatchString(expression) {
allowed = true
+ } else if opts.AwImportInputsRe != nil && opts.AwImportInputsRe.MatchString(expression) {
+ allowed = true
} else if opts.EnvRe.MatchString(expression) {
allowed = true
} else if slices.Contains(constants.AllowedExpressions, expression) {
@@ -263,6 +270,8 @@ func validateSingleExpression(expression string, opts ExpressionValidationOption
propertyAllowed = true
} else if opts.AwInputsRe.MatchString(property) {
propertyAllowed = true
+ } else if opts.AwImportInputsRe != nil && opts.AwImportInputsRe.MatchString(property) {
+ propertyAllowed = true
} else if opts.EnvRe.MatchString(property) {
propertyAllowed = true
} else if slices.Contains(constants.AllowedExpressions, property) {
diff --git a/pkg/workflow/frontmatter_types.go b/pkg/workflow/frontmatter_types.go
index bc1daaebf4..f88222d593 100644
--- a/pkg/workflow/frontmatter_types.go
+++ b/pkg/workflow/frontmatter_types.go
@@ -185,10 +185,11 @@ type FrontmatterConfig struct {
Cache map[string]any `json:"cache,omitempty"`
// Import and inclusion
- Imports any `json:"imports,omitempty"` // Can be string or array
- Include any `json:"include,omitempty"` // Can be string or array
- InlinedImports bool `json:"inlined-imports,omitempty"` // If true, inline all imports at compile time instead of using runtime-import macros
- Resources []string `json:"resources,omitempty"` // Additional workflow .md or action .yml files to fetch alongside this workflow
+ Imports any `json:"imports,omitempty"` // Can be string or array
+ ImportSchema map[string]any `json:"import-schema,omitempty"` // Schema for validating 'with' values when this workflow is imported
+ Include any `json:"include,omitempty"` // Can be string or array
+ InlinedImports bool `json:"inlined-imports,omitempty"` // If true, inline all imports at compile time instead of using runtime-import macros
+ Resources []string `json:"resources,omitempty"` // Additional workflow .md or action .yml files to fetch alongside this workflow
// Metadata
Metadata map[string]string `json:"metadata,omitempty"` // Custom metadata key-value pairs
diff --git a/pkg/workflow/import_schema_test.go b/pkg/workflow/import_schema_test.go
new file mode 100644
index 0000000000..db9d54ea4d
--- /dev/null
+++ b/pkg/workflow/import_schema_test.go
@@ -0,0 +1,662 @@
+//go:build !integration
+
+package workflow_test
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/github/gh-aw/pkg/stringutil"
+ "github.com/github/gh-aw/pkg/testutil"
+ "github.com/github/gh-aw/pkg/workflow"
+)
+
+// TestImportWithUsesAndWith tests that imports can use 'uses'/'with' syntax as an
+// alias for 'path'/'inputs'.
+func TestImportWithUsesAndWith(t *testing.T) {
+ tempDir := testutil.TempDir(t, "test-import-uses-with-*")
+
+ sharedPath := filepath.Join(tempDir, "shared", "worker.md")
+ if err := os.MkdirAll(filepath.Dir(sharedPath), 0755); err != nil {
+ t.Fatalf("Failed to create shared directory: %v", err)
+ }
+
+ sharedContent := `---
+import-schema:
+ region:
+ description: AWS region to target
+ type: string
+ required: true
+ count:
+ description: Number of items
+ type: number
+ default: 10
+---
+
+# Worker Instructions
+
+Deploy ${{ github.aw.import-inputs.count }} items to ${{ github.aw.import-inputs.region }}.
+`
+ if err := os.WriteFile(sharedPath, []byte(sharedContent), 0644); err != nil {
+ t.Fatalf("Failed to write shared file: %v", err)
+ }
+
+ workflowPath := filepath.Join(tempDir, "main.md")
+ workflowContent := `---
+on: issues
+permissions:
+ contents: read
+ issues: read
+engine: copilot
+imports:
+ - uses: shared/worker.md
+ with:
+ region: us-east-1
+ count: 5
+---
+
+# Main Workflow
+
+Runs the worker.
+`
+ if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+ t.Fatalf("Failed to write workflow file: %v", err)
+ }
+
+ compiler := workflow.NewCompiler()
+ if err := compiler.CompileWorkflow(workflowPath); err != nil {
+ t.Fatalf("CompileWorkflow failed: %v", err)
+ }
+
+ lockFilePath := stringutil.MarkdownToLockFile(workflowPath)
+ lockFileContent, err := os.ReadFile(lockFilePath)
+ if err != nil {
+ t.Fatalf("Failed to read lock file: %v", err)
+ }
+ lockContent := string(lockFileContent)
+
+ if !strings.Contains(lockContent, "5 items") {
+ t.Errorf("Expected lock file to contain substituted count '5 items', got:\n%s", lockContent)
+ }
+ if !strings.Contains(lockContent, "us-east-1") {
+ t.Errorf("Expected lock file to contain substituted region 'us-east-1'")
+ }
+
+ if strings.Contains(lockContent, "github.aw.import-inputs.region") {
+ t.Error("Expected github.aw.import-inputs.region to be substituted in lock file")
+ }
+ if strings.Contains(lockContent, "github.aw.import-inputs.count") {
+ t.Error("Expected github.aw.import-inputs.count to be substituted in lock file")
+ }
+}
+
+// TestImportSchemaValidationMissingRequired tests that the compiler rejects imports
+// that are missing a required 'with' value declared in import-schema.
+func TestImportSchemaValidationMissingRequired(t *testing.T) {
+ tempDir := testutil.TempDir(t, "test-import-schema-missing-*")
+
+ sharedPath := filepath.Join(tempDir, "shared", "required.md")
+ if err := os.MkdirAll(filepath.Dir(sharedPath), 0755); err != nil {
+ t.Fatalf("Failed to create shared directory: %v", err)
+ }
+
+ sharedContent := `---
+import-schema:
+ region:
+ description: AWS region
+ type: string
+ required: true
+---
+
+# Shared Instructions
+
+Region: ${{ github.aw.import-inputs.region }}.
+`
+ if err := os.WriteFile(sharedPath, []byte(sharedContent), 0644); err != nil {
+ t.Fatalf("Failed to write shared file: %v", err)
+ }
+
+ workflowPath := filepath.Join(tempDir, "main.md")
+ workflowContent := `---
+on: issues
+permissions:
+ contents: read
+ issues: read
+engine: copilot
+imports:
+ - uses: shared/required.md
+ with: {}
+---
+
+# Main Workflow
+
+Missing required input.
+`
+ if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+ t.Fatalf("Failed to write workflow file: %v", err)
+ }
+
+ compiler := workflow.NewCompiler()
+ err := compiler.CompileWorkflow(workflowPath)
+ if err == nil {
+ t.Fatal("Expected compilation to fail due to missing required 'with' input, but it succeeded")
+ }
+ if !strings.Contains(err.Error(), "region") {
+ t.Errorf("Expected error to mention 'region', got: %v", err)
+ }
+}
+
+// TestImportSchemaValidationUnknownKey tests that the compiler rejects imports
+// that provide an unknown 'with' key not declared in import-schema.
+func TestImportSchemaValidationUnknownKey(t *testing.T) {
+ tempDir := testutil.TempDir(t, "test-import-schema-unknown-*")
+
+ sharedPath := filepath.Join(tempDir, "shared", "typed.md")
+ if err := os.MkdirAll(filepath.Dir(sharedPath), 0755); err != nil {
+ t.Fatalf("Failed to create shared directory: %v", err)
+ }
+
+ sharedContent := `---
+import-schema:
+ region:
+ type: string
+---
+
+# Shared Instructions
+
+Region: ${{ github.aw.import-inputs.region }}.
+`
+ if err := os.WriteFile(sharedPath, []byte(sharedContent), 0644); err != nil {
+ t.Fatalf("Failed to write shared file: %v", err)
+ }
+
+ workflowPath := filepath.Join(tempDir, "main.md")
+ workflowContent := `---
+on: issues
+permissions:
+ contents: read
+ issues: read
+engine: copilot
+imports:
+ - uses: shared/typed.md
+ with:
+ region: us-east-1
+ unknown_param: foo
+---
+
+# Main Workflow
+
+Has unknown key.
+`
+ if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+ t.Fatalf("Failed to write workflow file: %v", err)
+ }
+
+ compiler := workflow.NewCompiler()
+ err := compiler.CompileWorkflow(workflowPath)
+ if err == nil {
+ t.Fatal("Expected compilation to fail due to unknown 'with' key, but it succeeded")
+ }
+ if !strings.Contains(err.Error(), "unknown_param") {
+ t.Errorf("Expected error to mention 'unknown_param', got: %v", err)
+ }
+}
+
+// TestImportSchemaChoiceValidation tests that choice type validation works.
+func TestImportSchemaChoiceValidation(t *testing.T) {
+ tempDir := testutil.TempDir(t, "test-import-schema-choice-*")
+
+ sharedPath := filepath.Join(tempDir, "shared", "env.md")
+ if err := os.MkdirAll(filepath.Dir(sharedPath), 0755); err != nil {
+ t.Fatalf("Failed to create shared directory: %v", err)
+ }
+
+ sharedContent := `---
+import-schema:
+ environment:
+ type: choice
+ options:
+ - staging
+ - production
+ required: true
+---
+
+# Environment Instructions
+
+Deploy to ${{ github.aw.import-inputs.environment }}.
+`
+ if err := os.WriteFile(sharedPath, []byte(sharedContent), 0644); err != nil {
+ t.Fatalf("Failed to write shared file: %v", err)
+ }
+
+ t.Run("valid choice value", func(t *testing.T) {
+ workflowPath := filepath.Join(tempDir, "valid.md")
+ workflowContent := `---
+on: issues
+permissions:
+ contents: read
+ issues: read
+engine: copilot
+imports:
+ - uses: shared/env.md
+ with:
+ environment: staging
+---
+
+# Valid Choice
+`
+ if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+ t.Fatalf("Failed to write workflow file: %v", err)
+ }
+ compiler := workflow.NewCompiler()
+ if err := compiler.CompileWorkflow(workflowPath); err != nil {
+ t.Fatalf("Expected compilation to succeed with valid choice, got: %v", err)
+ }
+ })
+
+ t.Run("invalid choice value", func(t *testing.T) {
+ workflowPath := filepath.Join(tempDir, "invalid.md")
+ workflowContent := `---
+on: issues
+permissions:
+ contents: read
+ issues: read
+engine: copilot
+imports:
+ - uses: shared/env.md
+ with:
+ environment: development
+---
+
+# Invalid Choice
+`
+ if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+ t.Fatalf("Failed to write workflow file: %v", err)
+ }
+ compiler := workflow.NewCompiler()
+ err := compiler.CompileWorkflow(workflowPath)
+ if err == nil {
+ t.Fatal("Expected compilation to fail for invalid choice value")
+ }
+ if !strings.Contains(err.Error(), "development") {
+ t.Errorf("Expected error to mention 'development', got: %v", err)
+ }
+ })
+}
+
+// TestImportSchemaNoSchemaBackwardCompat tests that imports without import-schema
+// still work (backward compatibility).
+func TestImportSchemaNoSchemaBackwardCompat(t *testing.T) {
+ tempDir := testutil.TempDir(t, "test-import-no-schema-*")
+
+ sharedPath := filepath.Join(tempDir, "shared", "noschema.md")
+ if err := os.MkdirAll(filepath.Dir(sharedPath), 0755); err != nil {
+ t.Fatalf("Failed to create shared directory: %v", err)
+ }
+
+ // Shared workflow uses old-style 'inputs' field (no import-schema)
+ sharedContent := `---
+inputs:
+ count:
+ type: number
+ default: 10
+---
+
+# No Schema Instructions
+
+Count: ${{ github.aw.inputs.count }}.
+`
+ if err := os.WriteFile(sharedPath, []byte(sharedContent), 0644); err != nil {
+ t.Fatalf("Failed to write shared file: %v", err)
+ }
+
+ workflowPath := filepath.Join(tempDir, "main.md")
+ workflowContent := `---
+on: issues
+permissions:
+ contents: read
+ issues: read
+engine: copilot
+imports:
+ - uses: shared/noschema.md
+ with:
+ count: 42
+---
+
+# Main Workflow
+`
+ if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+ t.Fatalf("Failed to write workflow file: %v", err)
+ }
+
+ compiler := workflow.NewCompiler()
+ if err := compiler.CompileWorkflow(workflowPath); err != nil {
+ t.Fatalf("CompileWorkflow failed (backward compat): %v", err)
+ }
+
+ lockFilePath := stringutil.MarkdownToLockFile(workflowPath)
+ lockFileContent, err := os.ReadFile(lockFilePath)
+ if err != nil {
+ t.Fatalf("Failed to read lock file: %v", err)
+ }
+ lockContent := string(lockFileContent)
+
+ if !strings.Contains(lockContent, "Count: 42") {
+ t.Errorf("Expected lock file to contain 'Count: 42'")
+ }
+}
+
+// TestImportSchemaObjectType tests that object type inputs with one-level deep
+// properties are validated and that sub-fields are accessible via dotted
+// expressions such as ${{ github.aw.import-inputs.config.apiKey }}.
+func TestImportSchemaObjectType(t *testing.T) {
+	tempDir := testutil.TempDir(t, "test-import-schema-object-*")
+
+	// Shared workflow declaring an object input (config) with a required and a
+	// defaulted sub-property, plus a required top-level string input (region).
+	sharedPath := filepath.Join(tempDir, "shared", "config-worker.md")
+	if err := os.MkdirAll(filepath.Dir(sharedPath), 0755); err != nil {
+		t.Fatalf("Failed to create shared directory: %v", err)
+	}
+
+	sharedContent := `---
+import-schema:
+  config:
+    type: object
+    description: Configuration object
+    properties:
+      apiKey:
+        type: string
+        required: true
+      timeout:
+        type: number
+        default: 30
+  region:
+    type: string
+    required: true
+---
+
+# Config Worker Instructions
+
+API key: ${{ github.aw.import-inputs.config.apiKey }}.
+Timeout: ${{ github.aw.import-inputs.config.timeout }}.
+Region: ${{ github.aw.import-inputs.region }}.
+`
+	if err := os.WriteFile(sharedPath, []byte(sharedContent), 0644); err != nil {
+		t.Fatalf("Failed to write shared file: %v", err)
+	}
+
+	t.Run("valid object input substitution", func(t *testing.T) {
+		workflowPath := filepath.Join(tempDir, "valid.md")
+		workflowContent := `---
+on: issues
+permissions:
+  contents: read
+  issues: read
+engine: copilot
+imports:
+  - uses: shared/config-worker.md
+    with:
+      config:
+        apiKey: my-secret-key
+        timeout: 60
+      region: eu-west-1
+---
+
+# Valid Object
+`
+		if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+			t.Fatalf("Failed to write workflow file: %v", err)
+		}
+		compiler := workflow.NewCompiler()
+		if err := compiler.CompileWorkflow(workflowPath); err != nil {
+			t.Fatalf("Expected compilation to succeed with valid object input, got: %v", err)
+		}
+
+		lockFilePath := stringutil.MarkdownToLockFile(workflowPath)
+		lockContent, err := os.ReadFile(lockFilePath)
+		if err != nil {
+			t.Fatalf("Failed to read lock file: %v", err)
+		}
+		content := string(lockContent)
+
+		if !strings.Contains(content, "my-secret-key") {
+			t.Error("Expected lock file to contain substituted apiKey 'my-secret-key'")
+		}
+		// Match "Timeout: 60" (the substituted markdown line) rather than a bare
+		// "60", which could also match unrelated numbers elsewhere in the lock file.
+		if !strings.Contains(content, "Timeout: 60") {
+			t.Error("Expected lock file to contain substituted timeout 'Timeout: 60'")
+		}
+		if !strings.Contains(content, "eu-west-1") {
+			t.Error("Expected lock file to contain substituted region 'eu-west-1'")
+		}
+		// All three import-input expressions (config.apiKey, config.timeout,
+		// region) must have been substituted away, not just the first one.
+		if strings.Contains(content, "github.aw.import-inputs") {
+			t.Error("Expected all import-inputs expressions to be substituted in lock file")
+		}
+	})
+
+	t.Run("missing required sub-property", func(t *testing.T) {
+		// config.apiKey is marked required in the import-schema; omitting it
+		// must fail compilation with an error naming the missing sub-property.
+		workflowPath := filepath.Join(tempDir, "missing-sub.md")
+		workflowContent := `---
+on: issues
+permissions:
+  contents: read
+  issues: read
+engine: copilot
+imports:
+  - uses: shared/config-worker.md
+    with:
+      config:
+        timeout: 60
+      region: eu-west-1
+---
+
+# Missing required sub-prop
+`
+		if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+			t.Fatalf("Failed to write workflow file: %v", err)
+		}
+		compiler := workflow.NewCompiler()
+		err := compiler.CompileWorkflow(workflowPath)
+		if err == nil {
+			t.Fatal("Expected compilation to fail due to missing required 'apiKey'")
+		}
+		if !strings.Contains(err.Error(), "apiKey") {
+			t.Errorf("Expected error to mention 'apiKey', got: %v", err)
+		}
+	})
+
+	t.Run("unknown sub-property", func(t *testing.T) {
+		// Sub-properties not declared in the object's schema must be rejected
+		// so typos in `with:` blocks surface at compile time.
+		workflowPath := filepath.Join(tempDir, "unknown-sub.md")
+		workflowContent := `---
+on: issues
+permissions:
+  contents: read
+  issues: read
+engine: copilot
+imports:
+  - uses: shared/config-worker.md
+    with:
+      config:
+        apiKey: key
+        unknownProp: value
+      region: eu-west-1
+---
+
+# Unknown sub-prop
+`
+		if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+			t.Fatalf("Failed to write workflow file: %v", err)
+		}
+		compiler := workflow.NewCompiler()
+		err := compiler.CompileWorkflow(workflowPath)
+		if err == nil {
+			t.Fatal("Expected compilation to fail due to unknown sub-property")
+		}
+		if !strings.Contains(err.Error(), "unknownProp") {
+			t.Errorf("Expected error to mention 'unknownProp', got: %v", err)
+		}
+	})
+}
+
+// TestImportSchemaArrayType tests that array type inputs are validated and substituted
+// correctly, including as a YAML inline array in the imported workflow's mcp-servers.serena field.
+func TestImportSchemaArrayType(t *testing.T) {
+	tempDir := testutil.TempDir(t, "test-import-schema-array-*")
+
+	sharedPath := filepath.Join(tempDir, "shared", "mcp", "serena.md")
+	if err := os.MkdirAll(filepath.Dir(sharedPath), 0755); err != nil {
+		t.Fatalf("Failed to create shared directory: %v", err)
+	}
+
+	// Shared workflow with mcp-servers.serena parameterized via import-schema
+	sharedContent := `---
+import-schema:
+  languages:
+    type: array
+    items:
+      type: string
+    required: true
+    description: Languages to enable for Serena analysis
+
+mcp-servers:
+  serena: ${{ github.aw.import-inputs.languages }}
+---
+
+## Serena Analysis
+
+Configured for languages: ${{ github.aw.import-inputs.languages }}.
+`
+	if err := os.WriteFile(sharedPath, []byte(sharedContent), 0644); err != nil {
+		t.Fatalf("Failed to write shared file: %v", err)
+	}
+
+	t.Run("valid array input configures serena tools", func(t *testing.T) {
+		workflowPath := filepath.Join(tempDir, "valid.md")
+		workflowContent := `---
+on: issues
+permissions:
+  contents: read
+  issues: read
+engine: copilot
+imports:
+  - uses: shared/mcp/serena.md
+    with:
+      languages:
+        - go
+        - typescript
+---
+
+# Valid Array Input
+`
+		if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+			t.Fatalf("Failed to write workflow file: %v", err)
+		}
+		compiler := workflow.NewCompiler()
+		if err := compiler.CompileWorkflow(workflowPath); err != nil {
+			t.Fatalf("Expected compilation to succeed, got: %v", err)
+		}
+
+		lockFilePath := stringutil.MarkdownToLockFile(workflowPath)
+		lockContent, err := os.ReadFile(lockFilePath)
+		if err != nil {
+			t.Fatalf("Failed to read lock file: %v", err)
+		}
+		content := string(lockContent)
+
+		// The serena tool should be configured with both languages.
+		// NOTE(review): a bare "go" substring is very likely present in any lock
+		// file regardless of substitution; "typescript" below is the distinctive
+		// check. Tighten this to match the serialized serena config once its
+		// exact format is pinned down.
+		if !strings.Contains(content, "go") {
+			t.Error("Expected lock file to contain 'go' in serena config")
+		}
+		if !strings.Contains(content, "typescript") {
+			t.Error("Expected lock file to contain 'typescript' in serena config")
+		}
+		// The markdown body expression should be substituted too
+		if strings.Contains(content, "github.aw.import-inputs.languages") {
+			t.Error("Expected import-inputs expression to be substituted in lock file")
+		}
+	})
+
+	t.Run("wrong type for array input is rejected", func(t *testing.T) {
+		workflowPath := filepath.Join(tempDir, "wrong-type.md")
+		workflowContent := `---
+on: issues
+permissions:
+  contents: read
+  issues: read
+engine: copilot
+imports:
+  - uses: shared/mcp/serena.md
+    with:
+      languages: "go"
+---
+
+# Wrong type
+`
+		if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+			t.Fatalf("Failed to write workflow file: %v", err)
+		}
+		compiler := workflow.NewCompiler()
+		err := compiler.CompileWorkflow(workflowPath)
+		if err == nil {
+			t.Fatal("Expected compilation to fail because 'languages' should be an array, not a string")
+		}
+		if !strings.Contains(err.Error(), "languages") {
+			t.Errorf("Expected error to mention 'languages', got: %v", err)
+		}
+	})
+
+	t.Run("array items type validated", func(t *testing.T) {
+		workflowPath := filepath.Join(tempDir, "wrong-item-type.md")
+		workflowContent := `---
+on: issues
+permissions:
+  contents: read
+  issues: read
+engine: copilot
+imports:
+  - uses: shared/mcp/serena.md
+    with:
+      languages:
+        - go
+        - 42
+---
+
+# Wrong item type
+`
+		if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+			t.Fatalf("Failed to write workflow file: %v", err)
+		}
+		compiler := workflow.NewCompiler()
+		err := compiler.CompileWorkflow(workflowPath)
+		// NOTE(review): only the presence of an error is asserted here — the
+		// item-type error message format is not pinned by the other subtests,
+		// so asserting its text would over-constrain the implementation.
+		if err == nil {
+			t.Fatal("Expected compilation to fail because array items should be strings, not numbers")
+		}
+	})
+
+	t.Run("missing required array input", func(t *testing.T) {
+		workflowPath := filepath.Join(tempDir, "missing-required.md")
+		workflowContent := `---
+on: issues
+permissions:
+  contents: read
+  issues: read
+engine: copilot
+imports:
+  - uses: shared/mcp/serena.md
+    with: {}
+---
+
+# Missing required
+`
+		if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil {
+			t.Fatalf("Failed to write workflow file: %v", err)
+		}
+		compiler := workflow.NewCompiler()
+		err := compiler.CompileWorkflow(workflowPath)
+		if err == nil {
+			t.Fatal("Expected compilation to fail because 'languages' is required")
+		}
+		if !strings.Contains(err.Error(), "languages") {
+			t.Errorf("Expected error to mention 'languages', got: %v", err)
+		}
+	})
+}
diff --git a/pkg/workflow/mcp_renderer.go b/pkg/workflow/mcp_renderer.go
index 2757daeb2b..c20c020cb8 100644
--- a/pkg/workflow/mcp_renderer.go
+++ b/pkg/workflow/mcp_renderer.go
@@ -150,6 +150,13 @@ func RenderJSONMCPConfig(
options.Renderers.RenderQmd(&configBuilder, qmdTool, isLast, workflowData)
}
case "serena":
+ // If serena has an explicit MCP server config (with container field), use the
+ // custom renderer instead of the built-in hardcoded serena renderer.
+ // This allows shared workflows like shared/mcp/serena.md to configure a
+ // complete MCP server without relying on the deprecated tools.serena path.
+ if HandleCustomMCPToolInSwitch(&configBuilder, toolName, tools, isLast, options.Renderers.RenderCustomMCPConfig) {
+ break
+ }
serenaTool := tools["serena"]
options.Renderers.RenderSerena(&configBuilder, serenaTool, isLast)
case "cache-memory":
diff --git a/pkg/workflow/serena_go_import_test.go b/pkg/workflow/serena_go_import_test.go
new file mode 100644
index 0000000000..b57c9927cc
--- /dev/null
+++ b/pkg/workflow/serena_go_import_test.go
@@ -0,0 +1,208 @@
+//go:build !integration
+
+package workflow_test
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+
+ "github.com/github/gh-aw/pkg/stringutil"
+ "github.com/github/gh-aw/pkg/testutil"
+ "github.com/github/gh-aw/pkg/workflow"
+)
+
+// TestImportSerenaGoMD tests that importing shared/mcp/serena-go.md results in the
+// Serena MCP server being present in the compiled MCP config (container, entrypoint,
+// entrypointArgs, mounts). This is the end-to-end chain:
+//
+//	main.md → shared/mcp/serena-go.md → (uses/with) → shared/mcp/serena.md (explicit MCP config)
+func TestImportSerenaGoMD(t *testing.T) {
+	tempDir := testutil.TempDir(t, "test-serena-go-*")
+
+	// Create directory structure mirroring .github/workflows/shared/mcp/
+	sharedMCPDir := filepath.Join(tempDir, "shared", "mcp")
+	require.NoError(t, os.MkdirAll(sharedMCPDir, 0755), "create shared/mcp dir")
+
+	// serena.md: parameterized shared workflow with complete explicit MCP server config.
+	// NOTE(review): inside a backtick raw string, `\$` is two literal characters, so
+	// the YAML written to disk contains `\${GITHUB_WORKSPACE}` — confirm the compiler
+	// strips the backslash (heredoc-style escaping) rather than passing it through.
+	serenaPath := filepath.Join(sharedMCPDir, "serena.md")
+	serenaContent := `---
+import-schema:
+  languages:
+    type: array
+    items:
+      type: string
+    required: true
+
+mcp-servers:
+  serena:
+    container: "ghcr.io/github/serena-mcp-server:latest"
+    args:
+      - "--network"
+      - "host"
+    entrypoint: "serena"
+    entrypointArgs:
+      - "start-mcp-server"
+      - "--context"
+      - "codex"
+      - "--project"
+      - \${GITHUB_WORKSPACE}
+    mounts:
+      - \${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw
+---
+
+## Serena Code Analysis
+
+The Serena MCP server is configured for code analysis.
+`
+	require.NoError(t, os.WriteFile(serenaPath, []byte(serenaContent), 0644), "write serena.md")
+
+	// serena-go.md: convenience wrapper that imports serena.md for Go
+	serenaGoPath := filepath.Join(sharedMCPDir, "serena-go.md")
+	serenaGoContent := `---
+imports:
+  - uses: shared/mcp/serena.md
+    with:
+      languages: ["go"]
+---
+
+## Serena Go Code Analysis
+
+The Serena MCP server is configured for Go analysis.
+`
+	require.NoError(t, os.WriteFile(serenaGoPath, []byte(serenaGoContent), 0644), "write serena-go.md")
+
+	// main workflow that imports serena-go.md (plain path import, no with:)
+	workflowPath := filepath.Join(tempDir, "main-workflow.md")
+	workflowContent := `---
+on: issues
+engine: copilot
+imports:
+  - shared/mcp/serena-go.md
+permissions:
+  contents: read
+  issues: read
+  pull-requests: read
+---
+
+# Main Workflow
+
+Uses Serena for Go code analysis.
+`
+	require.NoError(t, os.WriteFile(workflowPath, []byte(workflowContent), 0644), "write main workflow")
+
+	// Compile the workflow
+	compiler := workflow.NewCompiler()
+	require.NoError(t, compiler.CompileWorkflow(workflowPath), "CompileWorkflow")
+
+	// Read the generated lock file
+	lockFilePath := stringutil.MarkdownToLockFile(workflowPath)
+	lockFileContent, err := os.ReadFile(lockFilePath)
+	require.NoError(t, err, "read lock file")
+
+	lockContent := string(lockFileContent)
+
+	// Serena MCP server must be present in the gateway config
+	assert.Contains(t, lockContent, `"serena"`, "lock file should contain serena MCP server entry")
+
+	// Container image must be correct
+	assert.Contains(t, lockContent, "ghcr.io/github/serena-mcp-server:latest",
+		"lock file should contain serena Docker container image")
+
+	// Entrypoint args must be serialized: a bare "serena" substring would be
+	// trivially satisfied by the container image above, so assert on the
+	// distinctive "--context codex" argument instead.
+	assert.Contains(t, lockContent, "codex",
+		"lock file should contain serena entrypoint --context arg")
+
+	// Docker image download step must include serena-mcp-server
+	assert.Contains(t, lockContent, "download_docker_images.sh",
+		"lock file should have docker image download step")
+	// (Subsumed by the :latest assertion above; kept to document the intent
+	// that the download step references the serena image.)
+	assert.True(t,
+		strings.Contains(lockContent, "ghcr.io/github/serena-mcp-server"),
+		"docker image download step should include serena-mcp-server image")
+
+	// Verify start-mcp-server entrypoint args are present
+	assert.Contains(t, lockContent, "start-mcp-server",
+		"lock file should contain start-mcp-server entrypoint arg")
+
+	// Verify workspace mount is present
+	assert.Contains(t, lockContent, "GITHUB_WORKSPACE",
+		"lock file should reference GITHUB_WORKSPACE for workspace mount")
+}
+
+// TestImportSerenaWithLanguagesMD tests that importing shared/mcp/serena.md with
+// explicit languages=[go, typescript] produces a working MCP config.
+func TestImportSerenaWithLanguagesMD(t *testing.T) {
+	dir := testutil.TempDir(t, "test-serena-langs-*")
+
+	// Mirror the .github/workflows/shared/mcp/ layout inside the temp dir.
+	mcpDir := filepath.Join(dir, "shared", "mcp")
+	require.NoError(t, os.MkdirAll(mcpDir, 0755), "create shared/mcp dir")
+
+	// Write the parameterized shared workflow (serena.md) with its explicit
+	// MCP server configuration and a required array input.
+	serenaSrc := `---
+import-schema:
+  languages:
+    type: array
+    items:
+      type: string
+    required: true
+
+mcp-servers:
+  serena:
+    container: "ghcr.io/github/serena-mcp-server:latest"
+    args:
+      - "--network"
+      - "host"
+    entrypoint: "serena"
+    entrypointArgs:
+      - "start-mcp-server"
+      - "--context"
+      - "codex"
+      - "--project"
+      - \${GITHUB_WORKSPACE}
+    mounts:
+      - \${GITHUB_WORKSPACE}:\${GITHUB_WORKSPACE}:rw
+---
+
+## Serena Code Analysis
+`
+	require.NoError(t, os.WriteFile(filepath.Join(mcpDir, "serena.md"), []byte(serenaSrc), 0644), "write serena.md")
+
+	// Main workflow imports serena.md directly via uses/with, supplying two languages.
+	mainPath := filepath.Join(dir, "main-workflow.md")
+	mainSrc := `---
+on: issues
+engine: copilot
+imports:
+  - uses: shared/mcp/serena.md
+    with:
+      languages: ["go", "typescript"]
+permissions:
+  contents: read
+  issues: read
+  pull-requests: read
+---
+
+# Main Workflow
+
+Uses Serena for Go and TypeScript analysis.
+`
+	require.NoError(t, os.WriteFile(mainPath, []byte(mainSrc), 0644), "write main workflow")
+
+	// Compile and load the resulting lock file.
+	require.NoError(t, workflow.NewCompiler().CompileWorkflow(mainPath), "CompileWorkflow")
+
+	lockBytes, err := os.ReadFile(stringutil.MarkdownToLockFile(mainPath))
+	require.NoError(t, err, "read lock file")
+	lock := string(lockBytes)
+
+	// The compiled config must carry the serena MCP entry, its container image,
+	// and its entrypoint arguments.
+	assert.Contains(t, lock, `"serena"`, "lock file should contain serena MCP entry")
+	assert.Contains(t, lock, "ghcr.io/github/serena-mcp-server:latest",
+		"lock file should contain serena container image")
+	assert.Contains(t, lock, "start-mcp-server",
+		"lock file should contain serena entrypoint args")
+}
diff --git a/pkg/workflow/strict_mode_serena_test.go b/pkg/workflow/strict_mode_serena_test.go
index fdfec8dba5..84fea48309 100644
--- a/pkg/workflow/strict_mode_serena_test.go
+++ b/pkg/workflow/strict_mode_serena_test.go
@@ -7,6 +7,7 @@ import (
)
// TestValidateStrictTools_SerenaDockerMode tests that serena docker mode is allowed in strict mode
+// (tools.serena is deprecated but still accepted with a warning, not a strict mode error)
func TestValidateStrictTools_SerenaDockerMode(t *testing.T) {
compiler := NewCompiler()
frontmatter := map[string]any{
@@ -23,11 +24,12 @@ func TestValidateStrictTools_SerenaDockerMode(t *testing.T) {
err := compiler.validateStrictTools(frontmatter)
if err != nil {
- t.Errorf("Expected no error for serena docker mode in strict mode, got: %v", err)
+ t.Errorf("Expected no error for serena docker mode in strict mode (only a warning), got: %v", err)
}
}
-// TestValidateStrictTools_SerenaNoMode tests that serena without mode is allowed (defaults to docker)
+// TestValidateStrictTools_SerenaNoMode tests that serena without mode is allowed in strict mode
+// (tools.serena is deprecated but still accepted with a warning, not a strict mode error)
func TestValidateStrictTools_SerenaNoMode(t *testing.T) {
compiler := NewCompiler()
frontmatter := map[string]any{
@@ -43,7 +45,7 @@ func TestValidateStrictTools_SerenaNoMode(t *testing.T) {
err := compiler.validateStrictTools(frontmatter)
if err != nil {
- t.Errorf("Expected no error for serena without mode in strict mode, got: %v", err)
+ t.Errorf("Expected no error for serena without mode in strict mode (only a warning), got: %v", err)
}
}
diff --git a/pkg/workflow/tools_parser.go b/pkg/workflow/tools_parser.go
index 4a4e28afdc..c74101a3b8 100644
--- a/pkg/workflow/tools_parser.go
+++ b/pkg/workflow/tools_parser.go
@@ -25,7 +25,7 @@
// - web-search: Web search capabilities
// - edit: File editing operations
// - playwright: Browser automation
-// - serena: Serena integration
+// - serena: Serena integration (deprecated, use mcp-servers.serena with shared/mcp/serena.md)
// - agentic-workflows: Nested workflow execution
// - cache-memory: In-workflow memory caching
// - repo-memory: Repository-backed persistent memory