release #1172

Workflow file for this run

name: release
on:
# Auto-trigger after all build/test workflows complete
workflow_run:
workflows: ["wheels", "wheels-docker", "wheels-arm64", "wstest", "main"]
types: [completed]
# Manual dispatch for debugging
workflow_dispatch:
jobs:
check-all-workflows:
name: Check if all workflows completed
runs-on: ubuntu-latest
outputs:
all_complete: ${{ steps.check.outputs.all_complete }}
wheels_run_id: ${{ steps.check.outputs.wheels_run_id }}
wheels_docker_run_id: ${{ steps.check.outputs.wheels_docker_run_id }}
wheels_arm64_run_id: ${{ steps.check.outputs.wheels_arm64_run_id }}
wstest_run_id: ${{ steps.check.outputs.wstest_run_id }}
main_run_id: ${{ steps.check.outputs.main_run_id }}
# Dynamic artifact names (with meta-checksum suffixes)
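# (Illustrative only: an uploaded artifact might be named e.g. "wheels-macos-arm64-3fa9c2d1",
# i.e. the stable prefix plus a build-specific suffix; the exact suffix format is defined by the build workflows.)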
artifact_macos_wheels: ${{ steps.check.outputs.artifact_macos_wheels }}
artifact_windows_wheels: ${{ steps.check.outputs.artifact_windows_wheels }}
artifact_source_dist: ${{ steps.check.outputs.artifact_source_dist }}
artifact_linux_no_nvx: ${{ steps.check.outputs.artifact_linux_no_nvx }}
artifact_manylinux_x86_64: ${{ steps.check.outputs.artifact_manylinux_x86_64 }}
artifact_arm64_cp311: ${{ steps.check.outputs.artifact_arm64_cp311 }}
artifact_arm64_cp313: ${{ steps.check.outputs.artifact_arm64_cp313 }}
artifact_arm64_pypy_bookworm: ${{ steps.check.outputs.artifact_arm64_pypy_bookworm }}
artifact_arm64_pypy_trixie: ${{ steps.check.outputs.artifact_arm64_pypy_trixie }}
steps:
- name: Check all required workflows completed
id: check
uses: actions/github-script@v7
with:
script: |
const requiredWorkflows = ['wheels', 'wheels-docker', 'wheels-arm64', 'wstest', 'main'];
// Handle both workflow_run and workflow_dispatch triggers
const commitSha = context.payload.workflow_run?.head_sha || context.sha;
const triggeredBy = context.payload.workflow_run?.name || 'manual (workflow_dispatch)';
console.log('─────────────────────────────────────────────────');
console.log('🔍 Checking workflow completion status');
console.log('─────────────────────────────────────────────────');
console.log(`Event: ${context.eventName}`);
console.log(`Commit SHA: ${commitSha}`);
console.log(`Triggered by: ${triggeredBy}`);
console.log('');
// Get all workflow runs for this commit
const { data: runs } = await github.rest.actions.listWorkflowRunsForRepo({
owner: context.repo.owner,
repo: context.repo.repo,
head_sha: commitSha,
per_page: 100
});
// Group by workflow name and find latest run for each
const latestRuns = {};
for (const run of runs.workflow_runs) {
const workflowName = run.name;
if (requiredWorkflows.includes(workflowName)) {
if (!latestRuns[workflowName] || run.id > latestRuns[workflowName].id) {
latestRuns[workflowName] = run;
}
}
}
// Check if all required workflows completed successfully
console.log('Required workflows status:');
const allComplete = requiredWorkflows.every(name => {
const run = latestRuns[name];
const complete = run && run.status === 'completed' && run.conclusion === 'success';
const status = run ? `${run.status}/${run.conclusion}` : 'not found';
console.log(` ${complete ? '✅' : '⏳'} ${name.padEnd(20)} : ${status}`);
return complete;
});
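// Note: Array.prototype.every short-circuits on the first incomplete workflow,
// so the status listing above may stop before all five required workflows are printed.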
console.log('');
if (!allComplete) {
console.log('⏳ Not all workflows complete yet - exiting early');
console.log(' This is normal! Release will proceed once all workflows finish.');
} else {
console.log('✅ All workflows complete - proceeding with release!');
}
console.log('─────────────────────────────────────────────────');
core.setOutput('all_complete', allComplete ? 'true' : 'false');
// Output run IDs for artifact downloads (using sanitized names)
core.setOutput('wheels_run_id', latestRuns['wheels']?.id || '');
core.setOutput('wheels_docker_run_id', latestRuns['wheels-docker']?.id || '');
core.setOutput('wheels_arm64_run_id', latestRuns['wheels-arm64']?.id || '');
core.setOutput('wstest_run_id', latestRuns['wstest']?.id || '');
core.setOutput('main_run_id', latestRuns['main']?.id || '');
// Query artifact names with meta-checksum suffixes
if (allComplete) {
console.log('');
console.log('─────────────────────────────────────────────────');
console.log('🔍 Querying unique artifact names');
console.log('─────────────────────────────────────────────────');
// Helper function to find artifact by prefix
async function findArtifact(runId, prefix) {
if (!runId) return '';
try {
const { data: artifacts } = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: runId
});
const artifact = artifacts.artifacts.find(a => a.name.startsWith(prefix + '-'));
if (artifact) {
console.log(` ✅ ${prefix.padEnd(45)} → ${artifact.name}`);
return artifact.name;
} else {
console.log(` ⚠️ ${prefix.padEnd(45)} → NOT FOUND`);
return '';
}
} catch (error) {
console.log(` ❌ ${prefix.padEnd(45)} → ERROR: ${error.message}`);
return '';
}
}
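// Example (hypothetical artifact name): findArtifact(runId, 'wheels-macos-arm64') matches an
// artifact named 'wheels-macos-arm64-<suffix>' (prefix + '-' + suffix), but would NOT match a
// bare 'wheels-macos-arm64' without a suffix, since the lookup requires the trailing '-'.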
// Query artifacts from wheels workflow
const wheelsRunId = latestRuns['wheels']?.id;
core.setOutput('artifact_macos_wheels', await findArtifact(wheelsRunId, 'wheels-macos-arm64'));
core.setOutput('artifact_windows_wheels', await findArtifact(wheelsRunId, 'wheels-windows-x86_64'));
core.setOutput('artifact_source_dist', await findArtifact(wheelsRunId, 'source-distribution'));
core.setOutput('artifact_linux_no_nvx', await findArtifact(wheelsRunId, 'linux-wheels-no-nvx'));
// Query artifacts from wheels-docker workflow
const wheelsDockerRunId = latestRuns['wheels-docker']?.id;
core.setOutput('artifact_manylinux_x86_64', await findArtifact(wheelsDockerRunId, 'artifacts-manylinux_2_28_x86_64'));
// Query artifacts from wheels-arm64 workflow
const wheelsArm64RunId = latestRuns['wheels-arm64']?.id;
core.setOutput('artifact_arm64_cp311', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-cpython-3.11-manylinux_2_28_aarch64'));
core.setOutput('artifact_arm64_cp313', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-cpython-3.13-manylinux_2_28_aarch64'));
core.setOutput('artifact_arm64_pypy_bookworm', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-pypy-3.11-bookworm-manylinux_2_36_aarch64'));
core.setOutput('artifact_arm64_pypy_trixie', await findArtifact(wheelsArm64RunId, 'artifacts-arm64-pypy-3.11-trixie-manylinux_2_38_aarch64'));
console.log('─────────────────────────────────────────────────');
}
identifiers:
needs: check-all-workflows
if: needs.check-all-workflows.outputs.all_complete == 'true'
# GitHub resolves reusable-workflow paths like .cicd/workflows/identifiers.yml at parse time,
# and submodules are not available in that context, so the following does NOT work:
# uses: ./.cicd/workflows/identifiers.yml
# we MUST reference the remote repo directly:
uses: wamp-proto/wamp-cicd/.github/workflows/identifiers.yml@main
# IMPORTANT: the using repo still needs .cicd as a Git submodule, because
# e.g. identifiers.yml accesses scripts/sanitize.sh from it!
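# (Assumed layout, for illustration only: the using repo vendors wamp-cicd as the submodule ".cicd",
# so helper scripts such as sanitize.sh are available in the checked-out tree at runtime even though
# the reusable workflow itself is referenced remotely above.)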
# Development GitHub releases (consolidates wheels from all build workflows)
release-development:
name: Development GitHub Release
needs: [check-all-workflows, identifiers]
runs-on: ubuntu-latest
# Only create releases for development builds (explicit positive list)
if: |
needs.check-all-workflows.outputs.all_complete == 'true' &&
(github.event_name == 'workflow_dispatch' ||
(github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')) &&
needs.identifiers.outputs.release_type == 'development'
env:
RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }}
RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Download and verify macOS wheels with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_macos_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify Windows wheels with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_windows_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify source distribution with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_source_dist }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Debug - List downloaded files
run: |
echo "======================================================================"
echo "==> DEBUG: Files in dist/ after downloading source-distribution"
echo "======================================================================"
echo "Using wheels_run_id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}"
echo ""
ls -la dist/
echo ""
echo "*.tar.gz files:"
find dist/ -name "*.tar.gz" -ls || echo "None found"
echo ""
echo "*.verify.txt files:"
find dist/ -name "*.verify.txt" -ls || echo "None found"
echo ""
shell: bash
- name: Re-verify source distribution integrity (chain of custody)
run: |
echo "======================================================================"
echo "==> Source Distribution Re-Verification (Chain of Custody)"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
echo "Re-verifying artifact integrity at release workflow."
echo "Comparing against original verification from wheels workflow."
echo ""
HAS_ERRORS=0
TARBALLS_VERIFIED=0
for tarball in dist/*.tar.gz; do
if [ ! -f "$tarball" ]; then
echo "⚠️ No source distribution found - skipping verification"
continue
fi
BASENAME=$(basename "$tarball")
VERIFY_FILE="dist/${BASENAME%.tar.gz}.verify.txt"
if [ ! -f "$VERIFY_FILE" ]; then
echo "⚠️ Warning: No original verification report found for $BASENAME"
echo " Expected: $VERIFY_FILE"
echo " Artifact may have been created without verification."
echo ""
HAS_ERRORS=1
continue
fi
echo "==> Re-verifying: $BASENAME"
echo ""
TARBALLS_VERIFIED=$((TARBALLS_VERIFIED + 1))
# Re-compute SHA256 hash
echo "Computing current SHA256 fingerprint..."
CURRENT_SHA256=$(openssl sha256 "$tarball" | awk '{print $2}')
echo "Current SHA256: $CURRENT_SHA256"
echo ""
# Extract original SHA256 from verification report
echo "Extracting original SHA256 from verification report..."
echo "DEBUG: Contents of $VERIFY_FILE:"
cat "$VERIFY_FILE"
echo ""
echo "DEBUG: Lines matching 'SHA256':"
grep -i "SHA256" "$VERIFY_FILE" || echo "(no matches found)"
echo ""
ORIGINAL_SHA256=$(grep -E "^SHA(2-)?256\(" "$VERIFY_FILE" | awk -F'= ' '{print $2}' | tr -d ' ' || echo "")
if [ -z "$ORIGINAL_SHA256" ]; then
echo "❌ ERROR: Could not extract SHA256 from verification report"
echo " The verification report may have an unexpected format"
HAS_ERRORS=1
continue
fi
echo "Original SHA256: $ORIGINAL_SHA256"
echo ""
# Compare hashes
if [ "$CURRENT_SHA256" = "$ORIGINAL_SHA256" ]; then
echo "✅ SHA256 MATCH - Artifact integrity confirmed through pipeline"
else
echo "❌ SHA256 MISMATCH - Artifact corrupted during transfer!"
echo ""
echo "This indicates corruption between:"
echo " 1. wheels workflow (artifact creation)"
echo " 2. release workflow (artifact consumption)"
echo ""
echo "Expected: $ORIGINAL_SHA256"
echo "Got: $CURRENT_SHA256"
echo ""
HAS_ERRORS=1
fi
echo ""
# Re-run gzip integrity test
echo "Re-running gzip integrity test..."
if gzip -tv "$tarball" 2>&1 | tee /tmp/gzip_output.txt; then
GZIP_EXIT=$?
if [ $GZIP_EXIT -eq 0 ]; then
echo "✅ Gzip test PASS"
else
echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)"
HAS_ERRORS=1
fi
else
GZIP_EXIT=$?
echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)"
cat /tmp/gzip_output.txt
HAS_ERRORS=1
fi
echo ""
# Re-run tar extraction test
echo "Re-running tar extraction test..."
if tar -tzf "$tarball" > /dev/null 2>&1; then
echo "✅ Tar extraction test PASS"
else
TAR_EXIT=$?
echo "❌ Tar extraction test FAIL (exit code $TAR_EXIT)"
HAS_ERRORS=1
fi
echo ""
echo "------------------------------------------------------------------------"
echo "Original verification report (first 30 lines):"
echo "------------------------------------------------------------------------"
head -30 "$VERIFY_FILE"
echo ""
echo "... (full report available in dist/$VERIFY_FILE)"
echo ""
done
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ RE-VERIFICATION FAILED"
echo "======================================================================"
echo ""
echo "Source distribution failed integrity checks at release workflow."
echo "This indicates either:"
echo " 1. Corruption during artifact transfer"
echo " 2. Packaging bug not caught at origin"
echo ""
echo "DO NOT PROCEED WITH RELEASE - investigate and fix first."
echo ""
exit 1
elif [ $TARBALLS_VERIFIED -eq 0 ]; then
echo "======================================================================"
echo "❌ RE-VERIFICATION FAILED - NO SOURCE DISTRIBUTIONS VERIFIED"
echo "======================================================================"
echo ""
echo "Zero source distributions were verified. This means:"
echo " 1. No *.tar.gz files were found in dist/, OR"
echo " 2. Source distribution download from artifacts failed"
echo ""
echo "This is a critical failure - we cannot confirm source distribution integrity."
echo "This was the root cause of issue #1714 (corrupted v25.10.1 on PyPI)."
echo ""
echo "DO NOT PROCEED WITH RELEASE - investigate artifact download!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ All source distributions re-verified successfully ($TARBALLS_VERIFIED tarballs)"
echo "======================================================================"
echo ""
echo "Chain of custody confirmed: wheels workflow → release workflow"
echo "Cryptographic integrity maintained throughout pipeline."
fi
shell: bash
- name: Download and verify Linux wheels without NVX with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_linux_no_nvx }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify manylinux x86_64 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_manylinux_x86_64 }}
path: ${{ github.workspace }}/wheelhouse/
run-id: ${{ needs.check-all-workflows.outputs.wheels_docker_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 CPython 3.11 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_cp311 }}
path: ${{ github.workspace }}/wheelhouse-arm64/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 CPython 3.13 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_cp313 }}
path: ${{ github.workspace }}/wheelhouse-arm64/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 PyPy 3.11 Bookworm artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_pypy_bookworm }}
path: ${{ github.workspace }}/wheelhouse-arm64/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 PyPy 3.11 Trixie artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_pypy_trixie }}
path: ${{ github.workspace }}/wheelhouse-arm64/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
# Download wstest conformance summary (explicit enumeration, no pattern)
- name: Download wstest conformance summary (quick)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: conformance-summary-quick
path: ${{ github.workspace }}/wstest-results/
run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
# Download FlatBuffers generated code (explicit enumeration for each matrix env)
- name: Download FlatBuffers generated code (cpy314)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-gen-cpy314
path: ${{ github.workspace }}/flatbuffers-gen/cpy314/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download FlatBuffers generated code (cpy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-gen-cpy311
path: ${{ github.workspace }}/flatbuffers-gen/cpy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download FlatBuffers generated code (pypy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-gen-pypy311
path: ${{ github.workspace }}/flatbuffers-gen/pypy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
# Download FlatBuffers schema files (explicit enumeration for each matrix env)
- name: Download FlatBuffers schema files (cpy314)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-schema-cpy314
path: ${{ github.workspace }}/flatbuffers-schema/cpy314/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download FlatBuffers schema files (cpy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-schema-cpy311
path: ${{ github.workspace }}/flatbuffers-schema/cpy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download FlatBuffers schema files (pypy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-schema-pypy311
path: ${{ github.workspace }}/flatbuffers-schema/pypy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
# Download SerDes conformance test results (explicit enumeration for each matrix env)
- name: Download SerDes test results (cpy314)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: serdes-test-results-cpy314
path: ${{ github.workspace }}/serdes-test-results/cpy314/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download SerDes test results (cpy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: serdes-test-results-cpy311
path: ${{ github.workspace }}/serdes-test-results/cpy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download SerDes test results (pypy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: serdes-test-results-pypy311
path: ${{ github.workspace }}/serdes-test-results/pypy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download WebSocket conformance HTML reports with-nvx (for RTD)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: websocket-conformance-docs-quick-with-nvx
path: ${{ github.workspace }}/websocket-conformance/with-nvx/
run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download WebSocket conformance HTML reports without-nvx (for RTD)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: websocket-conformance-docs-quick-without-nvx
path: ${{ github.workspace }}/websocket-conformance/without-nvx/
run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Force file system sync (post-download, pre-verification)
run: |
echo "======================================================================"
echo "==> Forcing File System Sync (Post-Download)"
echo "======================================================================"
echo ""
echo "Flushing all file system buffers after artifact downloads."
echo "Ensures all downloaded files are on disk before checksum verification."
echo ""
sync
echo "✅ All buffers flushed to disk"
echo ""
- name: Re-verify wheel checksums (chain of custody)
run: |
echo "======================================================================"
echo "==> Wheel Checksum Re-Verification (Chain of Custody)"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
echo "Re-verifying wheel integrity after artifact download."
echo "Detecting corruption during GitHub Actions artifact transfer."
echo ""
HAS_ERRORS=0
WHEELS_VERIFIED=0
# Re-verify wheels from wheels-docker workflow
if [ -d "wheelhouse" ] && [ -f "wheelhouse/CHECKSUMS.sha256" ]; then
echo "==> Re-verifying wheels-docker artifacts..."
cd wheelhouse
while IFS= read -r line; do
# Parse openssl output: "SHA256(file.whl)= checksum" or "SHA2-256(file.whl)= checksum"
ORIGINAL_CHECKSUM=$(echo "$line" | awk -F'= ' '{print $2}')
WHEEL_FILE=$(echo "$line" | sed 's/SHA\(2-\)\?256(\(.*\))=.*/\2/')
if [ ! -f "$WHEEL_FILE" ]; then
echo "❌ CRITICAL: Checksum file references missing wheel: $WHEEL_FILE"
echo " Original checksum line: $line"
echo " This indicates either:"
echo " 1. Path mismatch between build and release workflows"
echo " 2. Wheel was not downloaded from artifacts"
echo " 3. Artifact corruption during transfer"
HAS_ERRORS=1
continue
fi
# Re-compute current checksum
CURRENT_CHECKSUM=$(openssl sha256 "$WHEEL_FILE" | awk '{print $2}')
if [ "$CURRENT_CHECKSUM" = "$ORIGINAL_CHECKSUM" ]; then
echo "✅ $(basename $WHEEL_FILE): checksum verified"
WHEELS_VERIFIED=$((WHEELS_VERIFIED + 1))
else
echo "❌ $(basename $WHEEL_FILE): CHECKSUM MISMATCH!"
echo " Original: $ORIGINAL_CHECKSUM"
echo " Current: $CURRENT_CHECKSUM"
echo " => Artifact CORRUPTED during transfer!"
HAS_ERRORS=1
fi
done < CHECKSUMS.sha256
cd ..
echo ""
else
echo "⚠️ No checksums found for wheels-docker artifacts"
echo ""
fi
# Re-verify wheels from wheels-arm64 workflow
if [ -d "wheelhouse-arm64" ] && [ -f "wheelhouse-arm64/CHECKSUMS.sha256" ]; then
echo "==> Re-verifying wheels-arm64 artifacts..."
cd wheelhouse-arm64
while IFS= read -r line; do
# Parse openssl output: "SHA256(file.whl)= checksum" or "SHA2-256(file.whl)= checksum"
ORIGINAL_CHECKSUM=$(echo "$line" | awk -F'= ' '{print $2}')
WHEEL_FILE=$(echo "$line" | sed 's/SHA\(2-\)\?256(\(.*\))=.*/\2/')
if [ ! -f "$WHEEL_FILE" ]; then
echo "❌ CRITICAL: Checksum file references missing wheel: $WHEEL_FILE"
echo " Original checksum line: $line"
echo " This indicates either:"
echo " 1. Path mismatch between build and release workflows"
echo " 2. Wheel was not downloaded from artifacts"
echo " 3. Artifact corruption during transfer"
HAS_ERRORS=1
continue
fi
# Re-compute current checksum
CURRENT_CHECKSUM=$(openssl sha256 "$WHEEL_FILE" | awk '{print $2}')
if [ "$CURRENT_CHECKSUM" = "$ORIGINAL_CHECKSUM" ]; then
echo "✅ $(basename $WHEEL_FILE): checksum verified"
WHEELS_VERIFIED=$((WHEELS_VERIFIED + 1))
else
echo "❌ $(basename $WHEEL_FILE): CHECKSUM MISMATCH!"
echo " Original: $ORIGINAL_CHECKSUM"
echo " Current: $CURRENT_CHECKSUM"
echo " => Artifact CORRUPTED during transfer!"
HAS_ERRORS=1
fi
done < CHECKSUMS.sha256
cd ..
echo ""
else
echo "⚠️ No checksums found for wheels-arm64 artifacts"
echo ""
fi
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED"
echo "======================================================================"
echo ""
echo "One or more wheels failed checksum verification."
echo "This indicates CORRUPTION during GitHub Actions artifact transfer:"
echo " 1. Build workflow created valid wheel + checksum"
echo " 2. GitHub Actions corrupted wheel during upload/storage/download"
echo " 3. Downloaded wheel checksum doesn't match original"
echo ""
echo "DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
elif [ $WHEELS_VERIFIED -eq 0 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED - NO WHEELS VERIFIED"
echo "======================================================================"
echo ""
echo "Zero wheels were verified. This means:"
echo " 1. No CHECKSUMS.sha256 files were found, OR"
echo " 2. All wheels referenced in checksums were missing"
echo ""
echo "This is a critical failure - we cannot confirm wheel integrity."
echo ""
echo "DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ All wheel checksums verified successfully ($WHEELS_VERIFIED wheels)"
echo "======================================================================"
echo ""
echo "Chain of custody confirmed: build workflows → release workflow"
echo "No corruption detected during artifact transfer."
fi
- name: Consolidate all artifacts
run: |
echo "==> Consolidating all artifacts into unified release directory..."
mkdir -p release-artifacts
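# Target layout (assembled by the copies below): release-artifacts/ will hold all *.whl and *.tar.gz
# files flat, plus prefixed escort files such as wheels-CHECKSUMS.sha256, docker-CHECKSUMS.sha256,
# arm64-CHECKSUMS.sha256, *-VALIDATION.txt, *-build-info.txt and the *.verify.txt reports.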
# Copy wheels and verification files from wheels workflow
if [ -d "dist" ]; then
echo "Copying wheels workflow artifacts..."
find dist -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \;
# Copy verification escort files with "wheels-" prefix to avoid naming collisions
if [ -f "dist/CHECKSUMS.sha256" ]; then
cp dist/CHECKSUMS.sha256 release-artifacts/wheels-CHECKSUMS.sha256
fi
if [ -f "dist/VALIDATION.txt" ]; then
cp dist/VALIDATION.txt release-artifacts/wheels-VALIDATION.txt
fi
# Copy source distribution verification reports (already have unique names)
find dist -type f -name "*.verify.txt" -exec cp {} release-artifacts/ \; 2>/dev/null || true
fi
# Copy wheels and verification files from wheels-docker workflow
if [ -d "wheelhouse" ]; then
echo "Copying wheels-docker workflow artifacts..."
find wheelhouse -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \;
# Copy verification escort files with "docker-" prefix to avoid naming collisions
if [ -f "wheelhouse/CHECKSUMS.sha256" ]; then
cp wheelhouse/CHECKSUMS.sha256 release-artifacts/docker-CHECKSUMS.sha256
fi
if [ -f "wheelhouse/VALIDATION.txt" ]; then
cp wheelhouse/VALIDATION.txt release-artifacts/docker-VALIDATION.txt
fi
if [ -f "wheelhouse/build-info.txt" ]; then
cp wheelhouse/build-info.txt release-artifacts/docker-build-info.txt
fi
fi
# Copy ARM64 wheels and verification files from wheels-arm64 workflow
if [ -d "wheelhouse-arm64" ]; then
echo "Copying wheels-arm64 workflow artifacts..."
find wheelhouse-arm64 -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \;
# Copy verification escort files with "arm64-" prefix to avoid naming collisions
if [ -f "wheelhouse-arm64/CHECKSUMS.sha256" ]; then
cp wheelhouse-arm64/CHECKSUMS.sha256 release-artifacts/arm64-CHECKSUMS.sha256
fi
if [ -f "wheelhouse-arm64/VALIDATION.txt" ]; then
cp wheelhouse-arm64/VALIDATION.txt release-artifacts/arm64-VALIDATION.txt
fi
if [ -f "wheelhouse-arm64/build-info.txt" ]; then
cp wheelhouse-arm64/build-info.txt release-artifacts/arm64-build-info.txt
fi
fi
# Copy wstest conformance results
if [ -d "wstest-results" ]; then
echo "Copying wstest conformance results..."
find wstest-results -type f -exec cp {} release-artifacts/ \;
fi
# Package FlatBuffers schema as tarball
if [ -d "flatbuffers-schema" ]; then
echo "Packaging FlatBuffers schema..."
tar -czf release-artifacts/flatbuffers-schema.tar.gz -C flatbuffers-schema .
fi
# Package WebSocket conformance reports for RTD
if [ -d "websocket-conformance" ]; then
echo "Packaging WebSocket conformance reports for RTD..."
CONFORMANCE_TARBALL="autobahn-python-websocket-conformance-${RELEASE_NAME}.tar.gz"
tar -czf "release-artifacts/${CONFORMANCE_TARBALL}" -C websocket-conformance .
echo "Created: ${CONFORMANCE_TARBALL}"
fi
echo ""
echo "==> Unified release artifact inventory:"
ls -la release-artifacts/ || echo "No artifacts found"
echo ""
echo "Wheels: $(find release-artifacts -name "*.whl" | wc -l)"
echo "Source dists: $(find release-artifacts -name "*.tar.gz" ! -name "flatbuffers-schema.tar.gz" ! -name "autobahn-python-websocket-conformance-*.tar.gz" | wc -l)"
echo "Verification files (chain-of-custody):"
echo " - SHA256 checksums: $(find release-artifacts -name "*CHECKSUMS.sha256" | wc -l)"
echo " - Build validation: $(find release-artifacts -name "*VALIDATION.txt" | wc -l)"
echo " - Source verification: $(find release-artifacts -name "*.verify.txt" | wc -l)"
echo " - Build metadata: $(find release-artifacts -name "*build-info.txt" | wc -l)"
echo "Wstest reports: $(find release-artifacts -name "*wstest*" | wc -l)"
echo "FlatBuffers schema: $(ls release-artifacts/flatbuffers-schema.tar.gz 2>/dev/null && echo 'packaged' || echo 'not found')"
echo "Conformance reports: $(ls release-artifacts/autobahn-python-websocket-conformance-*.tar.gz 2>/dev/null && echo 'packaged' || echo 'not found')"
- name: Validate and clean release fileset for GitHub
uses: wamp-proto/wamp-cicd/actions/check-release-fileset@main
with:
distdir: release-artifacts
mode: strict
keep-metadata: true # Keep CHECKSUMS for user verification
targets: |
cpy311-linux-x86_64-manylinux_2_28
cpy311-linux-aarch64-manylinux_2_28
cpy311-win-amd64
cpy312-linux-x86_64-manylinux_2_28
cpy312-win-amd64
cpy313-macos-arm64
cpy313-linux-x86_64-manylinux_2_28
cpy313-linux-aarch64-manylinux_2_28
cpy313-win-amd64
cpy314-macos-arm64
cpy314-linux-x86_64-manylinux_2_28
cpy314-win-amd64
pypy311-macos-arm64
pypy311-linux-x86_64-manylinux_2_28
pypy311-linux-aarch64-manylinux_2_17
pypy311-win-amd64
source
- name: Validate all wheels before release (Final Gate)
run: |
set -o pipefail
echo "======================================================================"
echo "==> FINAL VALIDATION: All Wheels Before Release"
echo "======================================================================"
echo ""
echo "This is the last line of defense before creating GitHub Release."
echo "Any corrupted wheels will be caught here."
echo ""
echo "Installing twine for validation..."
# Install both packaging and twine from master for PEP 639 (Core Metadata 2.4) support
# Use --break-system-packages for consistency (safe in CI)
python3 -m pip install --break-system-packages git+https://github.com/pypa/packaging.git
python3 -m pip install --break-system-packages git+https://github.com/pypa/twine.git
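# PEP 639 (Core Metadata 2.4) introduces the License-Expression / License-File metadata fields
# (e.g. "License-Expression: MIT"); released packaging/twine versions may not yet accept wheels
# carrying them, hence the installs from git master above.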
echo ""
echo "==> Validation environment:"
echo "Python: $(python3 --version)"
echo "setuptools: $(python3 -m pip show setuptools | grep '^Version:' || echo 'not installed')"
echo "packaging: $(python3 -m pip show packaging | grep '^Version:' || echo 'not installed')"
echo "twine: $(twine --version)"
echo ""
HAS_ERRORS=0
WHEEL_COUNT=0
for wheel in release-artifacts/*.whl; do
if [ ! -f "$wheel" ]; then
echo "⚠️ No wheels found in release-artifacts/"
continue
fi
WHEEL_COUNT=$((WHEEL_COUNT + 1))
WHEEL_NAME=$(basename "$wheel")
echo "==> Validating [$WHEEL_COUNT]: $WHEEL_NAME"
echo ""
# Test 1: Can unzip read the wheel?
echo " [1/3] ZIP integrity test..."
if unzip -t "$wheel" > /dev/null 2>&1; then
echo " ✅ ZIP test PASS"
else
echo " ❌ ZIP test FAIL - wheel is CORRUPTED!"
echo " This wheel cannot be unzipped and is unusable."
echo " Wheel: $WHEEL_NAME"
HAS_ERRORS=1
fi
# Test 2: Python zipfile module validation
echo " [2/3] Python zipfile test..."
if python3 -m zipfile -t "$wheel" > /dev/null 2>&1; then
echo " ✅ Python zipfile test PASS"
else
echo " ❌ Python zipfile test FAIL - wheel is CORRUPTED!"
echo " Wheel: $WHEEL_NAME"
HAS_ERRORS=1
fi
# Test 3: twine check (validates wheel metadata and structure)
echo " [3/3] Twine validation..."
twine check "$wheel" 2>&1 | tee /tmp/twine_output.txt
TWINE_EXIT=${PIPESTATUS[0]}
# Fail on nonzero exit or any error-like output
if [ "$TWINE_EXIT" -eq 0 ] && ! grep -Eqi "ERROR|FAILED|InvalidDistribution" /tmp/twine_output.txt; then
echo " ✅ Twine check PASS"
else
echo " ❌ Twine check FAIL"
echo " Wheel: $WHEEL_NAME"
cat /tmp/twine_output.txt
HAS_ERRORS=1
fi
rm -f /tmp/twine_output.txt
echo ""
done
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ FINAL VALIDATION FAILED - RELEASE BLOCKED"
echo "======================================================================"
echo ""
echo "One or more wheels failed integrity checks at the FINAL GATE."
echo ""
echo "This means corrupted wheels made it through the build process"
echo "and were downloaded from artifacts."
echo ""
echo "This should NEVER happen if build-time validation is working."
echo ""
echo "CRITICAL: DO NOT PROCEED WITH RELEASE!"
echo ""
echo "Action required:"
echo " 1. Check build logs for the corrupted wheel(s)"
echo " 2. Identify which workflow produced the corrupt wheel"
echo " 3. Fix the build process"
echo " 4. Re-tag and rebuild from scratch"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ FINAL VALIDATION PASSED - All $WHEEL_COUNT wheels are valid"
echo "======================================================================"
echo ""
echo "All wheels passed integrity checks."
echo "Safe to proceed with GitHub Release creation."
fi
- name: Install jinja2-cli for template rendering
run: |
pip install jinja2-cli
- name: Render release notes from Jinja2 template
run: |
echo "==> Preparing release notes using Jinja2 template..."
echo "Release type: $RELEASE_TYPE"
echo "Release name: $RELEASE_NAME"
# Collect template variables
COMMIT_SHA="${GITHUB_SHA::8}"
BUILD_DATE="$(date -u +'%Y-%m-%d %H:%M:%S UTC')"
WHEEL_COUNT="$(find release-artifacts -name "*.whl" | wc -l)"
SDIST_COUNT="$(find release-artifacts -name "*.tar.gz" | wc -l)"
# Render template using jinja2
jinja2 .github/templates/release-development.md.j2 \
-D release_name="$RELEASE_NAME" \
-D commit_sha="$COMMIT_SHA" \
-D build_date="$BUILD_DATE" \
-D wheel_count="$WHEEL_COUNT" \
-D sdist_count="$SDIST_COUNT" \
-o release-notes.md
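# jinja2-cli passes each -D key=value as a string template variable, so the template can
# reference them as e.g. {{ release_name }} or {{ wheel_count }}.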
echo ""
echo "==> Generated release notes:"
cat release-notes.md
- name: Create development GitHub release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "==> Creating development GitHub release..."
echo "Release type: $RELEASE_TYPE"
echo "Release name: $RELEASE_NAME"
# Delete existing release if it exists (development builds may be rebuilt)
gh release delete "$RELEASE_NAME" --repo "$GITHUB_REPOSITORY" --yes || true
# Create the release using rendered notes
gh release create "$RELEASE_NAME" \
--repo "$GITHUB_REPOSITORY" \
--title "Development Build $RELEASE_NAME" \
--notes-file release-notes.md \
--prerelease \
release-artifacts/*
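# Note: if no tag named $RELEASE_NAME exists yet, gh release create makes one from the
# latest state of the default branch (no --target is passed here).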
echo "✅ Release $RELEASE_NAME created successfully"
# Nightly and stable GitHub releases (consolidates wheels from all build workflows)
release-nightly:
name: Nightly & Stable GitHub Releases
needs: [check-all-workflows, identifiers]
runs-on: ubuntu-latest
# Only create releases for nightly and stable builds (explicit positive list)
if: |
needs.check-all-workflows.outputs.all_complete == 'true' &&
(github.event_name == 'workflow_dispatch' ||
(github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')) &&
(needs.identifiers.outputs.release_type == 'nightly' || needs.identifiers.outputs.release_type == 'stable')
env:
RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }}
RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Download and verify macOS wheels with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_macos_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify Windows wheels with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_windows_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify source distribution with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_source_dist }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Debug - List downloaded files
run: |
echo "======================================================================"
echo "==> DEBUG: Files in dist/ after downloading source-distribution"
echo "======================================================================"
echo "Using wheels_run_id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}"
echo ""
ls -la dist/
echo ""
echo "*.tar.gz files:"
find dist/ -name "*.tar.gz" -ls || echo "None found"
echo ""
echo "*.verify.txt files:"
find dist/ -name "*.verify.txt" -ls || echo "None found"
echo ""
shell: bash
- name: Re-verify source distribution integrity (chain of custody)
run: |
echo "======================================================================"
echo "==> Source Distribution Re-Verification (Chain of Custody)"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
echo "Re-verifying artifact integrity at release workflow."
echo "Comparing against original verification from wheels workflow."
echo ""
HAS_ERRORS=0
TARBALLS_VERIFIED=0
for tarball in dist/*.tar.gz; do
if [ ! -f "$tarball" ]; then
echo "⚠️ No source distribution found - skipping verification"
continue
fi
BASENAME=$(basename "$tarball")
VERIFY_FILE="dist/${BASENAME%.tar.gz}.verify.txt"
if [ ! -f "$VERIFY_FILE" ]; then
echo "⚠️ Warning: No original verification report found for $BASENAME"
echo " Expected: $VERIFY_FILE"
echo " Artifact may have been created without verification."
echo ""
HAS_ERRORS=1
continue
fi
echo "==> Re-verifying: $BASENAME"
echo ""
TARBALLS_VERIFIED=$((TARBALLS_VERIFIED + 1))
# Re-compute SHA256 hash
echo "Computing current SHA256 fingerprint..."
CURRENT_SHA256=$(openssl sha256 "$tarball" | awk '{print $2}')
echo "Current SHA256: $CURRENT_SHA256"
echo ""
# Extract original SHA256 from verification report
echo "Extracting original SHA256 from verification report..."
echo "DEBUG: Contents of $VERIFY_FILE:"
cat "$VERIFY_FILE"
echo ""
echo "DEBUG: Lines matching 'SHA256':"
grep -i "SHA256" "$VERIFY_FILE" || echo "(no matches found)"
echo ""
ORIGINAL_SHA256=$(grep -E "^SHA(2-)?256\(" "$VERIFY_FILE" | awk -F'= ' '{print $2}' | tr -d ' ' || echo "")
if [ -z "$ORIGINAL_SHA256" ]; then
echo "❌ ERROR: Could not extract SHA256 from verification report"
echo " The verification report may have an unexpected format"
HAS_ERRORS=1
continue
fi
echo "Original SHA256: $ORIGINAL_SHA256"
echo ""
# Compare hashes
if [ "$CURRENT_SHA256" = "$ORIGINAL_SHA256" ]; then
echo "✅ SHA256 MATCH - Artifact integrity confirmed through pipeline"
else
echo "❌ SHA256 MISMATCH - Artifact corrupted during transfer!"
echo ""
echo "This indicates corruption between:"
echo " 1. wheels workflow (artifact creation)"
echo " 2. release workflow (artifact consumption)"
echo ""
echo "Expected: $ORIGINAL_SHA256"
echo "Got: $CURRENT_SHA256"
echo ""
HAS_ERRORS=1
fi
echo ""
# Re-run gzip integrity test
echo "Re-running gzip integrity test..."
if gzip -tv "$tarball" 2>&1 | tee /tmp/gzip_output.txt; then
GZIP_EXIT=$?
if [ $GZIP_EXIT -eq 0 ]; then
echo "✅ Gzip test PASS"
else
echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)"
HAS_ERRORS=1
fi
else
GZIP_EXIT=$?
echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)"
cat /tmp/gzip_output.txt
HAS_ERRORS=1
fi
echo ""
# Re-run tar extraction test
echo "Re-running tar extraction test..."
if tar -tzf "$tarball" > /dev/null 2>&1; then
echo "✅ Tar extraction test PASS"
else
TAR_EXIT=$?
echo "❌ Tar extraction test FAIL (exit code $TAR_EXIT)"
HAS_ERRORS=1
fi
echo ""
echo "------------------------------------------------------------------------"
echo "Original verification report (first 30 lines):"
echo "------------------------------------------------------------------------"
head -30 "$VERIFY_FILE"
echo ""
echo "... (full report available in dist/$VERIFY_FILE)"
echo ""
done
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ RE-VERIFICATION FAILED"
echo "======================================================================"
echo ""
echo "Source distribution failed integrity checks at release workflow."
echo "This indicates either:"
echo " 1. Corruption during artifact transfer"
echo " 2. Packaging bug not caught at origin"
echo ""
echo "DO NOT PROCEED WITH RELEASE - investigate and fix first."
echo ""
exit 1
elif [ $TARBALLS_VERIFIED -eq 0 ]; then
echo "======================================================================"
echo "❌ RE-VERIFICATION FAILED - NO SOURCE DISTRIBUTIONS VERIFIED"
echo "======================================================================"
echo ""
echo "Zero source distributions were verified. This means:"
echo " 1. No *.tar.gz files were found in dist/, OR"
echo " 2. Source distribution download from artifacts failed"
echo ""
echo "This is a critical failure - we cannot confirm source distribution integrity."
echo "This was the root cause of issue #1714 (corrupted v25.10.1 on PyPI)."
echo ""
echo "DO NOT PROCEED WITH RELEASE - investigate artifact download!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ All source distributions re-verified successfully ($TARBALLS_VERIFIED tarballs)"
echo "======================================================================"
echo ""
echo "Chain of custody confirmed: wheels workflow → release workflow"
echo "Cryptographic integrity maintained throughout pipeline."
fi
shell: bash
- name: Download and verify Linux wheels without NVX with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_linux_no_nvx }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify manylinux x86_64 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_manylinux_x86_64 }}
path: ${{ github.workspace }}/wheelhouse/
run-id: ${{ needs.check-all-workflows.outputs.wheels_docker_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 CPython 3.11 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_cp311 }}
path: ${{ github.workspace }}/wheelhouse-arm64/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 CPython 3.13 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_cp313 }}
path: ${{ github.workspace }}/wheelhouse-arm64/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 PyPy 3.11 Bookworm artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_pypy_bookworm }}
path: ${{ github.workspace }}/wheelhouse-arm64/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 PyPy 3.11 Trixie artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_pypy_trixie }}
path: ${{ github.workspace }}/wheelhouse-arm64/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
# Download wstest conformance summary (explicit enumeration, no pattern)
- name: Download wstest conformance summary (quick)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: conformance-summary-quick
path: ${{ github.workspace }}/wstest-results/
run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
# Download FlatBuffers generated code (explicit enumeration for each matrix env)
- name: Download FlatBuffers generated code (cpy314)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-gen-cpy314
path: ${{ github.workspace }}/flatbuffers-gen/cpy314/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download FlatBuffers generated code (cpy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-gen-cpy311
path: ${{ github.workspace }}/flatbuffers-gen/cpy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download FlatBuffers generated code (pypy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-gen-pypy311
path: ${{ github.workspace }}/flatbuffers-gen/pypy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
# Download FlatBuffers schema files (explicit enumeration for each matrix env)
- name: Download FlatBuffers schema files (cpy314)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-schema-cpy314
path: ${{ github.workspace }}/flatbuffers-schema/cpy314/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download FlatBuffers schema files (cpy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-schema-cpy311
path: ${{ github.workspace }}/flatbuffers-schema/cpy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download FlatBuffers schema files (pypy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: flatbuffers-schema-pypy311
path: ${{ github.workspace }}/flatbuffers-schema/pypy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
# Download SerDes conformance test results (explicit enumeration for each matrix env)
- name: Download SerDes test results (cpy314)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: serdes-test-results-cpy314
path: ${{ github.workspace }}/serdes-test-results/cpy314/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download SerDes test results (cpy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: serdes-test-results-cpy311
path: ${{ github.workspace }}/serdes-test-results/cpy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download SerDes test results (pypy311)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: serdes-test-results-pypy311
path: ${{ github.workspace }}/serdes-test-results/pypy311/
run-id: ${{ needs.check-all-workflows.outputs.main_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download WebSocket conformance HTML reports with-nvx (for RTD)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: websocket-conformance-docs-quick-with-nvx
path: ${{ github.workspace }}/websocket-conformance/with-nvx/
run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Download WebSocket conformance HTML reports without-nvx (for RTD)
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: websocket-conformance-docs-quick-without-nvx
path: ${{ github.workspace }}/websocket-conformance/without-nvx/
run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- name: Force file system sync (post-download, pre-verification)
run: |
echo "======================================================================"
echo "==> Forcing File System Sync (Post-Download)"
echo "======================================================================"
echo ""
echo "Flushing all file system buffers after artifact downloads."
echo "Ensures all downloaded files are on disk before checksum verification."
echo ""
sync
echo "✅ All buffers flushed to disk"
echo ""
- name: Re-verify wheel checksums (chain of custody)
run: |
echo "======================================================================"
echo "==> Wheel Checksum Re-Verification (Chain of Custody)"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
echo "Re-verifying wheel integrity after artifact download."
echo "Detecting corruption during GitHub Actions artifact transfer."
echo ""
HAS_ERRORS=0
WHEELS_VERIFIED=0
# Re-verify wheels from wheels-docker workflow
if [ -d "wheelhouse" ] && [ -f "wheelhouse/CHECKSUMS.sha256" ]; then
echo "==> Re-verifying wheels-docker artifacts..."
cd wheelhouse
while IFS= read -r line; do
# Parse openssl output: "SHA256(file.whl)= checksum" or "SHA2-256(file.whl)= checksum"
ORIGINAL_CHECKSUM=$(echo "$line" | awk -F'= ' '{print $2}')
WHEEL_FILE=$(echo "$line" | sed 's/SHA\(2-\)\?256(\(.*\))=.*/\2/')
if [ ! -f "$WHEEL_FILE" ]; then
echo "❌ CRITICAL: Checksum file references missing wheel: $WHEEL_FILE"
echo " Original checksum line: $line"
echo " This indicates either:"
echo " 1. Path mismatch between build and release workflows"
echo " 2. Wheel was not downloaded from artifacts"
echo " 3. Artifact corruption during transfer"
HAS_ERRORS=1
continue
fi
# Re-compute current checksum
CURRENT_CHECKSUM=$(openssl sha256 "$WHEEL_FILE" | awk '{print $2}')
if [ "$CURRENT_CHECKSUM" = "$ORIGINAL_CHECKSUM" ]; then
echo "✅ $(basename $WHEEL_FILE): checksum verified"
WHEELS_VERIFIED=$((WHEELS_VERIFIED + 1))
else
echo "❌ $(basename $WHEEL_FILE): CHECKSUM MISMATCH!"
echo " Original: $ORIGINAL_CHECKSUM"
echo " Current: $CURRENT_CHECKSUM"
echo " => Artifact CORRUPTED during transfer!"
HAS_ERRORS=1
fi
done < CHECKSUMS.sha256
cd ..
echo ""
else
echo "⚠️ No checksums found for wheels-docker artifacts"
echo ""
fi
# Re-verify wheels from wheels-arm64 workflow
if [ -d "wheelhouse-arm64" ] && [ -f "wheelhouse-arm64/CHECKSUMS.sha256" ]; then
echo "==> Re-verifying wheels-arm64 artifacts..."
cd wheelhouse-arm64
while IFS= read -r line; do
# Parse openssl output: "SHA256(file.whl)= checksum" or "SHA2-256(file.whl)= checksum"
ORIGINAL_CHECKSUM=$(echo "$line" | awk -F'= ' '{print $2}')
WHEEL_FILE=$(echo "$line" | sed 's/SHA\(2-\)\?256(\(.*\))=.*/\2/')
if [ ! -f "$WHEEL_FILE" ]; then
echo "❌ CRITICAL: Checksum file references missing wheel: $WHEEL_FILE"
echo " Original checksum line: $line"
echo " This indicates either:"
echo " 1. Path mismatch between build and release workflows"
echo " 2. Wheel was not downloaded from artifacts"
echo " 3. Artifact corruption during transfer"
HAS_ERRORS=1
continue
fi
# Re-compute current checksum
CURRENT_CHECKSUM=$(openssl sha256 "$WHEEL_FILE" | awk '{print $2}')
if [ "$CURRENT_CHECKSUM" = "$ORIGINAL_CHECKSUM" ]; then
echo "✅ $(basename $WHEEL_FILE): checksum verified"
WHEELS_VERIFIED=$((WHEELS_VERIFIED + 1))
else
echo "❌ $(basename $WHEEL_FILE): CHECKSUM MISMATCH!"
echo " Original: $ORIGINAL_CHECKSUM"
echo " Current: $CURRENT_CHECKSUM"
echo " => Artifact CORRUPTED during transfer!"
HAS_ERRORS=1
fi
done < CHECKSUMS.sha256
cd ..
echo ""
else
echo "⚠️ No checksums found for wheels-arm64 artifacts"
echo ""
fi
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED"
echo "======================================================================"
echo ""
echo "One or more wheels failed checksum verification."
echo "This indicates CORRUPTION during GitHub Actions artifact transfer:"
echo " 1. Build workflow created valid wheel + checksum"
echo " 2. GitHub Actions corrupted wheel during upload/storage/download"
echo " 3. Downloaded wheel checksum doesn't match original"
echo ""
echo "DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
elif [ $WHEELS_VERIFIED -eq 0 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED - NO WHEELS VERIFIED"
echo "======================================================================"
echo ""
echo "Zero wheels were verified. This means:"
echo " 1. No CHECKSUMS.sha256 files were found, OR"
echo " 2. All wheels referenced in checksums were missing"
echo ""
echo "This is a critical failure - we cannot confirm wheel integrity."
echo ""
echo "DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ All wheel checksums verified successfully ($WHEELS_VERIFIED wheels)"
echo "======================================================================"
echo ""
echo "Chain of custody confirmed: build workflows → release workflow"
echo "No corruption detected during artifact transfer."
fi
- name: Consolidate all artifacts
run: |
echo "==> Consolidating all artifacts into unified release directory..."
mkdir -p release-artifacts
# Copy wheels and verification files from wheels workflow
if [ -d "dist" ]; then
echo "Copying wheels workflow artifacts..."
find dist -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \;
# Copy verification escort files with "wheels-" prefix to avoid naming collisions
if [ -f "dist/CHECKSUMS.sha256" ]; then
cp dist/CHECKSUMS.sha256 release-artifacts/wheels-CHECKSUMS.sha256
fi
if [ -f "dist/VALIDATION.txt" ]; then
cp dist/VALIDATION.txt release-artifacts/wheels-VALIDATION.txt
fi
# Copy source distribution verification reports (already have unique names)
find dist -type f -name "*.verify.txt" -exec cp {} release-artifacts/ \; 2>/dev/null || true
fi
# Copy wheels and verification files from wheels-docker workflow
if [ -d "wheelhouse" ]; then
echo "Copying wheels-docker workflow artifacts..."
find wheelhouse -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \;
# Copy verification escort files with "docker-" prefix to avoid naming collisions
if [ -f "wheelhouse/CHECKSUMS.sha256" ]; then
cp wheelhouse/CHECKSUMS.sha256 release-artifacts/docker-CHECKSUMS.sha256
fi
if [ -f "wheelhouse/VALIDATION.txt" ]; then
cp wheelhouse/VALIDATION.txt release-artifacts/docker-VALIDATION.txt
fi
if [ -f "wheelhouse/build-info.txt" ]; then
cp wheelhouse/build-info.txt release-artifacts/docker-build-info.txt
fi
fi
# Copy ARM64 wheels and verification files from wheels-arm64 workflow
if [ -d "wheelhouse-arm64" ]; then
echo "Copying wheels-arm64 workflow artifacts..."
find wheelhouse-arm64 -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \;
# Copy verification escort files with "arm64-" prefix to avoid naming collisions
if [ -f "wheelhouse-arm64/CHECKSUMS.sha256" ]; then
cp wheelhouse-arm64/CHECKSUMS.sha256 release-artifacts/arm64-CHECKSUMS.sha256
fi
if [ -f "wheelhouse-arm64/VALIDATION.txt" ]; then
cp wheelhouse-arm64/VALIDATION.txt release-artifacts/arm64-VALIDATION.txt
fi
if [ -f "wheelhouse-arm64/build-info.txt" ]; then
cp wheelhouse-arm64/build-info.txt release-artifacts/arm64-build-info.txt
fi
fi
# Copy wstest conformance results
if [ -d "wstest-results" ]; then
echo "Copying wstest conformance results..."
find wstest-results -type f -exec cp {} release-artifacts/ \;
fi
# Package FlatBuffers schema as tarball
if [ -d "flatbuffers-schema" ]; then
echo "Packaging FlatBuffers schema..."
tar -czf release-artifacts/flatbuffers-schema.tar.gz -C flatbuffers-schema .
fi
# Package WebSocket conformance reports for RTD
if [ -d "websocket-conformance" ]; then
echo "Packaging WebSocket conformance reports for RTD..."
CONFORMANCE_TARBALL="autobahn-python-websocket-conformance-${RELEASE_NAME}.tar.gz"
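# e.g. autobahn-python-websocket-conformance-v25.9.1.tar.gz (version number here is illustrative)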
tar -czf "release-artifacts/${CONFORMANCE_TARBALL}" -C websocket-conformance .
echo "Created: ${CONFORMANCE_TARBALL}"
fi
echo ""
echo "==> Unified release artifact inventory:"
ls -la release-artifacts/ || echo "No artifacts found"
echo ""
echo "Wheels: $(find release-artifacts -name "*.whl" | wc -l)"
echo "Source dists: $(find release-artifacts -name "*.tar.gz" ! -name "flatbuffers-schema.tar.gz" ! -name "autobahn-python-websocket-conformance-*.tar.gz" | wc -l)"
echo "Verification files (chain-of-custody):"
echo " - SHA256 checksums: $(find release-artifacts -name "*CHECKSUMS.sha256" | wc -l)"
echo " - Build validation: $(find release-artifacts -name "*VALIDATION.txt" | wc -l)"
echo " - Source verification: $(find release-artifacts -name "*.verify.txt" | wc -l)"
echo " - Build metadata: $(find release-artifacts -name "*build-info.txt" | wc -l)"
echo "Wstest reports: $(find release-artifacts -name "*wstest*" | wc -l)"
echo "FlatBuffers schema: $(ls release-artifacts/flatbuffers-schema.tar.gz 2>/dev/null && echo 'packaged' || echo 'not found')"
echo "Conformance reports: $(ls release-artifacts/autobahn-python-websocket-conformance-*.tar.gz 2>/dev/null && echo 'packaged' || echo 'not found')"
- name: Validate and clean release fileset for GitHub
uses: wamp-proto/wamp-cicd/actions/check-release-fileset@main
with:
distdir: release-artifacts
mode: strict
keep-metadata: true # Keep CHECKSUMS for user verification
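# Target IDs below follow the pattern <interpreter+version>-<os>-<arch>[-<platform tag>],
# e.g. cpy313-linux-x86_64-manylinux_2_28; the exact semantics are defined by the check-release-fileset action.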
targets: |
cpy311-linux-x86_64-manylinux_2_28
cpy311-linux-aarch64-manylinux_2_28
cpy311-win-amd64
cpy312-linux-x86_64-manylinux_2_28
cpy312-win-amd64
cpy313-macos-arm64
cpy313-linux-x86_64-manylinux_2_28
cpy313-linux-aarch64-manylinux_2_28
cpy313-win-amd64
cpy314-macos-arm64
cpy314-linux-x86_64-manylinux_2_28
cpy314-win-amd64
pypy311-macos-arm64
pypy311-linux-x86_64-manylinux_2_28
pypy311-linux-aarch64-manylinux_2_17
pypy311-win-amd64
source
- name: Validate all wheels before release (Final Gate)
run: |
set -o pipefail
echo "======================================================================"
echo "==> FINAL VALIDATION: All Wheels Before Release"
echo "======================================================================"
echo ""
echo "This is the last line of defense before creating GitHub Release."
echo "Any corrupted wheels will be caught here."
echo ""
echo "Installing twine for validation..."
# Install both packaging and twine from master for PEP 639 (Core Metadata 2.4) support
# Use --break-system-packages for consistency (safe in CI)
python3 -m pip install --break-system-packages git+https://github.com/pypa/packaging.git
python3 -m pip install --break-system-packages git+https://github.com/pypa/twine.git
echo ""
echo "==> Validation environment:"
echo "Python: $(python3 --version)"
echo "setuptools: $(python3 -m pip show setuptools | grep '^Version:' || echo 'not installed')"
echo "packaging: $(python3 -m pip show packaging | grep '^Version:' || echo 'not installed')"
echo "twine: $(twine --version)"
echo ""
HAS_ERRORS=0
WHEEL_COUNT=0
for wheel in release-artifacts/*.whl; do
if [ ! -f "$wheel" ]; then
echo "⚠️ No wheels found in release-artifacts/"
continue
fi
WHEEL_COUNT=$((WHEEL_COUNT + 1))
WHEEL_NAME=$(basename "$wheel")
echo "==> Validating [$WHEEL_COUNT]: $WHEEL_NAME"
echo ""
# Test 1: Can unzip read the wheel?
echo " [1/3] ZIP integrity test..."
if unzip -t "$wheel" > /dev/null 2>&1; then
echo " ✅ ZIP test PASS"
else
echo " ❌ ZIP test FAIL - wheel is CORRUPTED!"
echo " This wheel cannot be unzipped and is unusable."
echo " Wheel: $WHEEL_NAME"
HAS_ERRORS=1
fi
# Test 2: Python zipfile module validation
echo " [2/3] Python zipfile test..."
if python3 -m zipfile -t "$wheel" > /dev/null 2>&1; then
echo " ✅ Python zipfile test PASS"
else
echo " ❌ Python zipfile test FAIL - wheel is CORRUPTED!"
echo " Wheel: $WHEEL_NAME"
HAS_ERRORS=1
fi
# Test 3: twine check (validates wheel metadata and structure)
echo " [3/3] Twine validation..."
twine check "$wheel" 2>&1 | tee /tmp/twine_output.txt
TWINE_EXIT=${PIPESTATUS[0]}
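# PIPESTATUS[0] is the exit status of twine itself (the first command in the pipeline), not of tee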
# Fail on nonzero exit or any error-like output
if [ "$TWINE_EXIT" -eq 0 ] && ! grep -Eqi "ERROR|FAILED|InvalidDistribution" /tmp/twine_output.txt; then
echo " ✅ Twine check PASS"
else
echo " ❌ Twine check FAIL"
echo " Wheel: $WHEEL_NAME"
cat /tmp/twine_output.txt
HAS_ERRORS=1
fi
rm -f /tmp/twine_output.txt
echo ""
done
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ FINAL VALIDATION FAILED - RELEASE BLOCKED"
echo "======================================================================"
echo ""
echo "One or more wheels failed integrity checks at the FINAL GATE."
echo ""
echo "This means corrupted wheels made it through the build process"
echo "and were downloaded from artifacts."
echo ""
echo "This should NEVER happen if build-time validation is working."
echo ""
echo "CRITICAL: DO NOT PROCEED WITH RELEASE!"
echo ""
echo "Action required:"
echo " 1. Check build logs for the corrupted wheel(s)"
echo " 2. Identify which workflow produced the corrupt wheel"
echo " 3. Fix the build process"
echo " 4. Re-tag and rebuild from scratch"
echo ""
exit 1
elif [ $WHEEL_COUNT -eq 0 ]; then
echo "======================================================================"
echo "❌ FINAL VALIDATION FAILED - NO WHEELS FOUND"
echo "======================================================================"
echo ""
echo "Zero wheels were found in release-artifacts/ - nothing was validated."
echo ""
echo "CRITICAL: DO NOT PROCEED WITH RELEASE!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ FINAL VALIDATION PASSED - All $WHEEL_COUNT wheels are valid"
echo "======================================================================"
echo ""
echo "All wheels passed integrity checks."
echo "Safe to proceed with GitHub Release creation."
fi
- name: Install jinja2-cli for template rendering
run: |
pip install jinja2-cli
- name: Render release notes from Jinja2 template
run: |
echo "==> Preparing release notes using Jinja2 template..."
echo "Release type: $RELEASE_TYPE"
echo "Release name: $RELEASE_NAME"
# Collect template variables
COMMIT_SHA="${GITHUB_SHA::8}"
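# bash substring expansion: first 8 characters of the full 40-character commit SHA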
BUILD_DATE="$(date -u +'%Y-%m-%d %H:%M:%S UTC')"
WHEEL_COUNT="$(find release-artifacts -name "*.whl" | wc -l)"
SDIST_COUNT="$(find release-artifacts -name "*.tar.gz" | wc -l)"
# Select template based on release type
if [ "$RELEASE_TYPE" = "stable" ]; then
TEMPLATE=".github/templates/release-stable.md.j2"
else
TEMPLATE=".github/templates/release-nightly.md.j2"
fi
# Render template using jinja2
jinja2 "$TEMPLATE" \
-D release_name="$RELEASE_NAME" \
-D commit_sha="$COMMIT_SHA" \
-D build_date="$BUILD_DATE" \
-D wheel_count="$WHEEL_COUNT" \
-D sdist_count="$SDIST_COUNT" \
-o release-notes.md
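# The -D values above are exposed to the templates as Jinja2 variables, e.g. {{ release_name }} or {{ wheel_count }} (assumes the templates reference these names)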
echo ""
echo "==> Generated release notes:"
cat release-notes.md
- name: Create unified GitHub release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "==> Creating unified GitHub release..."
echo "Release type: $RELEASE_TYPE"
echo "Release name: $RELEASE_NAME"
# Delete existing release if it exists (for nightly builds)
gh release delete "$RELEASE_NAME" --repo "$GITHUB_REPOSITORY" --yes || true
# Set release title and discussion category based on type
if [ "$RELEASE_TYPE" = "stable" ]; then
TITLE="Release $RELEASE_NAME"
DISCUSSION_FLAG="--discussion-category ci-cd"
else
TITLE="Nightly Build $RELEASE_NAME"
DISCUSSION_FLAG=""
fi
# Create the release using rendered notes
gh release create "$RELEASE_NAME" \
--repo "$GITHUB_REPOSITORY" \
--title "$TITLE" \
--notes-file release-notes.md \
$DISCUSSION_FLAG \
release-artifacts/*
echo "✅ Release $RELEASE_NAME created successfully"
# Stable release publishing: PyPI and RTD (consolidates from both wheel workflows)
release-stable:
name: Stable Release (PyPI & RTD)
needs: [check-all-workflows, identifiers, release-nightly]
runs-on: ubuntu-latest
# Only publish to PyPI for stable releases (explicit positive list)
if: |
needs.check-all-workflows.outputs.all_complete == 'true' &&
needs.identifiers.outputs.release_type == 'stable'
env:
RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }}
RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }}
environment:
name: pypi
url: https://pypi.org/p/autobahn
permissions:
id-token: write # For trusted publishing
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Download and verify macOS wheels with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_macos_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify Windows wheels with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_windows_wheels }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify source distribution with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_source_dist }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Debug - List downloaded files
run: |
echo "======================================================================"
echo "==> DEBUG: Files in dist/ after downloading source-distribution"
echo "======================================================================"
echo "Using wheels_run_id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}"
echo ""
ls -la dist/
echo ""
echo "*.tar.gz files:"
find dist/ -name "*.tar.gz" -ls || echo "None found"
echo ""
echo "*.verify.txt files:"
find dist/ -name "*.verify.txt" -ls || echo "None found"
echo ""
shell: bash
- name: Re-verify source distribution integrity (chain of custody)
run: |
echo "======================================================================"
echo "==> Source Distribution Re-Verification (Chain of Custody)"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
echo "Re-verifying artifact integrity at release workflow."
echo "Comparing against original verification from wheels workflow."
echo ""
HAS_ERRORS=0
TARBALLS_VERIFIED=0
for tarball in dist/*.tar.gz; do
if [ ! -f "$tarball" ]; then
echo "⚠️ No source distribution found - skipping verification"
continue
fi
BASENAME=$(basename "$tarball")
VERIFY_FILE="dist/${BASENAME%.tar.gz}.verify.txt"
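# e.g. dist/autobahn-25.9.1.tar.gz -> dist/autobahn-25.9.1.verify.txt (version number here is illustrative)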
if [ ! -f "$VERIFY_FILE" ]; then
echo "⚠️ Warning: No original verification report found for $BASENAME"
echo " Expected: $VERIFY_FILE"
echo " Artifact may have been created without verification."
echo ""
HAS_ERRORS=1
continue
fi
echo "==> Re-verifying: $BASENAME"
echo ""
TARBALLS_VERIFIED=$((TARBALLS_VERIFIED + 1))
# Re-compute SHA256 hash
echo "Computing current SHA256 fingerprint..."
CURRENT_SHA256=$(openssl sha256 "$tarball" | awk '{print $2}')
echo "Current SHA256: $CURRENT_SHA256"
echo ""
# Extract original SHA256 from verification report
echo "Extracting original SHA256 from verification report..."
echo "DEBUG: Contents of $VERIFY_FILE:"
cat "$VERIFY_FILE"
echo ""
echo "DEBUG: Lines matching 'SHA256':"
grep -i "SHA256" "$VERIFY_FILE" || echo "(no matches found)"
echo ""
ORIGINAL_SHA256=$(grep -E "^SHA(2-)?256\(" "$VERIFY_FILE" | awk -F'= ' '{print $2}' | tr -d ' ' || echo "")
if [ -z "$ORIGINAL_SHA256" ]; then
echo "❌ ERROR: Could not extract SHA256 from verification report"
echo " The verification report may have an unexpected format"
HAS_ERRORS=1
continue
fi
echo "Original SHA256: $ORIGINAL_SHA256"
echo ""
# Compare hashes
if [ "$CURRENT_SHA256" = "$ORIGINAL_SHA256" ]; then
echo "✅ SHA256 MATCH - Artifact integrity confirmed through pipeline"
else
echo "❌ SHA256 MISMATCH - Artifact corrupted during transfer!"
echo ""
echo "This indicates corruption between:"
echo " 1. wheels workflow (artifact creation)"
echo " 2. release workflow (artifact consumption)"
echo ""
echo "Expected: $ORIGINAL_SHA256"
echo "Got: $CURRENT_SHA256"
echo ""
HAS_ERRORS=1
fi
echo ""
# Re-run gzip integrity test
echo "Re-running gzip integrity test..."
# Use PIPESTATUS so we check gzip's exit status, not tee's (pipefail is not set in this step)
gzip -tv "$tarball" 2>&1 | tee /tmp/gzip_output.txt
GZIP_EXIT=${PIPESTATUS[0]}
if [ "$GZIP_EXIT" -eq 0 ]; then
echo "✅ Gzip test PASS"
else
echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)"
HAS_ERRORS=1
fi
rm -f /tmp/gzip_output.txt
echo ""
# Re-run tar extraction test
echo "Re-running tar extraction test..."
if tar -tzf "$tarball" > /dev/null 2>&1; then
echo "✅ Tar extraction test PASS"
else
TAR_EXIT=$?
echo "❌ Tar extraction test FAIL (exit code $TAR_EXIT)"
HAS_ERRORS=1
fi
echo ""
echo "------------------------------------------------------------------------"
echo "Original verification report (first 30 lines):"
echo "------------------------------------------------------------------------"
head -30 "$VERIFY_FILE"
echo ""
echo "... (full report available in dist/$VERIFY_FILE)"
echo ""
done
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ RE-VERIFICATION FAILED"
echo "======================================================================"
echo ""
echo "Source distribution failed integrity checks at release workflow."
echo "This indicates either:"
echo " 1. Corruption during artifact transfer"
echo " 2. Packaging bug not caught at origin"
echo ""
echo "DO NOT PROCEED WITH RELEASE - investigate and fix first."
echo ""
exit 1
elif [ $TARBALLS_VERIFIED -eq 0 ]; then
echo "======================================================================"
echo "❌ RE-VERIFICATION FAILED - NO SOURCE DISTRIBUTIONS VERIFIED"
echo "======================================================================"
echo ""
echo "Zero source distributions were verified. This means:"
echo " 1. No *.tar.gz files were found in dist/, OR"
echo " 2. Source distribution download from artifacts failed"
echo ""
echo "This is a critical failure - we cannot confirm source distribution integrity."
echo "This was the root cause of issue #1714 (corrupted v25.10.1 on PyPI)."
echo ""
echo "DO NOT PROCEED WITH RELEASE - investigate artifact download!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ All source distributions re-verified successfully ($TARBALLS_VERIFIED tarballs)"
echo "======================================================================"
echo ""
echo "Chain of custody confirmed: wheels workflow → release workflow"
echo "Cryptographic integrity maintained throughout pipeline."
fi
shell: bash
- name: Download and verify Linux wheels without NVX with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_linux_no_nvx }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify manylinux x86_64 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_manylinux_x86_64 }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_docker_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 CPython 3.11 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_cp311 }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 CPython 3.13 artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_cp313 }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 PyPy 3.11 Bookworm artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_pypy_bookworm }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Download and verify ARM64 PyPy 3.11 Trixie artifacts with retry logic
uses: wamp-proto/wamp-cicd/actions/download-artifact-verified@main
with:
name: ${{ needs.check-all-workflows.outputs.artifact_arm64_pypy_trixie }}
path: ${{ github.workspace }}/dist/
run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
max-attempts: 5
retry-delay: 30
continue-on-error: true
- name: Force file system sync (post-download, pre-verification)
run: |
echo "======================================================================"
echo "==> Forcing File System Sync (Post-Download - PyPI Upload)"
echo "======================================================================"
echo ""
echo "Flushing all file system buffers after artifact downloads."
echo "Ensures all downloaded files are on disk before checksum verification"
echo "and PyPI upload."
echo ""
sync
echo "✅ All buffers flushed to disk"
echo ""
- name: Re-verify wheel checksums (chain of custody)
run: |
echo "======================================================================"
echo "==> Wheel Checksum Re-Verification (Chain of Custody - PyPI Upload)"
echo "======================================================================"
echo ""
echo "OpenSSL version:"
openssl version
echo ""
echo "Re-verifying wheel integrity before PyPI upload."
echo "Detecting corruption during GitHub Actions artifact transfer."
echo ""
HAS_ERRORS=0
WHEELS_VERIFIED=0
# Find all CHECKSUMS.sha256 files (multiple may exist from different artifacts)
CHECKSUM_FILES=$(find dist/ -name "CHECKSUMS.sha256")
if [ -z "$CHECKSUM_FILES" ]; then
echo "❌ CRITICAL: No checksum files found in dist/"
echo " Wheels downloaded without chain-of-custody verification!"
echo " This should never happen if build workflows generated checksums."
echo ""
HAS_ERRORS=1
else
for CHECKSUM_FILE in $CHECKSUM_FILES; do
echo "==> Processing checksum file: $CHECKSUM_FILE"
# Change to the directory containing the checksum file
CHECKSUM_DIR=$(dirname "$CHECKSUM_FILE")
cd "$CHECKSUM_DIR"
while IFS= read -r line; do
# Parse openssl output: "SHA256(file.whl)= checksum" or "SHA2-256(file.whl)= checksum"
ORIGINAL_CHECKSUM=$(echo "$line" | awk -F'= ' '{print $2}')
WHEEL_FILE=$(echo "$line" | sed 's/SHA\(2-\)\?256(\(.*\))=.*/\2/')
if [ ! -f "$WHEEL_FILE" ]; then
echo "❌ CRITICAL: Checksum file references missing wheel: $WHEEL_FILE"
echo " Original checksum line: $line"
echo " This indicates either:"
echo " 1. Path mismatch between build and release workflows"
echo " 2. Wheel was not downloaded from artifacts"
echo " 3. Artifact corruption during transfer"
HAS_ERRORS=1
continue
fi
# Re-compute current checksum
CURRENT_CHECKSUM=$(openssl sha256 "$WHEEL_FILE" | awk '{print $2}')
if [ "$CURRENT_CHECKSUM" = "$ORIGINAL_CHECKSUM" ]; then
echo "✅ $(basename $WHEEL_FILE): checksum verified"
WHEELS_VERIFIED=$((WHEELS_VERIFIED + 1))
else
echo "❌ $(basename $WHEEL_FILE): CHECKSUM MISMATCH!"
echo " Original: $ORIGINAL_CHECKSUM"
echo " Current: $CURRENT_CHECKSUM"
echo " => Artifact CORRUPTED during transfer!"
HAS_ERRORS=1
fi
done < "$(basename $CHECKSUM_FILE)"
# Return to workflow root
cd - > /dev/null
echo ""
done
fi
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED"
echo "======================================================================"
echo ""
echo "One or more wheels failed checksum verification."
echo "This indicates CORRUPTION during GitHub Actions artifact transfer:"
echo " 1. Build workflow created valid wheel + checksum"
echo " 2. GitHub Actions corrupted wheel during upload/storage/download"
echo " 3. Downloaded wheel checksum doesn't match original"
echo ""
echo "CRITICAL: DO NOT UPLOAD TO PYPI!"
echo ""
exit 1
elif [ $WHEELS_VERIFIED -eq 0 ]; then
echo "======================================================================"
echo "❌ CHECKSUM RE-VERIFICATION FAILED - NO WHEELS VERIFIED"
echo "======================================================================"
echo ""
echo "Zero wheels were verified. This means:"
echo " 1. No CHECKSUMS.sha256 files were found, OR"
echo " 2. All wheels referenced in checksums were missing"
echo ""
echo "This is a critical failure - we cannot confirm wheel integrity."
echo ""
echo "CRITICAL: DO NOT UPLOAD TO PYPI!"
echo ""
exit 1
else
echo "======================================================================"
echo "✅ All wheel checksums verified successfully ($WHEELS_VERIFIED wheels)"
echo "======================================================================"
echo ""
echo "Chain of custody confirmed: build workflows → PyPI upload"
echo "No corruption detected during artifact transfer."
echo "Safe to proceed with PyPI upload."
fi
- name: Validate and clean release fileset for PyPI
uses: wamp-proto/wamp-cicd/actions/check-release-fileset@main
with:
distdir: dist
mode: strict
# keep-metadata: false (default - removes CHECKSUMS for PyPI)
targets: |
cpy311-linux-x86_64-manylinux_2_28
cpy311-linux-aarch64-manylinux_2_28
cpy311-win-amd64
cpy312-linux-x86_64-manylinux_2_28
cpy312-win-amd64
cpy313-macos-arm64
cpy313-linux-x86_64-manylinux_2_28
cpy313-linux-aarch64-manylinux_2_28
cpy313-win-amd64
cpy314-macos-arm64
cpy314-linux-x86_64-manylinux_2_28
cpy314-win-amd64
pypy311-macos-arm64
pypy311-linux-x86_64-manylinux_2_28
pypy311-linux-aarch64-manylinux_2_17
pypy311-win-amd64
source
- name: Check if version already exists on PyPI
id: pypi_check
run: |
# Extract version from release name (v25.9.1 -> 25.9.1)
VERSION="${RELEASE_NAME#v}"
echo "Checking if autobahn version ${VERSION} exists on PyPI..."
# Query PyPI JSON API
HTTP_CODE=$(curl -s -o /tmp/pypi_response.json -w "%{http_code}" "https://pypi.org/pypi/autobahn/${VERSION}/json")
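# HTTP 200 -> version already published, 404 -> version not on PyPI yet; any other code is treated as inconclusive below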
if [ "${HTTP_CODE}" = "200" ]; then
echo "⚠️ WARNING: Version ${VERSION} already exists on PyPI!"
echo "⚠️ PyPI does not allow re-uploading the same version."
echo "⚠️ Skipping PyPI upload to avoid error."
echo "exists=true" >> $GITHUB_OUTPUT
elif [ "${HTTP_CODE}" = "404" ]; then
echo "✅ Version ${VERSION} does not exist on PyPI yet - proceeding with upload"
echo "exists=false" >> $GITHUB_OUTPUT
else
echo "⚠️ Unexpected HTTP code ${HTTP_CODE} from PyPI API"
echo "⚠️ Response:"
cat /tmp/pypi_response.json || echo "(no response)"
echo "⚠️ Proceeding with upload anyway (will fail if version exists)"
echo "exists=false" >> $GITHUB_OUTPUT
fi
rm -f /tmp/pypi_response.json
- name: Final validation before PyPI upload
if: steps.pypi_check.outputs.exists == 'false'
run: |
set -o pipefail
echo "======================================================================"
echo "==> FINAL PYPI VALIDATION: All Packages"
echo "======================================================================"
echo ""
echo "Last chance to catch corrupted packages before PyPI upload."
echo ""
# Install both packaging and twine from master for PEP 639 (Core Metadata 2.4) support
# Use --break-system-packages for consistency (safe in CI)
python3 -m pip install --break-system-packages git+https://github.com/pypa/packaging.git
python3 -m pip install --break-system-packages git+https://github.com/pypa/twine.git
echo ""
echo "==> Validation environment:"
echo "Python: $(python3 --version)"
echo "setuptools: $(python3 -m pip show setuptools | grep '^Version:' || echo 'not installed')"
echo "packaging: $(python3 -m pip show packaging | grep '^Version:' || echo 'not installed')"
echo "twine: $(twine --version)"
echo ""
HAS_ERRORS=0
for pkg in dist/*.whl dist/*.tar.gz; do
if [ ! -f "$pkg" ]; then
continue
fi
PKG_NAME=$(basename "$pkg")
echo "==> Validating: $PKG_NAME"
# For wheels: full integrity check
if [[ "$pkg" == *.whl ]]; then
if ! unzip -t "$pkg" > /dev/null 2>&1; then
echo " ❌ ZIP test FAIL - CORRUPTED WHEEL!"
HAS_ERRORS=1
elif ! python3 -m zipfile -t "$pkg" > /dev/null 2>&1; then
echo " ❌ Python zipfile test FAIL - CORRUPTED WHEEL!"
HAS_ERRORS=1
else
# Run twine check and capture output
twine check "$pkg" 2>&1 | tee /tmp/twine_pypi_output.txt
TWINE_EXIT=${PIPESTATUS[0]}
# Fail on nonzero exit or any error-like output
if [ "$TWINE_EXIT" -eq 0 ] && ! grep -Eqi "ERROR|FAILED|InvalidDistribution" /tmp/twine_pypi_output.txt; then
echo " ✅ All checks PASS"
else
echo " ❌ Twine check FAIL"
cat /tmp/twine_pypi_output.txt
HAS_ERRORS=1
fi
rm -f /tmp/twine_pypi_output.txt
fi
# For source dists: gzip + tar integrity
elif [[ "$pkg" == *.tar.gz ]]; then
if ! gzip -t "$pkg" 2>/dev/null; then
echo " ❌ Gzip test FAIL - CORRUPTED TARBALL!"
HAS_ERRORS=1
elif ! tar -tzf "$pkg" > /dev/null 2>&1; then
echo " ❌ Tar test FAIL - CORRUPTED TARBALL!"
HAS_ERRORS=1
else
# Run twine check and capture output
twine check "$pkg" 2>&1 | tee /tmp/twine_pypi_output.txt
TWINE_EXIT=${PIPESTATUS[0]}
# Fail on nonzero exit or any error-like output
if [ "$TWINE_EXIT" -eq 0 ] && ! grep -Eqi "ERROR|FAILED|InvalidDistribution" /tmp/twine_pypi_output.txt; then
echo " ✅ All checks PASS"
else
echo " ❌ Twine check FAIL"
cat /tmp/twine_pypi_output.txt
HAS_ERRORS=1
fi
rm -f /tmp/twine_pypi_output.txt
fi
fi
echo ""
done
if [ $HAS_ERRORS -eq 1 ]; then
echo "======================================================================"
echo "❌ PYPI VALIDATION FAILED - UPLOAD BLOCKED"
echo "======================================================================"
echo ""
echo "Corrupted packages detected. PyPI upload BLOCKED."
echo ""
exit 1
else
echo "======================================================================"
echo "✅ ALL PACKAGES VALIDATED - Safe to upload to PyPI"
echo "======================================================================"
fi
- name: Publish to PyPI using bleeding-edge twine
if: steps.pypi_check.outputs.exists == 'false'
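# PyPI API-token authentication: the username must be the literal string __token__ and the password is the token value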
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
run: |
echo "==> Publishing to PyPI using twine from master..."
# Install bleeding-edge packaging and twine for PEP 639 support
# Use --break-system-packages for consistency (safe in CI)
python3 -m pip install --break-system-packages git+https://github.com/pypa/packaging.git
python3 -m pip install --break-system-packages git+https://github.com/pypa/twine.git
echo "Upload environment:"
echo "twine: $(twine --version)"
echo "packaging: $(python3 -m pip show packaging | grep '^Version:')"
echo ""
# Upload to PyPI
twine upload dist/*.whl dist/*.tar.gz --verbose
- name: Trigger RTD build
env:
RTD_TOKEN: ${{ secrets.RTD_TOKEN }}
run: |
if [ -n "$RTD_TOKEN" ]; then
echo "Triggering Read the Docs build for autobahn..."
curl -X POST \
-H "Authorization: Token $RTD_TOKEN" \
"https://readthedocs.org/api/v3/projects/autobahn/versions/latest/builds/"
echo "✅ RTD build triggered successfully"
else
echo "⚠️ RTD_TOKEN not configured, skipping RTD build trigger"
fi
# Mark release as complete (for release-post-comment to detect)
mark-release-complete:
name: Mark Release Complete
needs: [identifiers, release-development, release-nightly, release-stable]
if: always() && needs.identifiers.result == 'success' && (needs.release-development.result == 'success' || needs.release-nightly.result == 'success' || needs.release-stable.result == 'success')
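# always() lets this job run even when some of the needed release jobs were skipped; the explicit result checks still require identifiers plus at least one release job to have succeeded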
runs-on: ubuntu-latest
env:
RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }}
RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }}
steps:
- name: Create completion marker
run: |
echo "==> Creating release completion marker..."
echo "Release: $RELEASE_NAME"
echo "Type: $RELEASE_TYPE"
# Create completion marker JSON with metadata
cat > release-complete.json <<EOF
{
"release_name": "$RELEASE_NAME",
"release_type": "$RELEASE_TYPE",
"completed_at": "$(date -u +'%Y-%m-%dT%H:%M:%SZ')",
"workflow_run_id": "${{ github.run_id }}",
"commit_sha": "${{ needs.identifiers.outputs.head_sha }}",
"development_status": "${{ needs.release-development.result }}",
"nightly_status": "${{ needs.release-nightly.result }}",
"stable_status": "${{ needs.release-stable.result }}"
}
EOF
echo ""
echo "Completion marker contents:"
cat release-complete.json
- name: Upload completion marker to GitHub Release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "==> Uploading completion marker to release $RELEASE_NAME..."
# Upload the completion marker as a release asset
gh release upload "$RELEASE_NAME" \
release-complete.json \
--repo "$GITHUB_REPOSITORY" \
--clobber
echo "✅ Completion marker uploaded successfully"