diff --git a/.github/workflows/pr-code-coverage.yml b/.github/workflows/pr-code-coverage.yml
new file mode 100644
index 00000000..d00666b9
--- /dev/null
+++ b/.github/workflows/pr-code-coverage.yml
@@ -0,0 +1,491 @@
+name: PR Code Coverage
+
+on:
+ pull_request:
+ branches:
+ - main
+
+jobs:
+ coverage-report:
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ contents: read
+
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Setup git for diff-cover
+ run: |
+ # Fetch the main branch for comparison
+ git fetch origin main:main
+ # Show available branches for debugging
+ echo "Available branches:"
+ git branch -a
+ # Verify main branch exists
+ git show-ref --verify refs/heads/main || echo "Warning: main branch not found"
+ git show-ref --verify refs/remotes/origin/main || echo "Warning: origin/main not found"
+
+ - name: Wait for ADO build to start
+ run: |
+ PR_NUMBER=${{ github.event.pull_request.number }}
+ API_URL="https://dev.azure.com/sqlclientdrivers/public/_apis/build/builds?definitions=2128&queryOrder=queueTimeDescending&%24top=10&api-version=7.1-preview.7"
+
+ echo "Waiting for Azure DevOps build to start for PR #$PR_NUMBER ..."
+
+ for i in {1..30}; do
+ echo "Attempt $i/30: Checking if build has started..."
+
+ # Fetch API response with error handling
+ API_RESPONSE=$(curl -s "$API_URL")
+
+ # Check if response is valid JSON
+ if ! echo "$API_RESPONSE" | jq . >/dev/null 2>&1; then
+ echo "❌ Invalid JSON response from Azure DevOps API"
+ echo "Response received: $API_RESPONSE"
+ echo "This usually indicates the Azure DevOps pipeline has failed or API is unavailable"
+ exit 1
+ fi
+
+ # Parse build info safely
+ BUILD_INFO=$(echo "$API_RESPONSE" | jq -c --arg PR "$PR_NUMBER" '[.value[]? | select(.triggerInfo["pr.number"]?==$PR)] | .[0] // empty' 2>/dev/null)
+
+ if [[ -n "$BUILD_INFO" && "$BUILD_INFO" != "null" && "$BUILD_INFO" != "empty" ]]; then
+ STATUS=$(echo "$BUILD_INFO" | jq -r '.status // "unknown"')
+ RESULT=$(echo "$BUILD_INFO" | jq -r '.result // "unknown"')
+ BUILD_ID=$(echo "$BUILD_INFO" | jq -r '.id // "unknown"')
+ WEB_URL=$(echo "$BUILD_INFO" | jq -r '._links.web.href // "unknown"')
+
+ echo "✅ Found build: ID=$BUILD_ID, Status=$STATUS, Result=$RESULT"
+ echo "🔗 Build URL: $WEB_URL"
+ echo "ADO_URL=$WEB_URL" >> $GITHUB_ENV
+ echo "BUILD_ID=$BUILD_ID" >> $GITHUB_ENV
+
+ # Check if build has failed early
+ if [[ "$STATUS" == "completed" && "$RESULT" == "failed" ]]; then
+ echo "❌ Azure DevOps build $BUILD_ID failed early"
+ echo "This coverage workflow cannot proceed when the main build fails."
+ exit 1
+ fi
+
+ echo "🚀 Build has started, proceeding to poll for coverage artifacts..."
+ break
+ else
+ echo "⏳ No build found for PR #$PR_NUMBER yet... (attempt $i/30)"
+ fi
+
+ if [[ $i -eq 30 ]]; then
+ echo "❌ Timeout: No build found for PR #$PR_NUMBER after 30 attempts"
+ echo "This may indicate the Azure DevOps pipeline was not triggered"
+ exit 1
+ fi
+
+ sleep 10
+ done
+
+ - name: Download and parse coverage report
+ run: |
+ BUILD_ID=${{ env.BUILD_ID }}
+ ARTIFACTS_URL="https://dev.azure.com/SqlClientDrivers/public/_apis/build/builds/$BUILD_ID/artifacts?api-version=7.1-preview.5"
+
+ echo "📥 Polling for coverage artifacts for build $BUILD_ID..."
+
+ # Poll for coverage artifacts with retry logic
+ COVERAGE_ARTIFACT=""
+ for i in {1..60}; do
+ echo "Attempt $i/60: Checking for coverage artifacts..."
+
+ # Fetch artifacts with error handling
+ ARTIFACTS_RESPONSE=$(curl -s "$ARTIFACTS_URL")
+
+ # Check if response is valid JSON
+ if ! echo "$ARTIFACTS_RESPONSE" | jq . >/dev/null 2>&1; then
+ echo "⚠️ Invalid JSON response from artifacts API (attempt $i/60)"
+ if [[ $i -eq 60 ]]; then
+ echo "❌ Persistent API issues after 60 attempts"
+ echo "Response received: $ARTIFACTS_RESPONSE"
+ exit 1
+ fi
+ sleep 30
+ continue
+ fi
+
+ # Show available artifacts for debugging
+ echo "🔍 Available artifacts:"
+ echo "$ARTIFACTS_RESPONSE" | jq -r '.value[]?.name // "No artifacts found"'
+
+ # Find the coverage report artifact
+ COVERAGE_ARTIFACT=$(echo "$ARTIFACTS_RESPONSE" | jq -r '.value[]? | select(.name | test("Code Coverage Report")) | .resource.downloadUrl // empty' 2>/dev/null)
+
+ if [[ -n "$COVERAGE_ARTIFACT" && "$COVERAGE_ARTIFACT" != "null" && "$COVERAGE_ARTIFACT" != "empty" ]]; then
+ echo "✅ Found coverage artifact on attempt $i!"
+ break
+ else
+ echo "⏳ Coverage report not ready yet (attempt $i/60)..."
+ if [[ $i -eq 60 ]]; then
+ echo "❌ Timeout: Coverage report artifact not found after 60 attempts"
+ echo "Available artifacts:"
+ echo "$ARTIFACTS_RESPONSE" | jq -r '.value[]?.name // "No artifacts found"'
+ exit 1
+ fi
+ sleep 30
+ fi
+ done
+
+ if [[ -n "$COVERAGE_ARTIFACT" && "$COVERAGE_ARTIFACT" != "null" && "$COVERAGE_ARTIFACT" != "empty" ]]; then
+ echo "📊 Downloading coverage report..."
+ if ! curl -L "$COVERAGE_ARTIFACT" -o coverage-report.zip --fail --silent; then
+ echo "❌ Failed to download coverage report from Azure DevOps"
+ echo "This indicates the coverage artifacts may not be available or accessible"
+ exit 1
+ fi
+
+ if ! unzip -o -q coverage-report.zip; then
+ echo "❌ Failed to extract coverage artifacts"
+ echo "Trying to extract with verbose output for debugging..."
+ unzip -l coverage-report.zip || echo "Failed to list archive contents"
+ exit 1
+ fi
+
+ # Find the main index.html file
+ INDEX_FILE=$(find . -name "index.html" -path "*/Code Coverage Report*" | head -1)
+
+ if [[ -f "$INDEX_FILE" ]]; then
+ echo "🔍 Parsing coverage data from $INDEX_FILE..."
+
+ # Debug: Show relevant parts of the HTML
+ echo "Debug: Looking for coverage data..."
+ grep -n "cardpercentagebar\|Covered lines\|Coverable lines" "$INDEX_FILE" | head -10
+
+ # Extract coverage metrics using simpler, more reliable patterns
+ OVERALL_PERCENTAGE=$(grep -o 'cardpercentagebar[0-9]*">[0-9]*%' "$INDEX_FILE" | head -1 | grep -o '[0-9]*%')
+ COVERED_LINES=$(grep -A1 "Covered lines:" "$INDEX_FILE" | grep -o 'title="[0-9]*"' | head -1 | grep -o '[0-9]*')
+ TOTAL_LINES=$(grep -A1 "Coverable lines:" "$INDEX_FILE" | grep -o 'title="[0-9]*"' | head -1 | grep -o '[0-9]*')
+
+ # Fallback method if the above doesn't work
+ if [[ -z "$OVERALL_PERCENTAGE" ]]; then
+ echo "Trying alternative parsing method..."
+ OVERALL_PERCENTAGE=$(grep -o 'large.*">[0-9]*%' "$INDEX_FILE" | head -1 | grep -o '[0-9]*%')
+ fi
+
+ echo "Extracted values:"
+ echo "OVERALL_PERCENTAGE=$OVERALL_PERCENTAGE"
+ echo "COVERED_LINES=$COVERED_LINES"
+ echo "TOTAL_LINES=$TOTAL_LINES"
+
+ # Validate that we got the essential data
+ if [[ -z "$OVERALL_PERCENTAGE" ]]; then
+ echo "❌ Could not extract coverage percentage from the report"
+ echo "The coverage report format may have changed or be incomplete"
+ exit 1
+ fi
+
+ echo "COVERAGE_PERCENTAGE=$OVERALL_PERCENTAGE" >> $GITHUB_ENV
+ echo "COVERED_LINES=${COVERED_LINES:-N/A}" >> $GITHUB_ENV
+ echo "TOTAL_LINES=${TOTAL_LINES:-N/A}" >> $GITHUB_ENV
+
+ # Extract top files with low coverage - improved approach
+ echo "📋 Extracting file-level coverage..."
+
+ # Extract file coverage data more reliably
+ # NOTE(review): HTML table-row pattern reconstructed after markup was stripped from this script — verify against the generated index.html
+ LOW_COVERAGE_FILES=$(grep -o '<a href="[^"]*">[^<]*</a></td><td class="right">[0-9]*</td><td class="right">[0-9]*</td><td class="right">[0-9]*</td><td class="right">[0-9]*</td><td class="right">[0-9]*\.[0-9]*%' "$INDEX_FILE" | \
+ sed 's/<a href="[^"]*">\([^<]*\)<\/a><\/td>.*class="right">\([0-9]*\.[0-9]*\)%/\1: \2%/' | \
+ sort -t: -k2 -n | head -10)
+
+ # Alternative method if above fails
+ if [[ -z "$LOW_COVERAGE_FILES" ]]; then
+ echo "Trying alternative file parsing..."
+ LOW_COVERAGE_FILES=$(grep -E "\.py.*[0-9]+\.[0-9]+%" "$INDEX_FILE" | \
+ grep -o "<a[^>]*>[^<]*\.py[^<]*</a>.*[0-9]*\.[0-9]*%" | \
+ sed 's/\([^<]*\)<\/a>.*\([0-9]*\.[0-9]*\)%/\1: \2%/' | \
+ sort -t: -k2 -n | head -10)
+ fi
+
+ echo "LOW_COVERAGE_FILES<<EOF" >> $GITHUB_ENV
+ echo "${LOW_COVERAGE_FILES:-No detailed file data available}" >> $GITHUB_ENV
+ echo "EOF" >> $GITHUB_ENV
+
+ echo "✅ Coverage data extracted successfully"
+ else
+ echo "❌ Could not find index.html in coverage report"
+ echo "Available files in the coverage report:"
+ find . -name "*.html" | head -10 || echo "No HTML files found"
+ exit 1
+ fi
+ else
+ echo "❌ Could not find coverage report artifact"
+ echo "Available artifacts from the build:"
+ echo "$ARTIFACTS_RESPONSE" | jq -r '.value[]?.name // "No artifacts found"' 2>/dev/null || echo "Could not parse artifacts list"
+ echo "This indicates the Azure DevOps build may not have generated coverage reports"
+ exit 1
+ fi
+
+ - name: Download coverage XML from ADO
+ run: |
+ # Download the Cobertura XML directly from the CodeCoverageReport job
+ BUILD_ID=${{ env.BUILD_ID }}
+ ARTIFACTS_URL="https://dev.azure.com/SqlClientDrivers/public/_apis/build/builds/$BUILD_ID/artifacts?api-version=7.1-preview.5"
+
+ echo "📥 Fetching artifacts for build $BUILD_ID to find coverage files..."
+
+ # Fetch artifacts with error handling
+ ARTIFACTS_RESPONSE=$(curl -s "$ARTIFACTS_URL")
+
+ # Check if response is valid JSON
+ if ! echo "$ARTIFACTS_RESPONSE" | jq . >/dev/null 2>&1; then
+ echo "❌ Invalid JSON response from artifacts API"
+ echo "Response received: $ARTIFACTS_RESPONSE"
+ exit 1
+ fi
+
+ echo "🔍 Available artifacts:"
+ echo "$ARTIFACTS_RESPONSE" | jq -r '.value[]?.name // "No artifacts found"'
+
+ # Look for the unified coverage artifact from CodeCoverageReport job
+ COVERAGE_XML_ARTIFACT=$(echo "$ARTIFACTS_RESPONSE" | jq -r '.value[]? | select(.name | test("unified-coverage|Code Coverage Report|coverage")) | .resource.downloadUrl // empty' 2>/dev/null | head -1)
+
+ if [[ -n "$COVERAGE_XML_ARTIFACT" && "$COVERAGE_XML_ARTIFACT" != "null" && "$COVERAGE_XML_ARTIFACT" != "empty" ]]; then
+ echo "📊 Downloading coverage artifact from: $COVERAGE_XML_ARTIFACT"
+ if ! curl -L "$COVERAGE_XML_ARTIFACT" -o coverage-artifacts.zip --fail --silent; then
+ echo "❌ Failed to download coverage artifacts"
+ exit 1
+ fi
+
+ if ! unzip -o -q coverage-artifacts.zip; then
+ echo "❌ Failed to extract coverage artifacts"
+ echo "Trying to extract with verbose output for debugging..."
+ unzip -l coverage-artifacts.zip || echo "Failed to list archive contents"
+ exit 1
+ fi
+
+ echo "🔍 Looking for coverage XML files in extracted artifacts..."
+ find . -name "*.xml" -type f | head -10
+
+ # Look for the main coverage.xml file in unified-coverage directory or any coverage XML
+ if [[ -f "unified-coverage/coverage.xml" ]]; then
+ echo "✅ Found unified coverage file at unified-coverage/coverage.xml"
+ cp "unified-coverage/coverage.xml" ./coverage.xml
+ elif [[ -f "coverage.xml" ]]; then
+ echo "✅ Found coverage.xml in root directory"
+ # Already in the right place
+ else
+ # Try to find any coverage XML file
+ COVERAGE_FILE=$(find . -name "*coverage*.xml" -type f | head -1)
+ if [[ -n "$COVERAGE_FILE" ]]; then
+ echo "✅ Found coverage file: $COVERAGE_FILE"
+ cp "$COVERAGE_FILE" ./coverage.xml
+ else
+ echo "❌ No coverage XML file found in artifacts"
+ echo "Available files:"
+ find . -name "*.xml" -type f
+ exit 1
+ fi
+ fi
+
+ echo "✅ Coverage XML file is ready at ./coverage.xml"
+ ls -la ./coverage.xml
+ else
+ echo "❌ Could not find coverage artifacts"
+ echo "This indicates the Azure DevOps CodeCoverageReport job may not have run successfully"
+ exit 1
+ fi
+
+ - name: Generate patch coverage report
+ run: |
+ # Install dependencies (the jq CLI is preinstalled on ubuntu-latest runners; the PyPI "jq" package is unrelated bindings)
+ pip install diff-cover
+ sudo apt-get update && sudo apt-get install -y libxml2-utils
+
+ # Verify coverage.xml exists before proceeding
+ if [[ ! -f coverage.xml ]]; then
+ echo "❌ coverage.xml not found in current directory"
+ echo "Available files:"
+ ls -la | head -20
+ exit 1
+ fi
+
+ echo "✅ coverage.xml found, size: $(wc -c < coverage.xml) bytes"
+ echo "🔍 Coverage file preview (first 10 lines):"
+ head -10 coverage.xml
+
+ # Generate diff coverage report using the new command format
+ echo "🚀 Generating patch coverage report..."
+
+ # Debug: Show git status and branches before running diff-cover
+ echo "🔍 Git status before diff-cover:"
+ git status --porcelain || echo "Git status failed"
+ echo "Current branch: $(git branch --show-current)"
+ echo "Available branches:"
+ git branch -a
+ echo "Checking if main branch is accessible:"
+ git log --oneline -n 5 main || echo "Could not access main branch"
+
+ # Debug: Show what diff-cover will analyze
+ echo "🔍 Git diff analysis:"
+ echo "Files changed between main and current branch:"
+ git diff --name-only main || echo "Could not get diff"
+ echo "Detailed diff for Python files:"
+ git diff main -- "*.py" | head -50 || echo "Could not get Python diff"
+
+ # Debug: Check coverage.xml content for specific files
+ echo "🔍 Coverage.xml analysis:"
+ echo "Python files mentioned in coverage.xml:"
+ grep -o 'filename="[^"]*\.py"' coverage.xml | head -10 || echo "Could not extract filenames"
+ echo "Sample coverage data:"
+ head -20 coverage.xml
+
+ # Use the new format for diff-cover commands
+ echo "🚀 Running diff-cover..."
+ diff-cover coverage.xml \
+ --compare-branch=main \
+ --html-report patch-coverage.html \
+ --json-report patch-coverage.json \
+ --markdown-report patch-coverage.md || {
+ echo "❌ diff-cover failed with exit code $?"
+ echo "Checking if coverage.xml is valid XML..."
+ if ! xmllint --noout coverage.xml 2>/dev/null; then
+ echo "❌ coverage.xml is not valid XML"
+ echo "First 50 lines of coverage.xml:"
+ head -50 coverage.xml
+ else
+ echo "✅ coverage.xml is valid XML"
+ echo "🔍 diff-cover verbose output:"
+ diff-cover coverage.xml --compare-branch=main --markdown-report debug-patch-coverage.md -v || echo "Verbose diff-cover also failed"
+ fi
+ # Don't exit here, let's see what files were created
+ }
+
+ # Check what files were generated
+ echo "🔍 Files generated after diff-cover:"
+ ls -la patch-coverage.* || echo "No patch-coverage files found"
+ ls -la *.md *.html *.json | grep -E "(patch|coverage)" || echo "No coverage-related files found"
+
+ # Extract patch coverage percentage
+ if [[ -f patch-coverage.json ]]; then
+ echo "🔍 Patch coverage analysis from JSON:"
+ echo "Raw JSON content:"
+ cat patch-coverage.json | jq . || echo "Could not parse JSON"
+
+ PATCH_COVERAGE=$(jq -r '.total_percent_covered // "N/A"' patch-coverage.json)
+ TOTAL_STATEMENTS=$(jq -r '.total_num_lines // "N/A"' patch-coverage.json)
+ MISSING_STATEMENTS=$(jq -r '.total_num_missing // "N/A"' patch-coverage.json)
+
+ echo "✅ Patch coverage: ${PATCH_COVERAGE}%"
+ echo "📊 Total lines: $TOTAL_STATEMENTS, Missing: $MISSING_STATEMENTS"
+
+ # Debug: Show per-file breakdown
+ echo "📁 Per-file coverage breakdown:"
+ jq -r '.src_stats // {} | to_entries[] | "\(.key): \(.value.percent_covered)% (\(.value.num_lines) lines, \(.value.num_missing) missing)"' patch-coverage.json || echo "Could not extract per-file stats"
+
+ echo "PATCH_COVERAGE_PCT=${PATCH_COVERAGE}%" >> $GITHUB_ENV
+ elif [[ -f patch-coverage.md ]]; then
+ echo "🔍 Extracting patch coverage from markdown file:"
+ echo "Markdown content:"
+ cat patch-coverage.md
+
+ # Extract coverage percentage from markdown
+ PATCH_COVERAGE=$(grep -o "Coverage.*[0-9]*%" patch-coverage.md | grep -o "[0-9]*%" | head -1 | sed 's/%//')
+ TOTAL_LINES=$(grep -o "Total.*[0-9]* lines" patch-coverage.md | grep -o "[0-9]*" | head -1)
+ MISSING_LINES=$(grep -o "Missing.*[0-9]* lines" patch-coverage.md | grep -o "[0-9]*" | tail -1)
+
+ if [[ -n "$PATCH_COVERAGE" ]]; then
+ echo "✅ Extracted patch coverage: ${PATCH_COVERAGE}%"
+ echo "📊 Total lines: $TOTAL_LINES, Missing: $MISSING_LINES"
+ echo "PATCH_COVERAGE_PCT=${PATCH_COVERAGE}%" >> $GITHUB_ENV
+ else
+ echo "⚠️ Could not extract coverage percentage from markdown"
+ echo "PATCH_COVERAGE_PCT=Could not parse" >> $GITHUB_ENV
+ fi
+ else
+ echo "⚠️ No patch coverage files generated"
+ echo "🔍 Checking for other output files:"
+ ls -la *coverage* || echo "No coverage files found"
+ echo "PATCH_COVERAGE_PCT=Report not generated" >> $GITHUB_ENV
+ fi
+
+ # Extract summary for comment
+ if [[ -f patch-coverage.md ]]; then
+ echo "PATCH_COVERAGE_SUMMARY<<EOF" >> $GITHUB_ENV
+ cat patch-coverage.md >> $GITHUB_ENV
+ echo "EOF" >> $GITHUB_ENV
+ echo "✅ Patch coverage markdown summary ready"
+ else
+ echo "⚠️ patch-coverage.md not generated"
+ echo "PATCH_COVERAGE_SUMMARY=Patch coverage report could not be generated." >> $GITHUB_ENV
+ fi
+
+ - name: Comment coverage summary on PR
+ uses: marocchino/sticky-pull-request-comment@v2
+ with:
+ header: Code Coverage Report
+ message: |
+ # 📊 Code Coverage Report
+
+ <table>
+ <tr>
+ <td align="center">
+
+ ### 🔥 Diff Coverage
+ ### **${{ env.PATCH_COVERAGE_PCT }}**
+
+ </td>
+ <td align="center">
+
+ ### 🎯 Overall Coverage
+ ### **${{ env.COVERAGE_PERCENTAGE }}**
+
+ </td>
+ </tr>
+ <tr><td colspan="2" align="center">
+
+ **📈 Total Lines Covered:** `${{ env.COVERED_LINES }}` out of `${{ env.TOTAL_LINES }}`
+ **📁 Project:** `mssql-python`
+
+ </td></tr>
+ </table>
+
+ ---
+
+ ${{ env.PATCH_COVERAGE_SUMMARY }}
+
+ ---
+ ### 📋 Files Needing Attention
+
+ <details>
+ <summary>📉 Files with overall lowest coverage (click to expand)</summary>
+
+ ```diff
+ ${{ env.LOW_COVERAGE_FILES }}
+ ```
+
+ </details>
+
+
+ ---
+ ### 🔗 Quick Links
+ <table>
+ <tr>
+ <th>
+ ⚙️ Build Summary
+ </th>
+ <th>
+ 📋 Coverage Details
+ </th>
+ </tr>
+ <tr>
+ <td align="center">
+
+ [View Azure DevOps Build](${{ env.ADO_URL }})
+
+ </td>
+ <td align="center">
+
+ [Browse Full Coverage Report](${{ env.ADO_URL }}&view=codecoverage-tab)
+
+ </td>
+ </tr>
+ </table>
\ No newline at end of file
diff --git a/.github/workflows/pr-format-check.yml b/.github/workflows/pr-format-check.yml
index 3155eed9..55c3129d 100644
--- a/.github/workflows/pr-format-check.yml
+++ b/.github/workflows/pr-format-check.yml
@@ -94,24 +94,35 @@ jobs:
labelToAdd = 'pr-size: large';
}
- // Remove existing size labels if any
+ // Get existing labels
const existingLabels = pr.labels.map(l => l.name);
const sizeLabels = ['pr-size: small', 'pr-size: medium', 'pr-size: large'];
- for (const label of existingLabels) {
- if (sizeLabels.includes(label)) {
+
+ // Find current size label (if any)
+ const currentSizeLabel = existingLabels.find(label => sizeLabels.includes(label));
+
+ // Only make changes if the label needs to be updated
+ if (currentSizeLabel !== labelToAdd) {
+ console.log(`Current size label: ${currentSizeLabel || 'none'}`);
+ console.log(`Required size label: ${labelToAdd} (Total changes: ${totalChanges})`);
+
+ // Remove existing size label if different from required
+ if (currentSizeLabel) {
+ console.log(`Removing outdated label: ${currentSizeLabel}`);
await github.rest.issues.removeLabel({
...context.repo,
issue_number: pr.number,
- name: label,
+ name: currentSizeLabel,
});
}
- }
- // Add new size label
- await github.rest.issues.addLabels({
- ...context.repo,
- issue_number: pr.number,
- labels: [labelToAdd],
- });
-
- console.log(`Added label: ${labelToAdd} (Total changes: ${totalChanges})`);
+ // Add new size label
+ console.log(`Adding new label: ${labelToAdd}`);
+ await github.rest.issues.addLabels({
+ ...context.repo,
+ issue_number: pr.number,
+ labels: [labelToAdd],
+ });
+ } else {
+ console.log(`Label already correct: ${labelToAdd} (Total changes: ${totalChanges}) - no changes needed`);
+ }
diff --git a/eng/pipelines/pr-validation-pipeline.yml b/eng/pipelines/pr-validation-pipeline.yml
index 51778489..d2ede247 100644
--- a/eng/pipelines/pr-validation-pipeline.yml
+++ b/eng/pipelines/pr-validation-pipeline.yml
@@ -110,11 +110,11 @@ jobs:
testResultsFiles: '**/test-results.xml'
testRunTitle: 'Publish test results'
- - task: PublishCodeCoverageResults@1
- inputs:
- codeCoverageTool: 'Cobertura'
- summaryFileLocation: 'coverage.xml'
- displayName: 'Publish code coverage results'
+ # - task: PublishCodeCoverageResults@1
+ # inputs:
+ # codeCoverageTool: 'Cobertura'
+ # summaryFileLocation: 'coverage.xml'
+ # displayName: 'Publish code coverage results'
- job: PytestOnMacOS
displayName: 'macOS x86_64'
@@ -1515,3 +1515,79 @@ jobs:
inputs:
testResultsFiles: '**/test-results-alpine-arm64.xml'
testRunTitle: 'Publish pytest results on Alpine ARM64'
+
+- job: CodeCoverageReport
+ displayName: 'Full Code Coverage Report in Ubuntu x86_64'
+ pool:
+ vmImage: 'ubuntu-latest'
+
+ steps:
+ - script: |
+ # Install build dependencies
+ sudo apt-get update
+ sudo apt-get install -y cmake gcc g++ lcov unixodbc-dev llvm clang
+ displayName: 'Install build dependencies'
+
+ - script: |
+ # Start SQL Server container
+ docker pull mcr.microsoft.com/mssql/server:2022-latest
+ docker run \
+ --name sqlserver \
+ -e ACCEPT_EULA=Y \
+ -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \
+ -p 1433:1433 \
+ -d mcr.microsoft.com/mssql/server:2022-latest
+
+ # Wait until SQL Server is ready
+ for i in {1..30}; do
+ docker exec sqlserver \
+ /opt/mssql-tools18/bin/sqlcmd \
+ -S localhost \
+ -U SA \
+ -P "$(DB_PASSWORD)" \
+ -C -Q "SELECT 1" && break
+ sleep 2
+ done
+ displayName: 'Start SQL Server container'
+ env:
+ DB_PASSWORD: $(DB_PASSWORD)
+
+ - script: |
+ # Install Python dependencies
+ python -m pip install --upgrade pip
+ pip install -r requirements.txt
+ pip install coverage-lcov lcov-cobertura
+ displayName: 'Install Python dependencies'
+
+ - script: |
+ # Build pybind bindings with coverage instrumentation
+ cd mssql_python/pybind
+ ./build.sh codecov
+ displayName: 'Build pybind bindings with coverage'
+
+ - script: |
+ # Generate unified coverage (Python + C++)
+ chmod +x ./generate_codecov.sh
+ ./generate_codecov.sh
+
+ # Convert unified LCOV to Cobertura XML for ADO reporting
+ lcov_cobertura total.info --output unified-coverage/coverage.xml
+ displayName: 'Generate unified coverage (Python + C++)'
+ env:
+ DB_CONNECTION_STRING: 'Driver=ODBC Driver 18 for SQL Server;Server=tcp:127.0.0.1,1433;Database=master;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes'
+ DB_PASSWORD: $(DB_PASSWORD)
+
+ - task: PublishTestResults@2
+ condition: succeededOrFailed()
+ inputs:
+ testResultsFiles: '**/test-results.xml'
+ testRunTitle: 'Publish pytest results with unified coverage'
+
+ - task: PublishCodeCoverageResults@2
+ condition: succeededOrFailed()
+ inputs:
+ codeCoverageTool: Cobertura
+ summaryFileLocation: 'unified-coverage/coverage.xml'
+ reportDirectory: 'unified-coverage'
+ failIfCoverageEmpty: true
+ displayName: 'Publish unified code coverage results'
diff --git a/generate_codecov.sh b/generate_codecov.sh
new file mode 100644
index 00000000..d9c69018
--- /dev/null
+++ b/generate_codecov.sh
@@ -0,0 +1,103 @@
+#!/bin/bash
+set -euo pipefail
+
+echo "==================================="
+echo "[STEP 1] Installing dependencies"
+echo "==================================="
+
+# Update package list
+sudo apt-get update
+
+# Install LLVM (for llvm-profdata, llvm-cov)
+if ! command -v llvm-profdata &>/dev/null; then
+ echo "[ACTION] Installing LLVM via apt"
+ sudo apt-get install -y llvm
+fi
+
+# Install lcov (provides lcov + genhtml)
+if ! command -v genhtml &>/dev/null; then
+ echo "[ACTION] Installing lcov via apt"
+ sudo apt-get install -y lcov
+fi
+
+# Install Python plugin for LCOV export
+if ! python -m pip show coverage-lcov &>/dev/null; then
+ echo "[ACTION] Installing coverage-lcov via pip"
+ python -m pip install coverage-lcov
+fi
+
+# Install LCOV → Cobertura converter (for ADO)
+if ! python -m pip show lcov-cobertura &>/dev/null; then
+ echo "[ACTION] Installing lcov-cobertura via pip"
+ python -m pip install lcov-cobertura
+fi
+
+echo "==================================="
+echo "[STEP 2] Running pytest with Python coverage"
+echo "==================================="
+
+# Cleanup old coverage
+rm -f .coverage coverage.xml python-coverage.info cpp-coverage.info total.info
+rm -rf htmlcov unified-coverage
+
+# Run pytest with Python coverage (XML + HTML output)
+python -m pytest -v \
+ --junitxml=test-results.xml \
+ --cov=mssql_python \
+ --cov-report=xml:coverage.xml \
+ --cov-report=html \
+ --capture=tee-sys \
+ --cache-clear
+
+# Convert Python coverage to LCOV format (restrict to repo only)
+echo "[ACTION] Converting Python coverage to LCOV"
+coverage lcov -o python-coverage.info --include="mssql_python/*"
+
+echo "==================================="
+echo "[STEP 3] Processing C++ coverage (Clang/LLVM)"
+echo "==================================="
+
+# Merge raw profile data from pybind runs
+if [ ! -f default.profraw ]; then
+ echo "[ERROR] default.profraw not found. Did you build with -fprofile-instr-generate?"
+ exit 1
+fi
+
+llvm-profdata merge -sparse default.profraw -o default.profdata
+
+# Find the pybind .so file (Linux build)
+PYBIND_SO=$(find mssql_python -name "*.so" | head -n 1)
+if [ -z "$PYBIND_SO" ]; then
+ echo "[ERROR] Could not find pybind .so"
+ exit 1
+fi
+
+echo "[INFO] Using pybind module: $PYBIND_SO"
+
+# Export C++ coverage, excluding Python headers, pybind11, and system includes
+llvm-cov export "$PYBIND_SO" \
+ -instr-profile=default.profdata \
+ -ignore-filename-regex='(python3\.[0-9]+|cpython|pybind11|/usr/include/|/usr/lib/)' \
+ --skip-functions \
+ -format=lcov > cpp-coverage.info
+
+echo "==================================="
+echo "[STEP 4] Merging Python + C++ coverage"
+echo "==================================="
+
+# Merge LCOV reports (ignore inconsistencies in Python LCOV export)
+lcov -a python-coverage.info -a cpp-coverage.info -o total.info \
+ --ignore-errors inconsistent,corrupt
+
+# Normalize paths so everything starts from mssql_python/
+echo "[ACTION] Normalizing paths in LCOV report"
+sed -i "s|$(pwd)/||g" total.info
+
+# Generate full HTML report
+genhtml total.info \
+ --output-directory unified-coverage \
+ --quiet \
+ --title "Unified Coverage Report"
+
+# Generate Cobertura XML (for Azure DevOps Code Coverage tab)
+lcov_cobertura total.info --output coverage.xml
diff --git a/mssql_python/pybind/build.sh b/mssql_python/pybind/build.sh
index dbd1e6c3..7a20b61c 100755
--- a/mssql_python/pybind/build.sh
+++ b/mssql_python/pybind/build.sh
@@ -26,6 +26,13 @@ else
exit 1
fi
+# Check for coverage mode and set flags accordingly
+COVERAGE_MODE=false
+if [[ "${1:-}" == "codecov" || "${1:-}" == "--coverage" ]]; then
+ COVERAGE_MODE=true
+ echo "[MODE] Enabling Clang coverage instrumentation"
+fi
+
# Get Python version from active interpreter
PYTAG=$(python -c "import sys; print(f'{sys.version_info.major}{sys.version_info.minor}')")
@@ -47,20 +54,30 @@ if [ -d "build" ]; then
echo "Build directory removed."
fi
-# Create build directory for universal binary
+# Create build directory
BUILD_DIR="${SOURCE_DIR}/build"
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
echo "[DIAGNOSTIC] Changed to build directory: ${BUILD_DIR}"
-# Configure CMake (architecture settings handled in CMakeLists.txt)
+# Configure CMake (with Clang coverage instrumentation on Linux only - codecov is not supported for macOS)
echo "[DIAGNOSTIC] Running CMake configure"
-if [[ "$OS" == "macOS" ]]; then
- echo "[DIAGNOSTIC] Configuring for macOS (universal2 is set automatically)"
- cmake -DMACOS_STRING_FIX=ON "${SOURCE_DIR}"
+if [[ "$COVERAGE_MODE" == "true" && "$OS" == "Linux" ]]; then
+ echo "[ACTION] Configuring for Linux with Clang coverage instrumentation"
+ cmake -DARCHITECTURE="$DETECTED_ARCH" \
+ -DCMAKE_C_COMPILER=clang \
+ -DCMAKE_CXX_COMPILER=clang++ \
+ -DCMAKE_CXX_FLAGS="-fprofile-instr-generate -fcoverage-mapping" \
+ -DCMAKE_C_FLAGS="-fprofile-instr-generate -fcoverage-mapping" \
+ "${SOURCE_DIR}"
else
- echo "[DIAGNOSTIC] Configuring for Linux with architecture: $DETECTED_ARCH"
- cmake -DARCHITECTURE="$DETECTED_ARCH" "${SOURCE_DIR}"
+ if [[ "$OS" == "macOS" ]]; then
+ echo "[ACTION] Configuring for macOS (default build)"
+ cmake -DMACOS_STRING_FIX=ON "${SOURCE_DIR}"
+ else
+ echo "[ACTION] Configuring for Linux with architecture: $DETECTED_ARCH"
+ cmake -DARCHITECTURE="$DETECTED_ARCH" "${SOURCE_DIR}"
+ fi
fi
# Check if CMake configuration succeeded
|