diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..8b8ab88
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,73 @@
+name: CI
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+permissions:
+ contents: read
+
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - run: pip install ruff
+ - run: ruff check skills/ --config pyproject.toml
+ - run: ruff format --check skills/
+
+ test-iam-departures:
+ runs-on: ubuntu-latest
+ needs: lint
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - run: pip install boto3 moto pytest
+ - run: cd skills/iam-departures-remediation && pytest tests/ -v
+
+ validate-cloudformation:
+ runs-on: ubuntu-latest
+ needs: lint
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - run: pip install cfn-lint
+ - run: cfn-lint skills/iam-departures-remediation/infra/cloudformation.yaml
+ - run: cfn-lint skills/iam-departures-remediation/infra/cross_account_stackset.yaml
+
+ validate-terraform:
+ runs-on: ubuntu-latest
+ needs: lint
+ steps:
+ - uses: actions/checkout@v4
+ - uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: "1.12.0"
+ - run: cd skills/iam-departures-remediation/infra/terraform && terraform init -backend=false && terraform validate
+
+ security-scan:
+ runs-on: ubuntu-latest
+ needs: lint
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - run: pip install bandit
+ - run: bandit -r skills/ -c pyproject.toml --severity-level medium
+ - name: Check for hardcoded secrets
+ run: |
+ # Fail if any obvious secret patterns found in Python source
+ ! grep -rn "AKIA[A-Z0-9]\{16\}" skills/ --include="*.py" || exit 1
+ ! grep -rn "sk-[a-zA-Z0-9]\{20,\}" skills/ --include="*.py" || exit 1
+ ! grep -rn "ghp_[a-zA-Z0-9]\{36\}" skills/ --include="*.py" || exit 1
+ echo "No hardcoded secrets found"
diff --git a/.gitignore b/.gitignore
index c18dd8d..77629ab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,52 @@
+# Python
__pycache__/
+*.py[cod]
+*$py.class
+*.so
+*.egg-info/
+dist/
+build/
+.eggs/
+*.egg
+
+# Virtual environments
+.venv/
+venv/
+env/
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Testing
+.pytest_cache/
+.coverage
+htmlcov/
+.mypy_cache/
+.ruff_cache/
+
+# Terraform
+.terraform/
+*.tfstate
+*.tfstate.backup
+*.tfplan
+# Note: .terraform.lock.hcl is intentionally committed (HashiCorp guidance) — do not ignore it
+
+# Secrets (never commit)
+.env
+.env.*
+!.env.example
+*.pem
+*.key
+terraform.tfvars
+
+# Build artifacts
+*.zip
+*.tar.gz
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 0000000..5b592f2
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,58 @@
+# Cloud Security Skills Collection
+
+This repository contains production-ready cloud security automations structured as skills for AI agents.
+
+## Repository structure
+
+```
+skills/
+ iam-departures-remediation/ — Multi-cloud IAM cleanup for departed employees
+ cspm-aws-cis-benchmark/ — CIS AWS Foundations v3.0 (18 checks)
+ cspm-gcp-cis-benchmark/ — CIS GCP Foundations v3.0 (20 checks + 5 Vertex AI)
+ cspm-azure-cis-benchmark/ — CIS Azure Foundations v2.1 (19 checks + 5 AI Foundry)
+ vuln-remediation-pipeline/ — Auto-remediate supply chain vulnerabilities
+```
+
+## Conventions
+
+- Each skill has a `SKILL.md` with frontmatter (name, description, license, compatibility, metadata, frameworks).
+- Source code lives in `src/` within each skill directory.
+- Infrastructure-as-code lives in `infra/` (CloudFormation, Terraform, StackSets).
+- Tests live in `tests/` within each skill directory.
+- All skills are Apache 2.0 licensed.
+- Python 3.11+ required. Type hints used throughout.
+- No hardcoded credentials. All secrets via environment variables or AWS Secrets Manager.
+
+## Security model
+
+- CSPM skills are read-only (no write permissions to cloud accounts).
+- Remediation skills use least-privilege IAM with cross-account STS AssumeRole.
+- Deny policies protect root, break-glass, and emergency accounts from deletion.
+- All S3 artifacts are KMS-encrypted. DynamoDB tables use encryption at rest.
+
+## Compliance frameworks referenced
+
+MITRE ATT&CK, NIST CSF 2.0, CIS Controls v8, CIS AWS/GCP/Azure Foundations, SOC 2 TSC, ISO 27001:2022, PCI DSS 4.0, OWASP LLM Top 10, OWASP MCP Top 10.
+
+## Running checks
+
+```bash
+# AWS CIS benchmark
+pip install boto3
+python skills/cspm-aws-cis-benchmark/src/checks.py --region us-east-1
+
+# GCP CIS benchmark
+pip install google-cloud-iam google-cloud-storage google-cloud-compute
+python skills/cspm-gcp-cis-benchmark/src/checks.py --project my-project
+
+# Azure CIS benchmark
+pip install azure-identity azure-mgmt-authorization azure-mgmt-storage azure-mgmt-monitor azure-mgmt-network
+python skills/cspm-azure-cis-benchmark/src/checks.py --subscription-id SUB_ID
+
+# IAM departures tests
+cd skills/iam-departures-remediation && pip install boto3 moto pytest && pytest tests/ -v
+```
+
+## Integration with agent-bom
+
+This repo provides the security automations. [agent-bom](https://github.com/msaad00/agent-bom) provides continuous scanning and compliance validation. Use together for detection + response.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..0f3f732
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,50 @@
+# Contributing
+
+Contributions are welcome. This repo follows a skills-based structure — each security automation is a self-contained skill under `skills/`.
+
+## Adding a new skill
+
+1. Create a directory under `skills/` with a descriptive name (e.g., `skills/cspm-snowflake-cis-benchmark/`)
+2. Add a `SKILL.md` with the required frontmatter:
+
+```yaml
+---
+name: your-skill-name
+description: >-
+ One-paragraph description of what this skill does and when to use it.
+license: Apache-2.0
+compatibility: >-
+ Runtime requirements (Python version, cloud SDKs, permissions needed).
+metadata:
+ author: your-github-handle
+ version: 0.1.0
+ frameworks:
+ - Framework names this skill maps to
+ cloud: aws | gcp | azure | multi
+---
+```
+
+3. Put source code in `src/` within your skill directory
+4. Put infrastructure-as-code in `infra/` (CloudFormation, Terraform)
+5. Put tests in `tests/` — every skill should have tests
+6. Add your skill to the table in `README.md`
+
+## Code standards
+
+- Python 3.11+ with type hints
+- No hardcoded credentials — use environment variables or AWS Secrets Manager
+- Least-privilege IAM — document every permission your skill needs
+- Tests use `pytest` with `moto` for AWS mocking
+- Map to compliance frameworks where applicable (CIS, MITRE, NIST, OWASP)
+
+## Pull request process
+
+1. Fork the repo and create a feature branch
+2. Add or modify skills following the structure above
+3. Ensure tests pass: `pytest skills/your-skill/tests/ -v`
+4. Ensure linting passes: `ruff check .`
+5. Open a PR against `main` with a clear description
+
+## Security
+
+If you find a security vulnerability, do NOT open a public issue. See [SECURITY.md](SECURITY.md) for responsible disclosure instructions.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..b458186
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,190 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to the Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by the Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding any notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ Copyright 2025 msaad00
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.md b/README.md
index 5089108..61fa285 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,9 @@
# cloud-security
+[![CI](https://github.com/msaad00/cloud-security/actions/workflows/ci.yml/badge.svg)](https://github.com/msaad00/cloud-security/actions/workflows/ci.yml)
+[![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](LICENSE)
+[![Python](https://img.shields.io/badge/python-3.11%2B-blue.svg)](https://www.python.org/downloads/)
+
Production-ready cloud security automations — deployable code, CIS benchmark assessments, multi-cloud identity remediation, and compliance-mapped skills for AI agents.
## Skills
@@ -14,83 +18,130 @@ Production-ready cloud security automations — deployable code, CIS benchmark a
## Architecture — IAM Departures Remediation
-```
- EXTERNAL HR DATA SOURCES
- ┌──────────────────────────────────────────────────────────────────────┐
- │ │
- │ ┌───────────┐ ┌────────────┐ ┌────────────┐ ┌──────────────┐ │
- │ │ Workday │ │ Snowflake │ │ Databricks │ │ ClickHouse │ │
- │ │ (API) │ │ (SQL/S.I.) │ │ (Unity) │ │ (SQL) │ │
- │ └─────┬─────┘ └─────┬──────┘ └─────┬──────┘ └──────┬───────┘ │
- │ └───────────────┴───────┬───────┴─────────────────┘ │
- └────────────────────────────────┼─────────────────────────────────────┘
- │
- ▼
- ┌────────────────────────────────────────────────────────────────────────┐
- │ AWS Organization — Security OU Account │
- │ │
- │ ┌──────────────────────────────────────────┐ │
- │ │ Reconciler (src/reconciler/) │ │
- │ │ sources.py → DepartureRecord[] │ │
- │ │ change_detect.py → SHA-256 row diff │ │
- │ │ export.py → S3 manifest (KMS encrypted) │ │
- │ └─────────────────┬────────────────────────┘ │
- │ ▼ │
- │ ┌──────────────────────────┐ ┌─────────────────────────────┐ │
- │ │ S3 Departures Bucket │────▶│ EventBridge Rule │ │
- │ │ (KMS, versioned) │ │ (S3 PutObject trigger) │ │
- │ └──────────────────────────┘ └──────────────┬──────────────┘ │
- │ │ │
- │ ┌───────────────▼───────────────┐ │
- │ │ Step Function │ │
- │ ┌───── VPC ─────────────────────────────────────────────────┐ │ │
- │ │ ┌─────────────────────┐ ┌──────────────────────────┐ │ │ │
- │ │ │ Parser Lambda │───▶│ Worker Lambda │ │ │ │
- │ │ │ (read-only IAM) │ │ (write, cross-account) │ │ │ │
- │ │ └─────────────────────┘ └────────────┬─────────────┘ │ │ │
- │ └──────────────────────────────────────────┼─────────────────┘ │ │
- │ ▼ │ │
- │ ┌──────────────────────────────────────────────┐ │
- │ │ Target Accounts (via STS AssumeRole) │ │
- │ │ 1. Revoke credentials 2. Strip perms │ │
- │ │ 3. Delete IAM user │ │
- │ └──────────────────────────────────────────────┘ │
- │ │
- │ ┌──────────────────────────────────────────────────────────────┐ │
- │ │ Audit: DynamoDB (per-user) + S3 (execution logs) │ │
- │ └──────────────────────────────────────────────────────────────┘ │
- └──────────────────────────────────────────────────────────────────────┘
+```mermaid
+flowchart TD
+ subgraph HR["External HR Data Sources"]
+ WD[Workday API]
+ SF[Snowflake SQL]
+ DB[Databricks Unity]
+ CH[ClickHouse SQL]
+ end
+
+ subgraph SEC["AWS Organization — Security OU Account"]
+ REC[Reconciler<br/>SHA-256 change detect]
+ S3[S3 Manifest Bucket<br/>KMS encrypted, versioned]
+ EB[EventBridge Rule<br/>S3 PutObject trigger]
+
+ subgraph SFN["Step Function"]
+ L1[Lambda 1 — Parser<br/>validate, grace period,<br/>rehire filter]
+ L2[Lambda 2 — Worker<br/>13-step IAM cleanup]
+ end
+
+ subgraph TGT["Target Accounts via STS AssumeRole"]
+ IAM[1. Revoke credentials]
+ STRIP[2. Strip permissions]
+ DEL[3. Delete IAM user]
+ end
+
+ AUDIT[Audit Trail<br/>DynamoDB + S3 + warehouse]
+ end
+
+ subgraph CROSS["Cross-Cloud Workers"]
+ AZ[Azure Entra — 6 steps]
+ GCP[GCP IAM — 4+2 steps]
+ SNF[Snowflake — 6 steps]
+ DBX[Databricks SCIM — 4 steps]
+ end
+
+ WD --> REC
+ SF --> REC
+ DB --> REC
+ CH --> REC
+ REC -->|change detected| S3
+ S3 --> EB
+ EB --> L1
+ L1 --> L2
+ L2 --> IAM --> STRIP --> DEL
+ L2 --> AZ
+ L2 --> GCP
+ L2 --> SNF
+ L2 --> DBX
+ L2 --> AUDIT
+
+ style HR fill:#1e293b,stroke:#475569,color:#e2e8f0
+ style SEC fill:#0f172a,stroke:#334155,color:#e2e8f0
+ style SFN fill:#172554,stroke:#3b82f6,color:#e2e8f0
+ style TGT fill:#7f1d1d,stroke:#ef4444,color:#e2e8f0
+ style CROSS fill:#14532d,stroke:#22c55e,color:#e2e8f0
```
## Architecture — CSPM CIS Benchmarks
+```mermaid
+flowchart LR
+ subgraph CLOUD["Cloud Account / Project / Subscription"]
+ IAM[IAM / Identity]
+ STR[Storage / Buckets]
+ LOG[Logging + Audit]
+ NET[Network / Firewall]
+ AI[AI / ML Services]
+ end
+
+ CHK[checks.py<br/>read-only SDK calls<br/>no write permissions]
+
+ IAM --> CHK
+ STR --> CHK
+ LOG --> CHK
+ NET --> CHK
+ AI --> CHK
+
+ CHK --> JSON[JSON<br/>per-control results]
+ CHK --> CON[Console<br/>pass/fail summary]
+ CHK --> SARIF[SARIF<br/>GitHub Security tab]
+
+ style CLOUD fill:#1e293b,stroke:#475569,color:#e2e8f0
+ style CHK fill:#172554,stroke:#3b82f6,color:#e2e8f0
```
- ┌──────────────────────────────────────────────────────────┐
- │ Cloud Account / Project / Subscription │
- │ │
- │ ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐ │
- │ │ IAM / │ │ Storage │ │ Logging │ │ Network │ │
- │ │ Identity│ │ Buckets │ │ + Audit │ │ Firewall│ │
- │ └────┬────┘ └────┬────┘ └────┬────┘ └────┬────┘ │
- │ └────────────┴──────┬─────┴─────────────┘ │
- └───────────────────────────┼──────────────────────────────┘
- │ read-only SDK calls
- ▼
- ┌──────────────────────────┐
- │ checks.py │
- │ │
- │ AWS: 18 CIS v3.0 │
- │ GCP: 20 CIS + 5 AI │
- │ Azure: 19 CIS + 5 AI │
- │ │
- │ No write permissions │
- └──────────────┬───────────┘
- │
- ┌─────────────┼────────────┐
- ▼ ▼ ▼
- ┌──────────┐ ┌──────────┐ ┌──────────┐
- │ JSON │ │ Console │ │ SARIF │
- └──────────┘ └──────────┘ └──────────┘
+
+## Architecture — Vulnerability Remediation Pipeline
+
+```mermaid
+flowchart TD
+ SCAN[agent-bom scan<br/>SARIF / JSON output]
+ S3[S3 Findings Bucket<br/>KMS encrypted]
+ EB[EventBridge Rule]
+
+ subgraph SFN["Step Function"]
+ TRIAGE[Lambda 1 — Triage<br/>EPSS + KEV + policy filter]
+ PATCH[Lambda 2 — Patcher<br/>per-ecosystem fix]
+ end
+
+ subgraph ACTIONS["Remediation Actions"]
+ DEP[Dependency Upgrade<br/>7 ecosystems<br/>PR or direct apply]
+ CRED[Credential Rotation<br/>Secrets Manager / Vault]
+ QUAR[MCP Server Quarantine<br/>config rewrite + proxy deny]
+ end
+
+ NOTIFY[Notify<br/>Slack / Teams / PagerDuty]
+ AUDITDB[DynamoDB Audit Trail]
+ VERIFY[Re-scan to confirm fix]
+
+ SCAN --> S3
+ S3 --> EB
+ EB --> TRIAGE
+ TRIAGE -->|P0: immediate| PATCH
+ TRIAGE -->|P1: urgent 4h| PATCH
+ TRIAGE -->|P2: standard 72h| PATCH
+ TRIAGE -->|P3: backlog| NOTIFY
+ PATCH --> DEP
+ PATCH --> CRED
+ PATCH --> QUAR
+ PATCH --> AUDITDB
+ PATCH --> NOTIFY
+ NOTIFY --> VERIFY
+
+ style SFN fill:#172554,stroke:#3b82f6,color:#e2e8f0
+ style ACTIONS fill:#14532d,stroke:#22c55e,color:#e2e8f0
```
## What's Inside
@@ -99,7 +150,7 @@ Production-ready cloud security automations — deployable code, CIS benchmark a
Fully deployable automation that reconciles HR termination data against cloud IAM and safely removes departed-employee access.
-**Pipeline**: HR source → Reconciler → S3 manifest → EventBridge → Step Function → Parser Lambda → Worker Lambda → Target Accounts
+**Pipeline**: HR source -> Reconciler -> S3 manifest -> EventBridge -> Step Function -> Parser Lambda -> Worker Lambda -> Target Accounts
Components
@@ -113,6 +164,7 @@ Fully deployable automation that reconciles HR termination data against cloud IA
| **CloudFormation** | `infra/cloudformation.yaml` | Full stack: roles, Lambdas, Step Function, S3, DynamoDB |
| **StackSets** | `infra/cross_account_stackset.yaml` | Org-wide cross-account remediation role |
| **IAM Policies** | `infra/iam_policies/` | Least-privilege policy documents per component |
+| **Terraform** | `infra/terraform/` | HCL alternative to CloudFormation |
| **Tests** | `tests/` | Unit tests covering parser, worker, reconciler, cross-cloud |
@@ -179,22 +231,55 @@ python skills/cspm-azure-cis-benchmark/src/checks.py --subscription-id SUB_ID
-## Security Model
+### vuln-remediation-pipeline
+
+Auto-remediate supply chain vulnerabilities found by [agent-bom](https://github.com/msaad00/agent-bom) — from scan findings to patched dependencies, rotated credentials, and quarantined MCP servers.
+```bash
+# Scan and export findings for the pipeline
+agent-bom scan -f sarif -o findings.sarif --enrich --fail-on-kev
+
+# Upload to S3 trigger bucket
+aws s3 cp findings.sarif s3://vuln-remediation-findings/incoming/
```
- ZERO TRUST LEAST PRIVILEGE DEFENSE IN DEPTH
- ┌────────────────┐ ┌────────────────┐ ┌────────────────┐
- │ Cross-account │ │ Parser: read │ │ Deny policies │
- │ scoped by │ │ Worker: scoped │ │ on root, │
- │ PrincipalOrgID │ │ write per │ │ break-glass-*, │
- │ │ │ component │ │ emergency-* │
- │ STS AssumeRole │ │ CSPM: read- │ │ │
- │ per account │ │ only audits │ │ KMS encryption │
- │ │ │ No write perms │ │ everywhere │
- │ VPC isolation │ │ for checks │ │ │
- └────────────────┘ └────────────────┘ │ Dual audit: │
- │ DDB + S3 + DW │
- └────────────────┘
+
+
+Triage tiers
+
+| Tier | Criteria | SLA | Action |
+|------|----------|-----|--------|
+| P0 | CISA KEV or CVSS >= 9.0 | 1h | Auto-patch + quarantine if needed |
+| P1 | CVSS >= 7.0 AND EPSS > 0.7 | 4h | Auto-patch, PR if risky |
+| P2 | CVSS >= 4.0 OR EPSS > 0.3 | 72h | Create PR for review |
+| P3 | CVSS < 4.0 AND EPSS < 0.3 | 30d | Notify, add to backlog |
+
+
+
+## Security Model
+
+```mermaid
+flowchart LR
+ subgraph ZT["Zero Trust"]
+ A1[Cross-account scoped<br/>by PrincipalOrgID]
+ A2[STS AssumeRole<br/>per account]
+ A3[VPC isolation]
+ end
+
+ subgraph LP["Least Privilege"]
+ B1[Parser: read-only IAM]
+ B2[Worker: scoped write<br/>per component]
+ B3[CSPM: read-only<br/>audits only]
+ end
+
+ subgraph DD["Defense in Depth"]
+ C1[Deny policies on<br/>root, break-glass,<br/>emergency accounts]
+ C2[KMS encryption<br/>everywhere]
+ C3[Dual audit:<br/>DDB + S3 + warehouse]
+ end
+
+ style ZT fill:#172554,stroke:#3b82f6,color:#e2e8f0
+ style LP fill:#14532d,stroke:#22c55e,color:#e2e8f0
+ style DD fill:#7f1d1d,stroke:#ef4444,color:#e2e8f0
```
## Compliance Framework Mapping
@@ -204,11 +289,14 @@ python skills/cspm-azure-cis-benchmark/src/checks.py --subscription-id SUB_ID
| **CIS AWS Foundations v3.0** | 18 controls (IAM, S3, CloudTrail, VPC) | `cspm-aws-cis-benchmark/` |
| **CIS GCP Foundations v3.0** | 20 controls + 5 Vertex AI | `cspm-gcp-cis-benchmark/` |
| **CIS Azure Foundations v2.1** | 19 controls + 5 AI Foundry | `cspm-azure-cis-benchmark/` |
-| **MITRE ATT&CK** | T1078.004, T1098.001, T1087.004, T1531, T1552 | Lambda docstrings |
+| **MITRE ATT&CK** | T1078.004, T1098.001, T1087.004, T1531, T1552, T1195.002, T1210 | Lambda docstrings |
| **NIST CSF 2.0** | PR.AC-1, PR.AC-4, DE.CM-3, RS.MI-2 | Lambda docstrings |
-| **CIS Controls v8** | 5.3, 6.1, 6.2, 6.5 | Lambda docstrings |
-| **SOC 2 TSC** | CC6.1, CC6.2, CC6.3 | Worker Lambda |
-| **ISO 27001:2022** | A.5.15–A.8.24 (12 controls) | CSPM check scripts |
+| **CIS Controls v8** | 5.3, 6.1, 6.2, 6.5, 7.1, 7.2, 7.3, 7.4, 16.1 | Worker + Patcher Lambdas |
+| **SOC 2 TSC** | CC6.1, CC6.2, CC6.3, CC7.1 | Worker + Triage Lambdas |
+| **ISO 27001:2022** | A.5.15-A.8.24 (12 controls) | CSPM check scripts |
+| **PCI DSS 4.0** | 2.2, 7.1, 8.3, 10.1 | CSPM check scripts |
+| **OWASP LLM Top 10** | LLM-05, LLM-07, LLM-08 | vuln-remediation-pipeline |
+| **OWASP MCP Top 10** | MCP-04 | vuln-remediation-pipeline |
## Multi-Cloud Support
@@ -235,6 +323,10 @@ python skills/cspm-aws-cis-benchmark/src/checks.py --region us-east-1
pip install google-cloud-iam google-cloud-storage google-cloud-compute
python skills/cspm-gcp-cis-benchmark/src/checks.py --project my-project
+# Run Azure CIS benchmark
+pip install azure-identity azure-mgmt-authorization azure-mgmt-storage azure-mgmt-monitor azure-mgmt-network
+python skills/cspm-azure-cis-benchmark/src/checks.py --subscription-id SUB_ID
+
# Run IAM departures tests
cd skills/iam-departures-remediation
pip install boto3 moto pytest
@@ -242,21 +334,31 @@ pytest tests/ -v
# Validate with agent-bom
pip install agent-bom
-agent-bom scan --aws --aws-cis-benchmark
+agent-bom skills scan skills/
```
## Integration with agent-bom
-This repo has the security automations. [agent-bom](https://github.com/msaad00/agent-bom) provides continuous scanning and compliance validation:
+This repo provides the security automations. [agent-bom](https://github.com/msaad00/agent-bom) provides continuous scanning and compliance validation:
| agent-bom Feature | Use Case |
|--------------------|----------|
-| `cis_benchmark` | Built-in CIS checks for AWS + Snowflake (continuous monitoring) |
+| `cis_benchmark` | Built-in CIS checks for AWS/GCP/Azure/Snowflake (continuous monitoring) |
| `scan --aws` | Discover Lambda dependencies, check for CVEs |
| `blast_radius` | Map impact of orphaned IAM credentials |
-| `compliance` | 10-framework compliance posture check |
+| `compliance` | 15-framework compliance posture check |
| `policy_check` | Policy-as-code gates for CI/CD |
+| `skills scan` | Scan skill files for security risks |
+| `graph` | Visualize cloud resource dependencies + attack paths |
+
+## Contributing
+
+See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines on adding new skills.
+
+## Security
+
+See [SECURITY.md](SECURITY.md) for vulnerability reporting policy.
## License
-Apache 2.0
+[Apache 2.0](LICENSE)
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..a4b61b4
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,38 @@
+# Security Policy
+
+## Reporting a Vulnerability
+
+If you discover a security vulnerability in this project, please report it responsibly.
+
+**Do NOT open a public GitHub issue for security vulnerabilities.**
+
+Instead, please email security findings to the maintainer or use [GitHub's private vulnerability reporting](https://github.com/msaad00/cloud-security/security/advisories/new).
+
+### What to include
+
+- Description of the vulnerability
+- Steps to reproduce
+- Potential impact
+- Suggested fix (if any)
+
+### Response timeline
+
+- **Acknowledgement**: Within 48 hours
+- **Assessment**: Within 7 days
+- **Fix**: Critical vulnerabilities patched within 14 days
+
+## Security practices in this repo
+
+- All credentials are loaded from environment variables, never hardcoded
+- CSPM skills use read-only cloud permissions (SecurityAudit / Viewer roles)
+- Remediation skills use least-privilege IAM with explicit deny policies on protected accounts
+- S3 artifacts are KMS-encrypted
+- Cross-account access is scoped by `aws:PrincipalOrgID`
+- All Lambda functions run in VPC with no public internet access (unless NAT required)
+
+## Supported versions
+
+| Version | Supported |
+|---------|-----------|
+| Latest main | Yes |
+| Feature branches | No |
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..ded45f2
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,27 @@
+[project]
+name = "cloud-security-skills"
+version = "0.2.0"
+description = "Cloud security skills collection — reusable security automation for AI agents"
+requires-python = ">=3.11"
+license = "Apache-2.0"
+
+[tool.ruff]
+target-version = "py311"
+line-length = 140
+
+[tool.ruff.lint]
+select = ["E", "F", "W", "I"]
+ignore = ["E501"] # Existing code uses long lines for audit log strings
+
+[tool.ruff.format]
+quote-style = "double"
+
+[tool.bandit]
+exclude_dirs = ["tests"]
+skips = ["B101"]
+
+[tool.pytest.ini_options]
+testpaths = ["skills"]
+python_files = "test_*.py"
+python_classes = "Test*"
+python_functions = "test_*"
diff --git a/skills/cspm-aws-cis-benchmark/src/checks.py b/skills/cspm-aws-cis-benchmark/src/checks.py
index ebb5a75..74d3b86 100644
--- a/skills/cspm-aws-cis-benchmark/src/checks.py
+++ b/skills/cspm-aws-cis-benchmark/src/checks.py
@@ -22,11 +22,11 @@
import boto3
from botocore.exceptions import ClientError
-
# ---------------------------------------------------------------------------
# Data model
# ---------------------------------------------------------------------------
+
@dataclass
class Finding:
control_id: str
@@ -44,22 +44,32 @@ class Finding:
# Section 1 — IAM
# ---------------------------------------------------------------------------
+
def check_1_1_root_mfa(iam) -> Finding:
"""CIS 1.1 — MFA on root account."""
try:
summary = iam.get_account_summary()["SummaryMap"]
has_mfa = summary.get("AccountMFAEnabled", 0) == 1
return Finding(
- control_id="1.1", title="MFA on root account", section="iam",
- severity="CRITICAL", status="PASS" if has_mfa else "FAIL",
+ control_id="1.1",
+ title="MFA on root account",
+ section="iam",
+ severity="CRITICAL",
+ status="PASS" if has_mfa else "FAIL",
detail="Root MFA enabled" if has_mfa else "Root account has no MFA",
- nist_csf="PR.AC-1", iso_27001="A.8.5",
+ nist_csf="PR.AC-1",
+ iso_27001="A.8.5",
)
except ClientError as e:
return Finding(
- control_id="1.1", title="MFA on root account", section="iam",
- severity="CRITICAL", status="ERROR", detail=str(e),
- nist_csf="PR.AC-1", iso_27001="A.8.5",
+ control_id="1.1",
+ title="MFA on root account",
+ section="iam",
+ severity="CRITICAL",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-1",
+ iso_27001="A.8.5",
)
@@ -77,16 +87,26 @@ def check_1_2_user_mfa(iam) -> Finding:
if not mfa_devices:
no_mfa.append(user["UserName"])
return Finding(
- control_id="1.2", title="MFA for console users", section="iam",
- severity="HIGH", status="FAIL" if no_mfa else "PASS",
+ control_id="1.2",
+ title="MFA for console users",
+ section="iam",
+ severity="HIGH",
+ status="FAIL" if no_mfa else "PASS",
detail=f"{len(no_mfa)} console users without MFA" if no_mfa else "All console users have MFA",
- nist_csf="PR.AC-1", iso_27001="A.8.5", resources=no_mfa,
+ nist_csf="PR.AC-1",
+ iso_27001="A.8.5",
+ resources=no_mfa,
)
except ClientError as e:
return Finding(
- control_id="1.2", title="MFA for console users", section="iam",
- severity="HIGH", status="ERROR", detail=str(e),
- nist_csf="PR.AC-1", iso_27001="A.8.5",
+ control_id="1.2",
+ title="MFA for console users",
+ section="iam",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-1",
+ iso_27001="A.8.5",
)
@@ -109,16 +129,26 @@ def check_1_3_stale_credentials(iam) -> Finding:
except (ValueError, IndexError):
pass
return Finding(
- control_id="1.3", title="Credentials unused 45+ days", section="iam",
- severity="MEDIUM", status="FAIL" if stale else "PASS",
+ control_id="1.3",
+ title="Credentials unused 45+ days",
+ section="iam",
+ severity="MEDIUM",
+ status="FAIL" if stale else "PASS",
detail=f"{len(stale)} users with stale credentials" if stale else "No stale credentials",
- nist_csf="PR.AC-1", iso_27001="A.5.18", resources=stale,
+ nist_csf="PR.AC-1",
+ iso_27001="A.5.18",
+ resources=stale,
)
except ClientError as e:
return Finding(
- control_id="1.3", title="Credentials unused 45+ days", section="iam",
- severity="MEDIUM", status="ERROR", detail=str(e),
- nist_csf="PR.AC-1", iso_27001="A.5.18",
+ control_id="1.3",
+ title="Credentials unused 45+ days",
+ section="iam",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-1",
+ iso_27001="A.5.18",
)
@@ -134,16 +164,26 @@ def check_1_4_key_rotation(iam) -> Finding:
if age > 90:
old_keys.append(f"{user['UserName']}:{key['AccessKeyId']} ({age}d)")
return Finding(
- control_id="1.4", title="Access keys rotated 90 days", section="iam",
- severity="MEDIUM", status="FAIL" if old_keys else "PASS",
+ control_id="1.4",
+ title="Access keys rotated 90 days",
+ section="iam",
+ severity="MEDIUM",
+ status="FAIL" if old_keys else "PASS",
detail=f"{len(old_keys)} keys older than 90 days" if old_keys else "All keys within 90 days",
- nist_csf="PR.AC-1", iso_27001="A.5.17", resources=old_keys,
+ nist_csf="PR.AC-1",
+ iso_27001="A.5.17",
+ resources=old_keys,
)
except ClientError as e:
return Finding(
- control_id="1.4", title="Access keys rotated 90 days", section="iam",
- severity="MEDIUM", status="ERROR", detail=str(e),
- nist_csf="PR.AC-1", iso_27001="A.5.17",
+ control_id="1.4",
+ title="Access keys rotated 90 days",
+ section="iam",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-1",
+ iso_27001="A.5.17",
)
@@ -163,16 +203,25 @@ def check_1_5_password_policy(iam) -> Finding:
if not policy.get("RequireLowercaseCharacters", False):
issues.append("RequireLowercase=false")
return Finding(
- control_id="1.5", title="Password policy strength", section="iam",
- severity="MEDIUM", status="FAIL" if issues else "PASS",
+ control_id="1.5",
+ title="Password policy strength",
+ section="iam",
+ severity="MEDIUM",
+ status="FAIL" if issues else "PASS",
detail="; ".join(issues) if issues else "Password policy meets CIS requirements",
- nist_csf="PR.AC-1", iso_27001="A.5.17",
+ nist_csf="PR.AC-1",
+ iso_27001="A.5.17",
)
except iam.exceptions.NoSuchEntityException:
return Finding(
- control_id="1.5", title="Password policy strength", section="iam",
- severity="MEDIUM", status="FAIL", detail="No password policy configured",
- nist_csf="PR.AC-1", iso_27001="A.5.17",
+ control_id="1.5",
+ title="Password policy strength",
+ section="iam",
+ severity="MEDIUM",
+ status="FAIL",
+ detail="No password policy configured",
+ nist_csf="PR.AC-1",
+ iso_27001="A.5.17",
)
@@ -182,16 +231,25 @@ def check_1_6_no_root_keys(iam) -> Finding:
summary = iam.get_account_summary()["SummaryMap"]
root_keys = summary.get("AccountAccessKeysPresent", 0)
return Finding(
- control_id="1.6", title="No root access keys", section="iam",
- severity="CRITICAL", status="PASS" if root_keys == 0 else "FAIL",
+ control_id="1.6",
+ title="No root access keys",
+ section="iam",
+ severity="CRITICAL",
+ status="PASS" if root_keys == 0 else "FAIL",
detail="No root access keys" if root_keys == 0 else f"Root has {root_keys} access key(s)",
- nist_csf="PR.AC-4", iso_27001="A.8.2",
+ nist_csf="PR.AC-4",
+ iso_27001="A.8.2",
)
except ClientError as e:
return Finding(
- control_id="1.6", title="No root access keys", section="iam",
- severity="CRITICAL", status="ERROR", detail=str(e),
- nist_csf="PR.AC-4", iso_27001="A.8.2",
+ control_id="1.6",
+ title="No root access keys",
+ section="iam",
+ severity="CRITICAL",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-4",
+ iso_27001="A.8.2",
)
@@ -204,16 +262,26 @@ def check_1_7_no_inline_policies(iam) -> Finding:
if policies:
inline_users.append(user["UserName"])
return Finding(
- control_id="1.7", title="No inline IAM policies", section="iam",
- severity="LOW", status="FAIL" if inline_users else "PASS",
+ control_id="1.7",
+ title="No inline IAM policies",
+ section="iam",
+ severity="LOW",
+ status="FAIL" if inline_users else "PASS",
detail=f"{len(inline_users)} users with inline policies" if inline_users else "No inline policies",
- nist_csf="PR.AC-4", iso_27001="A.5.15", resources=inline_users,
+ nist_csf="PR.AC-4",
+ iso_27001="A.5.15",
+ resources=inline_users,
)
except ClientError as e:
return Finding(
- control_id="1.7", title="No inline IAM policies", section="iam",
- severity="LOW", status="ERROR", detail=str(e),
- nist_csf="PR.AC-4", iso_27001="A.5.15",
+ control_id="1.7",
+ title="No inline IAM policies",
+ section="iam",
+ severity="LOW",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-4",
+ iso_27001="A.5.15",
)
@@ -221,6 +289,7 @@ def check_1_7_no_inline_policies(iam) -> Finding:
# Section 2 — Storage
# ---------------------------------------------------------------------------
+
def check_2_1_s3_encryption(s3) -> Finding:
"""CIS 2.1 — S3 default encryption."""
try:
@@ -233,16 +302,26 @@ def check_2_1_s3_encryption(s3) -> Finding:
if e.response["Error"]["Code"] == "ServerSideEncryptionConfigurationNotFoundError":
unencrypted.append(bucket["Name"])
return Finding(
- control_id="2.1", title="S3 default encryption", section="storage",
- severity="HIGH", status="FAIL" if unencrypted else "PASS",
+ control_id="2.1",
+ title="S3 default encryption",
+ section="storage",
+ severity="HIGH",
+ status="FAIL" if unencrypted else "PASS",
detail=f"{len(unencrypted)} buckets without encryption" if unencrypted else "All buckets encrypted",
- nist_csf="PR.DS-1", iso_27001="A.8.24", resources=unencrypted,
+ nist_csf="PR.DS-1",
+ iso_27001="A.8.24",
+ resources=unencrypted,
)
except ClientError as e:
return Finding(
- control_id="2.1", title="S3 default encryption", section="storage",
- severity="HIGH", status="ERROR", detail=str(e),
- nist_csf="PR.DS-1", iso_27001="A.8.24",
+ control_id="2.1",
+ title="S3 default encryption",
+ section="storage",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.DS-1",
+ iso_27001="A.8.24",
)
@@ -256,16 +335,26 @@ def check_2_2_s3_logging(s3) -> Finding:
if "LoggingEnabled" not in logging_config:
no_logging.append(bucket["Name"])
return Finding(
- control_id="2.2", title="S3 server access logging", section="storage",
- severity="MEDIUM", status="FAIL" if no_logging else "PASS",
+ control_id="2.2",
+ title="S3 server access logging",
+ section="storage",
+ severity="MEDIUM",
+ status="FAIL" if no_logging else "PASS",
detail=f"{len(no_logging)} buckets without logging" if no_logging else "All buckets have logging",
- nist_csf="DE.AE-3", iso_27001="A.8.15", resources=no_logging,
+ nist_csf="DE.AE-3",
+ iso_27001="A.8.15",
+ resources=no_logging,
)
except ClientError as e:
return Finding(
- control_id="2.2", title="S3 server access logging", section="storage",
- severity="MEDIUM", status="ERROR", detail=str(e),
- nist_csf="DE.AE-3", iso_27001="A.8.15",
+ control_id="2.2",
+ title="S3 server access logging",
+ section="storage",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="DE.AE-3",
+ iso_27001="A.8.15",
)
@@ -277,26 +366,40 @@ def check_2_3_s3_public_access(s3) -> Finding:
for bucket in buckets:
try:
pab = s3.get_public_access_block(Bucket=bucket["Name"])["PublicAccessBlockConfiguration"]
- if not all([
- pab.get("BlockPublicAcls", False),
- pab.get("IgnorePublicAcls", False),
- pab.get("BlockPublicPolicy", False),
- pab.get("RestrictPublicBuckets", False),
- ]):
+ if not all(
+ [
+ pab.get("BlockPublicAcls", False),
+ pab.get("IgnorePublicAcls", False),
+ pab.get("BlockPublicPolicy", False),
+ pab.get("RestrictPublicBuckets", False),
+ ]
+ ):
public_buckets.append(bucket["Name"])
except ClientError:
public_buckets.append(bucket["Name"])
return Finding(
- control_id="2.3", title="S3 public access blocked", section="storage",
- severity="CRITICAL", status="FAIL" if public_buckets else "PASS",
- detail=f"{len(public_buckets)} buckets without full public access block" if public_buckets else "All buckets block public access",
- nist_csf="PR.AC-3", iso_27001="A.8.3", resources=public_buckets,
+ control_id="2.3",
+ title="S3 public access blocked",
+ section="storage",
+ severity="CRITICAL",
+ status="FAIL" if public_buckets else "PASS",
+ detail=f"{len(public_buckets)} buckets without full public access block"
+ if public_buckets
+ else "All buckets block public access",
+ nist_csf="PR.AC-3",
+ iso_27001="A.8.3",
+ resources=public_buckets,
)
except ClientError as e:
return Finding(
- control_id="2.3", title="S3 public access blocked", section="storage",
- severity="CRITICAL", status="ERROR", detail=str(e),
- nist_csf="PR.AC-3", iso_27001="A.8.3",
+ control_id="2.3",
+ title="S3 public access blocked",
+ section="storage",
+ severity="CRITICAL",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-3",
+ iso_27001="A.8.3",
)
@@ -310,16 +413,26 @@ def check_2_4_s3_versioning(s3) -> Finding:
if versioning.get("Status") != "Enabled":
no_versioning.append(bucket["Name"])
return Finding(
- control_id="2.4", title="S3 versioning enabled", section="storage",
- severity="MEDIUM", status="FAIL" if no_versioning else "PASS",
+ control_id="2.4",
+ title="S3 versioning enabled",
+ section="storage",
+ severity="MEDIUM",
+ status="FAIL" if no_versioning else "PASS",
detail=f"{len(no_versioning)} buckets without versioning" if no_versioning else "All buckets versioned",
- nist_csf="PR.DS-1", iso_27001="A.8.13", resources=no_versioning,
+ nist_csf="PR.DS-1",
+ iso_27001="A.8.13",
+ resources=no_versioning,
)
except ClientError as e:
return Finding(
- control_id="2.4", title="S3 versioning enabled", section="storage",
- severity="MEDIUM", status="ERROR", detail=str(e),
- nist_csf="PR.DS-1", iso_27001="A.8.13",
+ control_id="2.4",
+ title="S3 versioning enabled",
+ section="storage",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.DS-1",
+ iso_27001="A.8.13",
)
@@ -327,6 +440,7 @@ def check_2_4_s3_versioning(s3) -> Finding:
# Section 3 — Logging
# ---------------------------------------------------------------------------
+
def check_3_1_cloudtrail_multiregion(ct) -> Finding:
"""CIS 3.1 — CloudTrail multi-region enabled."""
try:
@@ -338,16 +452,26 @@ def check_3_1_cloudtrail_multiregion(ct) -> Finding:
if status.get("IsLogging"):
active_mr.append(name)
return Finding(
- control_id="3.1", title="CloudTrail multi-region", section="logging",
- severity="CRITICAL", status="PASS" if active_mr else "FAIL",
+ control_id="3.1",
+ title="CloudTrail multi-region",
+ section="logging",
+ severity="CRITICAL",
+ status="PASS" if active_mr else "FAIL",
detail=f"{len(active_mr)} active multi-region trail(s)" if active_mr else "No active multi-region trail",
- nist_csf="DE.AE-3", iso_27001="A.8.15", resources=active_mr,
+ nist_csf="DE.AE-3",
+ iso_27001="A.8.15",
+ resources=active_mr,
)
except ClientError as e:
return Finding(
- control_id="3.1", title="CloudTrail multi-region", section="logging",
- severity="CRITICAL", status="ERROR", detail=str(e),
- nist_csf="DE.AE-3", iso_27001="A.8.15",
+ control_id="3.1",
+ title="CloudTrail multi-region",
+ section="logging",
+ severity="CRITICAL",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="DE.AE-3",
+ iso_27001="A.8.15",
)
@@ -357,16 +481,26 @@ def check_3_2_cloudtrail_validation(ct) -> Finding:
trails = ct.describe_trails()["trailList"]
no_validation = [t["Name"] for t in trails if not t.get("LogFileValidationEnabled")]
return Finding(
- control_id="3.2", title="CloudTrail log validation", section="logging",
- severity="HIGH", status="FAIL" if no_validation else "PASS",
+ control_id="3.2",
+ title="CloudTrail log validation",
+ section="logging",
+ severity="HIGH",
+ status="FAIL" if no_validation else "PASS",
detail=f"{len(no_validation)} trails without log validation" if no_validation else "All trails have log validation",
- nist_csf="PR.DS-6", iso_27001="A.8.15", resources=no_validation,
+ nist_csf="PR.DS-6",
+ iso_27001="A.8.15",
+ resources=no_validation,
)
except ClientError as e:
return Finding(
- control_id="3.2", title="CloudTrail log validation", section="logging",
- severity="HIGH", status="ERROR", detail=str(e),
- nist_csf="PR.DS-6", iso_27001="A.8.15",
+ control_id="3.2",
+ title="CloudTrail log validation",
+ section="logging",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.DS-6",
+ iso_27001="A.8.15",
)
@@ -381,26 +515,40 @@ def check_3_3_cloudtrail_s3_not_public(ct, s3) -> Finding:
continue
try:
pab = s3.get_public_access_block(Bucket=bucket)["PublicAccessBlockConfiguration"]
- if not all([
- pab.get("BlockPublicAcls", False),
- pab.get("IgnorePublicAcls", False),
- pab.get("BlockPublicPolicy", False),
- pab.get("RestrictPublicBuckets", False),
- ]):
+ if not all(
+ [
+ pab.get("BlockPublicAcls", False),
+ pab.get("IgnorePublicAcls", False),
+ pab.get("BlockPublicPolicy", False),
+ pab.get("RestrictPublicBuckets", False),
+ ]
+ ):
public_trail_buckets.append(bucket)
except ClientError:
public_trail_buckets.append(bucket)
return Finding(
- control_id="3.3", title="CloudTrail S3 not public", section="logging",
- severity="CRITICAL", status="FAIL" if public_trail_buckets else "PASS",
- detail=f"{len(public_trail_buckets)} trail buckets without public access block" if public_trail_buckets else "All trail buckets block public access",
- nist_csf="PR.AC-3", iso_27001="A.8.3", resources=public_trail_buckets,
+ control_id="3.3",
+ title="CloudTrail S3 not public",
+ section="logging",
+ severity="CRITICAL",
+ status="FAIL" if public_trail_buckets else "PASS",
+ detail=f"{len(public_trail_buckets)} trail buckets without public access block"
+ if public_trail_buckets
+ else "All trail buckets block public access",
+ nist_csf="PR.AC-3",
+ iso_27001="A.8.3",
+ resources=public_trail_buckets,
)
except ClientError as e:
return Finding(
- control_id="3.3", title="CloudTrail S3 not public", section="logging",
- severity="CRITICAL", status="ERROR", detail=str(e),
- nist_csf="PR.AC-3", iso_27001="A.8.3",
+ control_id="3.3",
+ title="CloudTrail S3 not public",
+ section="logging",
+ severity="CRITICAL",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-3",
+ iso_27001="A.8.3",
)
@@ -409,16 +557,25 @@ def check_3_4_cloudwatch_alarms(cw) -> Finding:
try:
alarms = cw.describe_alarms()["MetricAlarms"]
return Finding(
- control_id="3.4", title="CloudWatch alarms configured", section="logging",
- severity="MEDIUM", status="PASS" if alarms else "FAIL",
+ control_id="3.4",
+ title="CloudWatch alarms configured",
+ section="logging",
+ severity="MEDIUM",
+ status="PASS" if alarms else "FAIL",
detail=f"{len(alarms)} alarm(s) configured" if alarms else "No CloudWatch alarms configured",
- nist_csf="DE.CM-1", iso_27001="A.8.16",
+ nist_csf="DE.CM-1",
+ iso_27001="A.8.16",
)
except ClientError as e:
return Finding(
- control_id="3.4", title="CloudWatch alarms configured", section="logging",
- severity="MEDIUM", status="ERROR", detail=str(e),
- nist_csf="DE.CM-1", iso_27001="A.8.16",
+ control_id="3.4",
+ title="CloudWatch alarms configured",
+ section="logging",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="DE.CM-1",
+ iso_27001="A.8.16",
)
@@ -426,6 +583,7 @@ def check_3_4_cloudwatch_alarms(cw) -> Finding:
# Section 4 — Networking
# ---------------------------------------------------------------------------
+
def _check_unrestricted_port(ec2, port: int, control_id: str, title: str) -> Finding:
"""Check for 0.0.0.0/0 on a specific port in security groups."""
try:
@@ -443,16 +601,26 @@ def _check_unrestricted_port(ec2, port: int, control_id: str, title: str) -> Fin
if ip_range.get("CidrIpv6") == "::/0":
open_sgs.append(f"{sg['GroupId']} ({sg.get('GroupName', '')})")
return Finding(
- control_id=control_id, title=title, section="networking",
- severity="HIGH", status="FAIL" if open_sgs else "PASS",
+ control_id=control_id,
+ title=title,
+ section="networking",
+ severity="HIGH",
+ status="FAIL" if open_sgs else "PASS",
detail=f"{len(open_sgs)} SGs allow 0.0.0.0/0:{port}" if open_sgs else f"No SGs allow unrestricted port {port}",
- nist_csf="PR.AC-5", iso_27001="A.8.20", resources=open_sgs,
+ nist_csf="PR.AC-5",
+ iso_27001="A.8.20",
+ resources=open_sgs,
)
except ClientError as e:
return Finding(
- control_id=control_id, title=title, section="networking",
- severity="HIGH", status="ERROR", detail=str(e),
- nist_csf="PR.AC-5", iso_27001="A.8.20",
+ control_id=control_id,
+ title=title,
+ section="networking",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-5",
+ iso_27001="A.8.20",
)
@@ -474,16 +642,26 @@ def check_4_3_vpc_flow_logs(ec2) -> Finding:
vpc_ids_with_logs = {fl["ResourceId"] for fl in flow_logs if fl.get("ResourceId")}
no_logs = [v["VpcId"] for v in vpcs if v["VpcId"] not in vpc_ids_with_logs]
return Finding(
- control_id="4.3", title="VPC flow logs enabled", section="networking",
- severity="MEDIUM", status="FAIL" if no_logs else "PASS",
+ control_id="4.3",
+ title="VPC flow logs enabled",
+ section="networking",
+ severity="MEDIUM",
+ status="FAIL" if no_logs else "PASS",
detail=f"{len(no_logs)} VPCs without flow logs" if no_logs else "All VPCs have flow logs",
- nist_csf="DE.CM-1", iso_27001="A.8.16", resources=no_logs,
+ nist_csf="DE.CM-1",
+ iso_27001="A.8.16",
+ resources=no_logs,
)
except ClientError as e:
return Finding(
- control_id="4.3", title="VPC flow logs enabled", section="networking",
- severity="MEDIUM", status="ERROR", detail=str(e),
- nist_csf="DE.CM-1", iso_27001="A.8.16",
+ control_id="4.3",
+ title="VPC flow logs enabled",
+ section="networking",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="DE.CM-1",
+ iso_27001="A.8.16",
)
@@ -493,12 +671,21 @@ def check_4_3_vpc_flow_logs(ec2) -> Finding:
SECTIONS: dict[str, list] = {
"iam": [
- check_1_1_root_mfa, check_1_2_user_mfa, check_1_3_stale_credentials,
- check_1_4_key_rotation, check_1_5_password_policy, check_1_6_no_root_keys,
+ check_1_1_root_mfa,
+ check_1_2_user_mfa,
+ check_1_3_stale_credentials,
+ check_1_4_key_rotation,
+ check_1_5_password_policy,
+ check_1_6_no_root_keys,
check_1_7_no_inline_policies,
],
"storage": [check_2_1_s3_encryption, check_2_2_s3_logging, check_2_3_s3_public_access, check_2_4_s3_versioning],
- "logging": [check_3_1_cloudtrail_multiregion, check_3_2_cloudtrail_validation, check_3_3_cloudtrail_s3_not_public, check_3_4_cloudwatch_alarms],
+ "logging": [
+ check_3_1_cloudtrail_multiregion,
+ check_3_2_cloudtrail_validation,
+ check_3_3_cloudtrail_s3_not_public,
+ check_3_4_cloudwatch_alarms,
+ ],
"networking": [check_4_1_no_unrestricted_ssh, check_4_2_no_unrestricted_rdp, check_4_3_vpc_flow_logs],
}
@@ -556,9 +743,9 @@ def print_summary(findings: list[Finding]) -> None:
errors = sum(1 for f in findings if f.status == "ERROR")
total = len(findings)
- print(f"\n{'='*60}")
- print(f" CIS AWS Foundations v3.0 — Assessment Results")
- print(f"{'='*60}\n")
+ print(f"\n{'=' * 60}")
+ print(" CIS AWS Foundations v3.0 — Assessment Results")
+ print(f"{'=' * 60}\n")
current_section = ""
for f in findings:
@@ -575,11 +762,11 @@ def print_summary(findings: list[Finding]) -> None:
if len(f.resources) > 5:
print(f" ... and {len(f.resources) - 5} more")
- print(f"\n{'─'*60}")
+ print(f"\n{'─' * 60}")
pct = (passed / total * 100) if total else 0
print(f" Score: {passed}/{total} passed ({pct:.0f}%)")
print(f" PASS: {passed} FAIL: {failed} ERROR: {errors}")
- print(f"{'─'*60}\n")
+ print(f"{'─' * 60}\n")
def main():
diff --git a/skills/cspm-azure-cis-benchmark/src/checks.py b/skills/cspm-azure-cis-benchmark/src/checks.py
index 0c5670b..9371c09 100644
--- a/skills/cspm-azure-cis-benchmark/src/checks.py
+++ b/skills/cspm-azure-cis-benchmark/src/checks.py
@@ -18,11 +18,11 @@
import sys
from dataclasses import asdict, dataclass, field
-
# ---------------------------------------------------------------------------
# Data model
# ---------------------------------------------------------------------------
+
@dataclass
class Finding:
control_id: str
@@ -39,6 +39,7 @@ class Finding:
# Section 2 — Storage
# ---------------------------------------------------------------------------
+
def check_2_3_no_public_blob(storage_client, subscription_id: str) -> Finding:
"""CIS 2.3 — No public blob access."""
try:
@@ -48,15 +49,24 @@ def check_2_3_no_public_blob(storage_client, subscription_id: str) -> Finding:
if account.allow_blob_public_access:
public_accounts.append(account.name)
return Finding(
- control_id="2.3", title="No public blob access", section="storage",
- severity="CRITICAL", status="FAIL" if public_accounts else "PASS",
+ control_id="2.3",
+ title="No public blob access",
+ section="storage",
+ severity="CRITICAL",
+ status="FAIL" if public_accounts else "PASS",
detail=f"{len(public_accounts)} accounts allow public blob access" if public_accounts else "No public blob access",
- nist_csf="PR.AC-3", resources=public_accounts,
+ nist_csf="PR.AC-3",
+ resources=public_accounts,
)
except Exception as e:
return Finding(
- control_id="2.3", title="No public blob access", section="storage",
- severity="CRITICAL", status="ERROR", detail=str(e), nist_csf="PR.AC-3",
+ control_id="2.3",
+ title="No public blob access",
+ section="storage",
+ severity="CRITICAL",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-3",
)
@@ -69,15 +79,24 @@ def check_2_2_https_only(storage_client, subscription_id: str) -> Finding:
if not account.enable_https_traffic_only:
not_https.append(account.name)
return Finding(
- control_id="2.2", title="Storage HTTPS-only", section="storage",
- severity="HIGH", status="FAIL" if not_https else "PASS",
+ control_id="2.2",
+ title="Storage HTTPS-only",
+ section="storage",
+ severity="HIGH",
+ status="FAIL" if not_https else "PASS",
detail=f"{len(not_https)} accounts allow non-HTTPS" if not_https else "All accounts enforce HTTPS",
- nist_csf="PR.DS-2", resources=not_https,
+ nist_csf="PR.DS-2",
+ resources=not_https,
)
except Exception as e:
return Finding(
- control_id="2.2", title="Storage HTTPS-only", section="storage",
- severity="HIGH", status="ERROR", detail=str(e), nist_csf="PR.DS-2",
+ control_id="2.2",
+ title="Storage HTTPS-only",
+ section="storage",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.DS-2",
)
@@ -90,15 +109,24 @@ def check_2_4_network_rules(storage_client, subscription_id: str) -> Finding:
if account.network_rule_set and account.network_rule_set.default_action == "Allow":
open_accounts.append(account.name)
return Finding(
- control_id="2.4", title="Storage network deny-by-default", section="storage",
- severity="HIGH", status="FAIL" if open_accounts else "PASS",
+ control_id="2.4",
+ title="Storage network deny-by-default",
+ section="storage",
+ severity="HIGH",
+ status="FAIL" if open_accounts else "PASS",
detail=f"{len(open_accounts)} accounts default-allow" if open_accounts else "All accounts deny by default",
- nist_csf="PR.AC-5", resources=open_accounts,
+ nist_csf="PR.AC-5",
+ resources=open_accounts,
)
except Exception as e:
return Finding(
- control_id="2.4", title="Storage network deny-by-default", section="storage",
- severity="HIGH", status="ERROR", detail=str(e), nist_csf="PR.AC-5",
+ control_id="2.4",
+ title="Storage network deny-by-default",
+ section="storage",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-5",
)
@@ -106,6 +134,7 @@ def check_2_4_network_rules(storage_client, subscription_id: str) -> Finding:
# Section 4 — Networking
# ---------------------------------------------------------------------------
+
def check_4_1_no_unrestricted_ssh(network_client, subscription_id: str) -> Finding:
"""CIS 4.1 — No unrestricted SSH in NSGs."""
return _check_nsg_port(network_client, 22, "4.1", "No unrestricted SSH")
@@ -123,8 +152,7 @@ def _check_nsg_port(network_client, port: int, control_id: str, title: str) -> F
open_rules = []
for nsg in nsgs:
for rule in nsg.security_rules or []:
- if (rule.direction == "Inbound" and rule.access == "Allow"
- and rule.source_address_prefix in ("*", "0.0.0.0/0", "Internet")):
+ if rule.direction == "Inbound" and rule.access == "Allow" and rule.source_address_prefix in ("*", "0.0.0.0/0", "Internet"):
dest_ports = rule.destination_port_range or ""
if dest_ports == "*" or str(port) == dest_ports:
open_rules.append(f"{nsg.name}/{rule.name}")
@@ -136,15 +164,24 @@ def _check_nsg_port(network_client, port: int, control_id: str, title: str) -> F
except ValueError:
pass
return Finding(
- control_id=control_id, title=title, section="networking",
- severity="HIGH", status="FAIL" if open_rules else "PASS",
+ control_id=control_id,
+ title=title,
+ section="networking",
+ severity="HIGH",
+ status="FAIL" if open_rules else "PASS",
detail=f"{len(open_rules)} NSG rules allow 0.0.0.0/0:{port}" if open_rules else f"No unrestricted port {port}",
- nist_csf="PR.AC-5", resources=open_rules,
+ nist_csf="PR.AC-5",
+ resources=open_rules,
)
except Exception as e:
return Finding(
- control_id=control_id, title=title, section="networking",
- severity="HIGH", status="ERROR", detail=str(e), nist_csf="PR.AC-5",
+ control_id=control_id,
+ title=title,
+ section="networking",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-5",
)
@@ -154,20 +191,32 @@ def check_4_3_nsg_flow_logs(network_client, subscription_id: str) -> Finding:
watchers = list(network_client.network_watchers.list_all())
if not watchers:
return Finding(
- control_id="4.3", title="NSG flow logs enabled", section="networking",
- severity="MEDIUM", status="FAIL", detail="No Network Watchers found",
+ control_id="4.3",
+ title="NSG flow logs enabled",
+ section="networking",
+ severity="MEDIUM",
+ status="FAIL",
+ detail="No Network Watchers found",
nist_csf="DE.CM-1",
)
return Finding(
- control_id="4.3", title="NSG flow logs enabled", section="networking",
- severity="MEDIUM", status="PASS",
+ control_id="4.3",
+ title="NSG flow logs enabled",
+ section="networking",
+ severity="MEDIUM",
+ status="PASS",
detail=f"{len(watchers)} Network Watcher(s) found — verify flow logs per NSG",
nist_csf="DE.CM-1",
)
except Exception as e:
return Finding(
- control_id="4.3", title="NSG flow logs enabled", section="networking",
- severity="MEDIUM", status="ERROR", detail=str(e), nist_csf="DE.CM-1",
+ control_id="4.3",
+ title="NSG flow logs enabled",
+ section="networking",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="DE.CM-1",
)
@@ -175,6 +224,7 @@ def check_4_3_nsg_flow_logs(network_client, subscription_id: str) -> Finding:
# Runner
# ---------------------------------------------------------------------------
+
def _status_symbol(status: str) -> str:
return {"PASS": "\033[92m✓\033[0m", "FAIL": "\033[91m✗\033[0m", "ERROR": "\033[90m?\033[0m"}.get(status, "?")
@@ -186,8 +236,7 @@ def run_assessment(subscription_id: str, section: str | None = None) -> list[Fin
from azure.mgmt.network import NetworkManagementClient
from azure.mgmt.storage import StorageManagementClient
except ImportError:
- print("ERROR: Install Azure SDKs: pip install azure-identity "
- "azure-mgmt-storage azure-mgmt-network")
+ print("ERROR: Install Azure SDKs: pip install azure-identity azure-mgmt-storage azure-mgmt-network")
sys.exit(1)
credential = DefaultAzureCredential()
@@ -221,9 +270,9 @@ def print_summary(findings: list[Finding]) -> None:
passed = sum(1 for f in findings if f.status == "PASS")
total = len(findings)
- print(f"\n{'='*60}")
- print(f" CIS Azure Foundations v2.1 — Assessment Results")
- print(f"{'='*60}\n")
+ print(f"\n{'=' * 60}")
+ print(" CIS Azure Foundations v2.1 — Assessment Results")
+ print(f"{'=' * 60}\n")
current_section = ""
for f in findings:
@@ -237,9 +286,9 @@ def print_summary(findings: list[Finding]) -> None:
print(f" - {r}")
pct = (passed / total * 100) if total else 0
- print(f"\n{'─'*60}")
+ print(f"\n{'─' * 60}")
print(f" Score: {passed}/{total} passed ({pct:.0f}%)")
- print(f"{'─'*60}\n")
+ print(f"{'─' * 60}\n")
def main():
diff --git a/skills/cspm-gcp-cis-benchmark/src/checks.py b/skills/cspm-gcp-cis-benchmark/src/checks.py
index 7ebe7c4..7c8630a 100644
--- a/skills/cspm-gcp-cis-benchmark/src/checks.py
+++ b/skills/cspm-gcp-cis-benchmark/src/checks.py
@@ -19,11 +19,11 @@
from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
-
# ---------------------------------------------------------------------------
# Data model
# ---------------------------------------------------------------------------
+
@dataclass
class Finding:
control_id: str
@@ -40,6 +40,7 @@ class Finding:
# Section 1 — IAM
# ---------------------------------------------------------------------------
+
def check_1_1_no_gmail_accounts(crm_client, project_id: str) -> Finding:
"""CIS 1.1 — Corporate credentials only (no personal Gmail)."""
try:
@@ -50,15 +51,24 @@ def check_1_1_no_gmail_accounts(crm_client, project_id: str) -> Finding:
if "gmail.com" in member.lower():
gmail_members.append(f"{member} -> {binding.role}")
return Finding(
- control_id="1.1", title="No personal Gmail accounts", section="iam",
- severity="HIGH", status="FAIL" if gmail_members else "PASS",
+ control_id="1.1",
+ title="No personal Gmail accounts",
+ section="iam",
+ severity="HIGH",
+ status="FAIL" if gmail_members else "PASS",
detail=f"{len(gmail_members)} personal Gmail accounts in IAM" if gmail_members else "No personal Gmail accounts",
- nist_csf="PR.AC-1", resources=gmail_members,
+ nist_csf="PR.AC-1",
+ resources=gmail_members,
)
except Exception as e:
return Finding(
- control_id="1.1", title="No personal Gmail accounts", section="iam",
- severity="HIGH", status="ERROR", detail=str(e), nist_csf="PR.AC-1",
+ control_id="1.1",
+ title="No personal Gmail accounts",
+ section="iam",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-1",
)
@@ -69,21 +79,28 @@ def check_1_3_no_sa_keys(iam_client, project_id: str) -> Finding:
service_accounts = list(iam_client.list_service_accounts(request=request))
sas_with_keys = []
for sa in service_accounts:
- keys = list(iam_client.list_service_account_keys(
- request={"name": sa.name, "key_types": ["USER_MANAGED"]}
- ))
+ keys = list(iam_client.list_service_account_keys(request={"name": sa.name, "key_types": ["USER_MANAGED"]}))
if keys:
sas_with_keys.append(f"{sa.email} ({len(keys)} keys)")
return Finding(
- control_id="1.3", title="No user-managed SA keys", section="iam",
- severity="HIGH", status="FAIL" if sas_with_keys else "PASS",
+ control_id="1.3",
+ title="No user-managed SA keys",
+ section="iam",
+ severity="HIGH",
+ status="FAIL" if sas_with_keys else "PASS",
detail=f"{len(sas_with_keys)} SAs with user-managed keys" if sas_with_keys else "No user-managed keys found",
- nist_csf="PR.AC-1", resources=sas_with_keys,
+ nist_csf="PR.AC-1",
+ resources=sas_with_keys,
)
except Exception as e:
return Finding(
- control_id="1.3", title="No user-managed SA keys", section="iam",
- severity="HIGH", status="ERROR", detail=str(e), nist_csf="PR.AC-1",
+ control_id="1.3",
+ title="No user-managed SA keys",
+ section="iam",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-1",
)
@@ -95,23 +112,30 @@ def check_1_4_sa_key_rotation(iam_client, project_id: str) -> Finding:
service_accounts = list(iam_client.list_service_accounts(request=request))
old_keys = []
for sa in service_accounts:
- keys = list(iam_client.list_service_account_keys(
- request={"name": sa.name, "key_types": ["USER_MANAGED"]}
- ))
+ keys = list(iam_client.list_service_account_keys(request={"name": sa.name, "key_types": ["USER_MANAGED"]}))
for key in keys:
created = key.valid_after_time
if created and (now - created.replace(tzinfo=timezone.utc)).days > 90:
old_keys.append(f"{sa.email}: key {key.name.split('/')[-1]}")
return Finding(
- control_id="1.4", title="SA key rotation (90 days)", section="iam",
- severity="MEDIUM", status="FAIL" if old_keys else "PASS",
+ control_id="1.4",
+ title="SA key rotation (90 days)",
+ section="iam",
+ severity="MEDIUM",
+ status="FAIL" if old_keys else "PASS",
detail=f"{len(old_keys)} keys older than 90 days" if old_keys else "All keys within 90 days",
- nist_csf="PR.AC-1", resources=old_keys,
+ nist_csf="PR.AC-1",
+ resources=old_keys,
)
except Exception as e:
return Finding(
- control_id="1.4", title="SA key rotation (90 days)", section="iam",
- severity="MEDIUM", status="ERROR", detail=str(e), nist_csf="PR.AC-1",
+ control_id="1.4",
+ title="SA key rotation (90 days)",
+ section="iam",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-1",
)
@@ -119,6 +143,7 @@ def check_1_4_sa_key_rotation(iam_client, project_id: str) -> Finding:
# Section 2 — Storage
# ---------------------------------------------------------------------------
+
def check_2_3_no_public_buckets(storage_client, project_id: str) -> Finding:
"""CIS 2.3 — No public buckets."""
try:
@@ -130,15 +155,24 @@ def check_2_3_no_public_buckets(storage_client, project_id: str) -> Finding:
if "allUsers" in binding["members"] or "allAuthenticatedUsers" in binding["members"]:
public_buckets.append(f"{bucket.name} -> {binding['role']}")
return Finding(
- control_id="2.3", title="No public buckets", section="storage",
- severity="CRITICAL", status="FAIL" if public_buckets else "PASS",
+ control_id="2.3",
+ title="No public buckets",
+ section="storage",
+ severity="CRITICAL",
+ status="FAIL" if public_buckets else "PASS",
detail=f"{len(public_buckets)} public bucket bindings" if public_buckets else "No public buckets",
- nist_csf="PR.AC-3", resources=public_buckets,
+ nist_csf="PR.AC-3",
+ resources=public_buckets,
)
except Exception as e:
return Finding(
- control_id="2.3", title="No public buckets", section="storage",
- severity="CRITICAL", status="ERROR", detail=str(e), nist_csf="PR.AC-3",
+ control_id="2.3",
+ title="No public buckets",
+ section="storage",
+ severity="CRITICAL",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-3",
)
@@ -151,15 +185,24 @@ def check_2_1_uniform_access(storage_client, project_id: str) -> Finding:
if not bucket.iam_configuration.uniform_bucket_level_access_enabled:
legacy_acl.append(bucket.name)
return Finding(
- control_id="2.1", title="Uniform bucket-level access", section="storage",
- severity="HIGH", status="FAIL" if legacy_acl else "PASS",
+ control_id="2.1",
+ title="Uniform bucket-level access",
+ section="storage",
+ severity="HIGH",
+ status="FAIL" if legacy_acl else "PASS",
detail=f"{len(legacy_acl)} buckets with legacy ACL" if legacy_acl else "All buckets use uniform access",
- nist_csf="PR.AC-3", resources=legacy_acl,
+ nist_csf="PR.AC-3",
+ resources=legacy_acl,
)
except Exception as e:
return Finding(
- control_id="2.1", title="Uniform bucket-level access", section="storage",
- severity="HIGH", status="ERROR", detail=str(e), nist_csf="PR.AC-3",
+ control_id="2.1",
+ title="Uniform bucket-level access",
+ section="storage",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-3",
)
@@ -167,6 +210,7 @@ def check_2_1_uniform_access(storage_client, project_id: str) -> Finding:
# Section 4 — Networking
# ---------------------------------------------------------------------------
+
def check_4_2_no_unrestricted_ssh_rdp(compute_client, project_id: str) -> Finding:
"""CIS 4.2 — No unrestricted SSH/RDP in firewall rules."""
try:
@@ -187,15 +231,24 @@ def check_4_2_no_unrestricted_ssh_rdp(compute_client, project_id: str) -> Findin
if (22 in ports or 3389 in ports) and "0.0.0.0/0" in (rule.source_ranges or []):
open_rules.append(f"{rule.name}: {allowed.I_p_protocol}/{','.join(allowed.ports or [])}")
return Finding(
- control_id="4.2", title="No unrestricted SSH/RDP", section="networking",
- severity="HIGH", status="FAIL" if open_rules else "PASS",
+ control_id="4.2",
+ title="No unrestricted SSH/RDP",
+ section="networking",
+ severity="HIGH",
+ status="FAIL" if open_rules else "PASS",
detail=f"{len(open_rules)} rules allow 0.0.0.0/0 on SSH/RDP" if open_rules else "No unrestricted SSH/RDP",
- nist_csf="PR.AC-5", resources=open_rules,
+ nist_csf="PR.AC-5",
+ resources=open_rules,
)
except Exception as e:
return Finding(
- control_id="4.2", title="No unrestricted SSH/RDP", section="networking",
- severity="HIGH", status="ERROR", detail=str(e), nist_csf="PR.AC-5",
+ control_id="4.2",
+ title="No unrestricted SSH/RDP",
+ section="networking",
+ severity="HIGH",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="PR.AC-5",
)
@@ -209,15 +262,24 @@ def check_4_3_vpc_flow_logs(compute_client, project_id: str) -> Finding:
subnets.append(subnet)
no_logs = [s.name for s in subnets if not getattr(s, "log_config", None) or not s.log_config.enable]
return Finding(
- control_id="4.3", title="VPC flow logs on all subnets", section="networking",
- severity="MEDIUM", status="FAIL" if no_logs else "PASS",
+ control_id="4.3",
+ title="VPC flow logs on all subnets",
+ section="networking",
+ severity="MEDIUM",
+ status="FAIL" if no_logs else "PASS",
detail=f"{len(no_logs)} subnets without flow logs" if no_logs else "All subnets have flow logs",
- nist_csf="DE.CM-1", resources=no_logs,
+ nist_csf="DE.CM-1",
+ resources=no_logs,
)
except Exception as e:
return Finding(
- control_id="4.3", title="VPC flow logs on all subnets", section="networking",
- severity="MEDIUM", status="ERROR", detail=str(e), nist_csf="DE.CM-1",
+ control_id="4.3",
+ title="VPC flow logs on all subnets",
+ section="networking",
+ severity="MEDIUM",
+ status="ERROR",
+ detail=str(e),
+ nist_csf="DE.CM-1",
)
@@ -225,6 +287,7 @@ def check_4_3_vpc_flow_logs(compute_client, project_id: str) -> Finding:
# Runner
# ---------------------------------------------------------------------------
+
def _status_symbol(status: str) -> str:
return {"PASS": "\033[92m✓\033[0m", "FAIL": "\033[91m✗\033[0m", "ERROR": "\033[90m?\033[0m"}.get(status, "?")
@@ -236,8 +299,9 @@ def run_assessment(project_id: str, section: str | None = None) -> list[Finding]
from google.cloud.compute_v1.services.firewalls import FirewallsClient
from google.cloud.compute_v1.services.subnetworks import SubnetworksClient
except ImportError:
- print("ERROR: Install GCP SDKs: pip install google-cloud-iam google-cloud-storage "
- "google-cloud-resource-manager google-cloud-compute")
+ print(
+ "ERROR: Install GCP SDKs: pip install google-cloud-iam google-cloud-storage google-cloud-resource-manager google-cloud-compute"
+ )
sys.exit(1)
crm = resourcemanager_v3.ProjectsClient()
@@ -274,12 +338,11 @@ def run_assessment(project_id: str, section: str | None = None) -> list[Finding]
def print_summary(findings: list[Finding]) -> None:
passed = sum(1 for f in findings if f.status == "PASS")
- failed = sum(1 for f in findings if f.status == "FAIL")
total = len(findings)
- print(f"\n{'='*60}")
- print(f" CIS GCP Foundations v3.0 — Assessment Results")
- print(f"{'='*60}\n")
+ print(f"\n{'=' * 60}")
+ print(" CIS GCP Foundations v3.0 — Assessment Results")
+ print(f"{'=' * 60}\n")
current_section = ""
for f in findings:
@@ -293,9 +356,9 @@ def print_summary(findings: list[Finding]) -> None:
print(f" - {r}")
pct = (passed / total * 100) if total else 0
- print(f"\n{'─'*60}")
+ print(f"\n{'─' * 60}")
print(f" Score: {passed}/{total} passed ({pct:.0f}%)")
- print(f"{'─'*60}\n")
+ print(f"{'─' * 60}\n")
def main():
diff --git a/skills/iam-departures-remediation/src/lambda_parser/handler.py b/skills/iam-departures-remediation/src/lambda_parser/handler.py
index 917a418..2d34eac 100644
--- a/skills/iam-departures-remediation/src/lambda_parser/handler.py
+++ b/skills/iam-departures-remediation/src/lambda_parser/handler.py
@@ -24,7 +24,7 @@
import json
import logging
import os
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta, timezone
from typing import Any
import boto3
@@ -76,17 +76,21 @@ def handler(event: dict, context: Any) -> dict:
if result["action"] == "remediate":
validated.append(result["entry"])
else:
- skipped.append({
+ skipped.append(
+ {
+ "email": entry.get("email", ""),
+ "iam_username": entry.get("iam_username", ""),
+ "reason": result["reason"],
+ }
+ )
+ except Exception as exc:
+ errors.append(
+ {
"email": entry.get("email", ""),
"iam_username": entry.get("iam_username", ""),
- "reason": result["reason"],
- })
- except Exception as exc:
- errors.append({
- "email": entry.get("email", ""),
- "iam_username": entry.get("iam_username", ""),
- "error": str(exc),
- })
+ "error": str(exc),
+ }
+ )
logger.exception("Validation error for %s", entry.get("email", ""))
summary = {
diff --git a/skills/iam-departures-remediation/src/lambda_worker/clouds/azure_entra.py b/skills/iam-departures-remediation/src/lambda_worker/clouds/azure_entra.py
index d14edb5..4887474 100644
--- a/skills/iam-departures-remediation/src/lambda_worker/clouds/azure_entra.py
+++ b/skills/iam-departures-remediation/src/lambda_worker/clouds/azure_entra.py
@@ -155,7 +155,9 @@ async def _revoke_sessions(client, user_id: str, step: int) -> RemediationStep:
return RemediationStep(step_number=step, action="revoke_sign_in_sessions", target=user_id, detail="All sessions revoked")
except Exception as e:
logger.warning("Failed to revoke sessions for %s: %s", user_id, e)
- return RemediationStep(step_number=step, action="revoke_sign_in_sessions", target=user_id, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="revoke_sign_in_sessions", target=user_id, status=RemediationStatus.FAILED, error=str(e)
+ )
async def _remove_group_memberships(client, user_id: str, step: int) -> RemediationStep:
@@ -178,12 +180,16 @@ async def _remove_group_memberships(client, user_id: str, step: int) -> Remediat
# Dynamic groups can't be manually modified
skipped += 1
return RemediationStep(
- step_number=step, action="remove_group_memberships", target=user_id,
+ step_number=step,
+ action="remove_group_memberships",
+ target=user_id,
detail=f"Removed from {removed} groups, skipped {skipped} (dynamic/protected)",
)
except Exception as e:
logger.warning("Failed to remove group memberships for %s: %s", user_id, e)
- return RemediationStep(step_number=step, action="remove_group_memberships", target=user_id, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="remove_group_memberships", target=user_id, status=RemediationStatus.FAILED, error=str(e)
+ )
async def _remove_app_role_assignments(client, user_id: str, step: int) -> RemediationStep:
@@ -195,12 +201,16 @@ async def _remove_app_role_assignments(client, user_id: str, step: int) -> Remed
await client.users.by_user_id(user_id).app_role_assignments.by_app_role_assignment_id(assignment.id).delete()
removed += 1
return RemediationStep(
- step_number=step, action="remove_app_role_assignments", target=user_id,
+ step_number=step,
+ action="remove_app_role_assignments",
+ target=user_id,
detail=f"Removed {removed} app role assignments",
)
except Exception as e:
logger.warning("Failed to remove app role assignments for %s: %s", user_id, e)
- return RemediationStep(step_number=step, action="remove_app_role_assignments", target=user_id, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="remove_app_role_assignments", target=user_id, status=RemediationStatus.FAILED, error=str(e)
+ )
async def _revoke_oauth2_grants(client, user_id: str, step: int) -> RemediationStep:
@@ -212,12 +222,16 @@ async def _revoke_oauth2_grants(client, user_id: str, step: int) -> RemediationS
await client.oauth2_permission_grants.by_o_auth2_permission_grant_id(grant.id).delete()
revoked += 1
return RemediationStep(
- step_number=step, action="revoke_oauth2_grants", target=user_id,
+ step_number=step,
+ action="revoke_oauth2_grants",
+ target=user_id,
detail=f"Revoked {revoked} OAuth2 delegated permission grants",
)
except Exception as e:
logger.warning("Failed to revoke OAuth2 grants for %s: %s", user_id, e)
- return RemediationStep(step_number=step, action="revoke_oauth2_grants", target=user_id, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="revoke_oauth2_grants", target=user_id, status=RemediationStatus.FAILED, error=str(e)
+ )
async def _disable_user(client, user_id: str, step: int) -> RemediationStep:
@@ -239,7 +253,9 @@ async def _delete_user(client, user_id: str, step: int) -> RemediationStep:
try:
await client.users.by_user_id(user_id).delete()
return RemediationStep(
- step_number=step, action="delete_user", target=user_id,
+ step_number=step,
+ action="delete_user",
+ target=user_id,
detail="User soft-deleted (30-day recycle bin)",
)
except Exception as e:
diff --git a/skills/iam-departures-remediation/src/lambda_worker/clouds/databricks_scim.py b/skills/iam-departures-remediation/src/lambda_worker/clouds/databricks_scim.py
index 9a2e4ad..a7bad12 100644
--- a/skills/iam-departures-remediation/src/lambda_worker/clouds/databricks_scim.py
+++ b/skills/iam-departures-remediation/src/lambda_worker/clouds/databricks_scim.py
@@ -121,8 +121,12 @@ async def remediate_user(
# Step 4: Delete at account level (cascades to all workspaces)
result.steps.append(_delete_account_user(username, step=4))
else:
- result.steps.append(RemediationStep(step_number=3, action="deactivate_account_user", target=username, detail="Skipped (workspace_only mode)"))
- result.steps.append(RemediationStep(step_number=4, action="delete_account_user", target=username, detail="Skipped (workspace_only mode)"))
+ result.steps.append(
+ RemediationStep(step_number=3, action="deactivate_account_user", target=username, detail="Skipped (workspace_only mode)")
+ )
+ result.steps.append(
+ RemediationStep(step_number=4, action="delete_account_user", target=username, detail="Skipped (workspace_only mode)")
+ )
result.complete()
return result
@@ -149,7 +153,9 @@ def _revoke_pats(username: str, step: int) -> RemediationStep:
ws.token_management.delete(token_id=token_info.token_id)
revoked += 1
return RemediationStep(
- step_number=step, action="revoke_pats", target=username,
+ step_number=step,
+ action="revoke_pats",
+ target=username,
detail=f"Revoked {revoked} personal access tokens",
)
except Exception as e:
@@ -169,7 +175,9 @@ def _deactivate_workspace_user(username: str, step: int) -> RemediationStep:
# Find user by email
user = _find_workspace_user(ws, username)
if user is None:
- return RemediationStep(step_number=step, action="deactivate_workspace_user", target=username, detail="User not found in workspace, skipped")
+ return RemediationStep(
+ step_number=step, action="deactivate_workspace_user", target=username, detail="User not found in workspace, skipped"
+ )
ws.users.patch(
id=user.id,
@@ -182,10 +190,14 @@ def _deactivate_workspace_user(username: str, step: int) -> RemediationStep:
],
schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)
- return RemediationStep(step_number=step, action="deactivate_workspace_user", target=username, detail=f"User {user.id} deactivated in workspace")
+ return RemediationStep(
+ step_number=step, action="deactivate_workspace_user", target=username, detail=f"User {user.id} deactivated in workspace"
+ )
except Exception as e:
logger.warning("Failed to deactivate workspace user %s: %s", username, e)
- return RemediationStep(step_number=step, action="deactivate_workspace_user", target=username, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="deactivate_workspace_user", target=username, status=RemediationStatus.FAILED, error=str(e)
+ )
def _deactivate_account_user(username: str, step: int) -> RemediationStep:
@@ -199,7 +211,9 @@ def _deactivate_account_user(username: str, step: int) -> RemediationStep:
ac = _get_account_client()
user = _find_account_user(ac, username)
if user is None:
- return RemediationStep(step_number=step, action="deactivate_account_user", target=username, detail="User not found at account level, skipped")
+ return RemediationStep(
+ step_number=step, action="deactivate_account_user", target=username, detail="User not found at account level, skipped"
+ )
ac.users.patch(
id=user.id,
@@ -212,10 +226,14 @@ def _deactivate_account_user(username: str, step: int) -> RemediationStep:
],
schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)
- return RemediationStep(step_number=step, action="deactivate_account_user", target=username, detail=f"User {user.id} deactivated at account level")
+ return RemediationStep(
+ step_number=step, action="deactivate_account_user", target=username, detail=f"User {user.id} deactivated at account level"
+ )
except Exception as e:
logger.warning("Failed to deactivate account user %s: %s", username, e)
- return RemediationStep(step_number=step, action="deactivate_account_user", target=username, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="deactivate_account_user", target=username, status=RemediationStatus.FAILED, error=str(e)
+ )
def _delete_account_user(username: str, step: int) -> RemediationStep:
@@ -229,16 +247,22 @@ def _delete_account_user(username: str, step: int) -> RemediationStep:
ac = _get_account_client()
user = _find_account_user(ac, username)
if user is None:
- return RemediationStep(step_number=step, action="delete_account_user", target=username, detail="User not found at account level, skipped")
+ return RemediationStep(
+ step_number=step, action="delete_account_user", target=username, detail="User not found at account level, skipped"
+ )
ac.users.delete(id=user.id)
return RemediationStep(
- step_number=step, action="delete_account_user", target=username,
+ step_number=step,
+ action="delete_account_user",
+ target=username,
detail=f"User {user.id} deleted at account level (cascades to all workspaces)",
)
except Exception as e:
logger.warning("Failed to delete account user %s: %s", username, e)
- return RemediationStep(step_number=step, action="delete_account_user", target=username, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="delete_account_user", target=username, status=RemediationStatus.FAILED, error=str(e)
+ )
def _find_workspace_user(ws, username: str):
diff --git a/skills/iam-departures-remediation/src/lambda_worker/clouds/gcp_iam.py b/skills/iam-departures-remediation/src/lambda_worker/clouds/gcp_iam.py
index 19c9c22..f495fbe 100644
--- a/skills/iam-departures-remediation/src/lambda_worker/clouds/gcp_iam.py
+++ b/skills/iam-departures-remediation/src/lambda_worker/clouds/gcp_iam.py
@@ -145,7 +145,9 @@ def _disable_service_account(client, sa_name: str, step: int) -> RemediationStep
return RemediationStep(step_number=step, action="disable_service_account", target=sa_name, detail="Service account disabled")
except Exception as e:
logger.warning("Failed to disable SA %s: %s", sa_name, e)
- return RemediationStep(step_number=step, action="disable_service_account", target=sa_name, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="disable_service_account", target=sa_name, status=RemediationStatus.FAILED, error=str(e)
+ )
def _delete_sa_keys(client, sa_name: str, step: int) -> RemediationStep:
@@ -167,7 +169,9 @@ def _delete_sa_keys(client, sa_name: str, step: int) -> RemediationStep:
skipped += 1 # SYSTEM_MANAGED — cannot delete
return RemediationStep(
- step_number=step, action="delete_sa_keys", target=sa_name,
+ step_number=step,
+ action="delete_sa_keys",
+ target=sa_name,
detail=f"Deleted {deleted} user-managed keys, skipped {skipped} system-managed",
)
except Exception as e:
@@ -205,12 +209,16 @@ def _remove_iam_bindings(principal: str, project_ids: list[str], step: int) -> R
rm_client.set_iam_policy(request=request)
return RemediationStep(
- step_number=step, action="remove_iam_bindings", target=principal,
+ step_number=step,
+ action="remove_iam_bindings",
+ target=principal,
detail=f"Removed {total_removed} bindings across {len(project_ids)} projects",
)
except Exception as e:
logger.warning("Failed to remove IAM bindings for %s: %s", principal, e)
- return RemediationStep(step_number=step, action="remove_iam_bindings", target=principal, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="remove_iam_bindings", target=principal, status=RemediationStatus.FAILED, error=str(e)
+ )
def _delete_service_account(client, sa_name: str, step: int) -> RemediationStep:
@@ -221,12 +229,16 @@ def _delete_service_account(client, sa_name: str, step: int) -> RemediationStep:
request = types.DeleteServiceAccountRequest(name=sa_name)
client.delete_service_account(request=request)
return RemediationStep(
- step_number=step, action="delete_service_account", target=sa_name,
+ step_number=step,
+ action="delete_service_account",
+ target=sa_name,
detail="Service account deleted (30-day undelete window)",
)
except Exception as e:
logger.warning("Failed to delete SA %s: %s", sa_name, e)
- return RemediationStep(step_number=step, action="delete_service_account", target=sa_name, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="delete_service_account", target=sa_name, status=RemediationStatus.FAILED, error=str(e)
+ )
async def remediate_workspace_user(
@@ -260,7 +272,9 @@ async def remediate_workspace_user(
if project_ids:
result.steps.append(_remove_iam_bindings(f"user:{email}", project_ids, step=1))
else:
- result.steps.append(RemediationStep(step_number=1, action="remove_iam_bindings", target=email, detail="No projects specified, skipped"))
+ result.steps.append(
+ RemediationStep(step_number=1, action="remove_iam_bindings", target=email, detail="No projects specified, skipped")
+ )
# Step 2: Delete via Admin SDK (20-day soft delete)
result.steps.append(_delete_workspace_user(email, step=2))
@@ -277,9 +291,13 @@ def _delete_workspace_user(email: str, step: int) -> RemediationStep:
service = build("admin", "directory_v1")
service.users().delete(userKey=email).execute()
return RemediationStep(
- step_number=step, action="delete_workspace_user", target=email,
+ step_number=step,
+ action="delete_workspace_user",
+ target=email,
detail="Workspace user deleted (20-day recovery window)",
)
except Exception as e:
logger.warning("Failed to delete Workspace user %s: %s", email, e)
- return RemediationStep(step_number=step, action="delete_workspace_user", target=email, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="delete_workspace_user", target=email, status=RemediationStatus.FAILED, error=str(e)
+ )
diff --git a/skills/iam-departures-remediation/src/lambda_worker/clouds/snowflake_user.py b/skills/iam-departures-remediation/src/lambda_worker/clouds/snowflake_user.py
index 1aa87b1..4e069c3 100644
--- a/skills/iam-departures-remediation/src/lambda_worker/clouds/snowflake_user.py
+++ b/skills/iam-departures-remediation/src/lambda_worker/clouds/snowflake_user.py
@@ -178,7 +178,9 @@ def _abort_queries(cursor, username: str, step: int) -> RemediationStep:
return RemediationStep(step_number=step, action="abort_active_queries", target=username, detail="All active queries aborted")
except Exception as e:
logger.warning("Failed to abort queries for %s: %s", username, e)
- return RemediationStep(step_number=step, action="abort_active_queries", target=username, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="abort_active_queries", target=username, status=RemediationStatus.FAILED, error=str(e)
+ )
def _disable_user(cursor, username: str, step: int) -> RemediationStep:
@@ -211,7 +213,9 @@ def _revoke_roles(cursor, username: str, step: int) -> RemediationStep:
skipped += 1
return RemediationStep(
- step_number=step, action="revoke_roles", target=username,
+ step_number=step,
+ action="revoke_roles",
+ target=username,
detail=f"Revoked {revoked} roles, skipped {skipped} (implicit/protected)",
)
except Exception as e:
@@ -246,20 +250,23 @@ def _transfer_ownership(cursor, username: str, target_role: str, step: int) -> R
for obj_type in _OWNERSHIP_OBJECT_TYPES:
try:
cursor.execute(
- f'GRANT OWNERSHIP ON ALL {obj_type} IN SCHEMA "{db}"."{schema}" '
- f'TO ROLE "{target_role}" COPY CURRENT GRANTS'
+ f'GRANT OWNERSHIP ON ALL {obj_type} IN SCHEMA "{db}"."{schema}" TO ROLE "{target_role}" COPY CURRENT GRANTS'
)
transferred += 1
except Exception:
errors += 1
return RemediationStep(
- step_number=step, action="transfer_ownership", target=username,
+ step_number=step,
+ action="transfer_ownership",
+ target=username,
detail=f"Transferred ownership in {transferred} schema/type combos, {errors} errors",
)
except Exception as e:
logger.warning("Failed to transfer ownership for %s: %s", username, e)
- return RemediationStep(step_number=step, action="transfer_ownership", target=username, status=RemediationStatus.FAILED, error=str(e))
+ return RemediationStep(
+ step_number=step, action="transfer_ownership", target=username, status=RemediationStatus.FAILED, error=str(e)
+ )
def _drop_user(cursor, username: str, step: int) -> RemediationStep:
@@ -279,8 +286,11 @@ def _verify_dropped(cursor, username: str, step: int) -> RemediationStep:
rows = cursor.fetchall()
if rows:
return RemediationStep(
- step_number=step, action="verify_dropped", target=username,
- status=RemediationStatus.FAILED, error="User still exists after DROP",
+ step_number=step,
+ action="verify_dropped",
+ target=username,
+ status=RemediationStatus.FAILED,
+ error="User still exists after DROP",
)
return RemediationStep(step_number=step, action="verify_dropped", target=username, detail="Confirmed: user no longer exists")
except Exception as e:
diff --git a/skills/iam-departures-remediation/src/lambda_worker/handler.py b/skills/iam-departures-remediation/src/lambda_worker/handler.py
index 10ba2f3..6d2d2fa 100644
--- a/skills/iam-departures-remediation/src/lambda_worker/handler.py
+++ b/skills/iam-departures-remediation/src/lambda_worker/handler.py
@@ -131,11 +131,13 @@ def handler(event: dict, context: Any) -> dict:
# Step 11: DELETE the IAM user (all deps removed)
iam.delete_user(UserName=iam_username)
- actions_taken.append({
- "action": "delete_user",
- "target": iam_username,
- "timestamp": _now(),
- })
+ actions_taken.append(
+ {
+ "action": "delete_user",
+ "target": iam_username,
+ "timestamp": _now(),
+ }
+ )
logger.info("Successfully deleted IAM user: %s", iam_username)
@@ -156,9 +158,7 @@ def handler(event: dict, context: Any) -> dict:
logger.exception("Remediation failed for %s in %s", iam_username, account_id)
# Still write audit — record the failure
- audit_record = _build_audit_record(
- entry, actions_taken, "error", error=str(exc)
- )
+ audit_record = _build_audit_record(entry, actions_taken, "error", error=str(exc))
_write_audit(audit_record)
return {
@@ -187,33 +187,39 @@ def _deactivate_access_keys(iam: Any, username: str, actions: list) -> None:
AccessKeyId=key_id,
Status="Inactive",
)
- actions.append({
- "action": "deactivate_access_key",
- "target": key_id,
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "deactivate_access_key",
+ "target": key_id,
+ "timestamp": _now(),
+ }
+ )
# Then delete (required before user deletion)
iam.delete_access_key(
UserName=username,
AccessKeyId=key_id,
)
- actions.append({
- "action": "delete_access_key",
- "target": key_id,
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "delete_access_key",
+ "target": key_id,
+ "timestamp": _now(),
+ }
+ )
def _delete_login_profile(iam: Any, username: str, actions: list) -> None:
"""Delete console login profile (password)."""
try:
iam.delete_login_profile(UserName=username)
- actions.append({
- "action": "delete_login_profile",
- "target": username,
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "delete_login_profile",
+ "target": username,
+ "timestamp": _now(),
+ }
+ )
except iam.exceptions.NoSuchEntityException:
pass # No login profile — console access was never enabled
@@ -228,11 +234,13 @@ def _remove_from_groups(iam: Any, username: str, actions: list) -> None:
GroupName=group_name,
UserName=username,
)
- actions.append({
- "action": "remove_from_group",
- "target": group_name,
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "remove_from_group",
+ "target": group_name,
+ "timestamp": _now(),
+ }
+ )
def _detach_managed_policies(iam: Any, username: str, actions: list) -> None:
@@ -244,11 +252,13 @@ def _detach_managed_policies(iam: Any, username: str, actions: list) -> None:
UserName=username,
PolicyArn=policy["PolicyArn"],
)
- actions.append({
- "action": "detach_managed_policy",
- "target": policy["PolicyArn"],
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "detach_managed_policy",
+ "target": policy["PolicyArn"],
+ "timestamp": _now(),
+ }
+ )
def _delete_inline_policies(iam: Any, username: str, actions: list) -> None:
@@ -260,11 +270,13 @@ def _delete_inline_policies(iam: Any, username: str, actions: list) -> None:
UserName=username,
PolicyName=policy_name,
)
- actions.append({
- "action": "delete_inline_policy",
- "target": policy_name,
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "delete_inline_policy",
+ "target": policy_name,
+ "timestamp": _now(),
+ }
+ )
def _delete_mfa_devices(iam: Any, username: str, actions: list) -> None:
@@ -283,11 +295,13 @@ def _delete_mfa_devices(iam: Any, username: str, actions: list) -> None:
iam.delete_virtual_mfa_device(SerialNumber=serial)
except iam.exceptions.NoSuchEntityException:
pass
- actions.append({
- "action": "delete_mfa_device",
- "target": serial,
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "delete_mfa_device",
+ "target": serial,
+ "timestamp": _now(),
+ }
+ )
def _delete_signing_certificates(iam: Any, username: str, actions: list) -> None:
@@ -299,11 +313,13 @@ def _delete_signing_certificates(iam: Any, username: str, actions: list) -> None
UserName=username,
CertificateId=cert["CertificateId"],
)
- actions.append({
- "action": "delete_signing_certificate",
- "target": cert["CertificateId"],
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "delete_signing_certificate",
+ "target": cert["CertificateId"],
+ "timestamp": _now(),
+ }
+ )
def _delete_ssh_keys(iam: Any, username: str, actions: list) -> None:
@@ -315,11 +331,13 @@ def _delete_ssh_keys(iam: Any, username: str, actions: list) -> None:
UserName=username,
SSHPublicKeyId=key["SSHPublicKeyId"],
)
- actions.append({
- "action": "delete_ssh_key",
- "target": key["SSHPublicKeyId"],
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "delete_ssh_key",
+ "target": key["SSHPublicKeyId"],
+ "timestamp": _now(),
+ }
+ )
def _delete_service_credentials(iam: Any, username: str, actions: list) -> None:
@@ -331,11 +349,13 @@ def _delete_service_credentials(iam: Any, username: str, actions: list) -> None:
UserName=username,
ServiceSpecificCredentialId=cred["ServiceSpecificCredentialId"],
)
- actions.append({
- "action": "delete_service_credential",
- "target": cred["ServiceSpecificCredentialId"],
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "delete_service_credential",
+ "target": cred["ServiceSpecificCredentialId"],
+ "timestamp": _now(),
+ }
+ )
except Exception:
pass # Some accounts may not support this API
@@ -354,12 +374,14 @@ def _tag_user_for_audit(iam: Any, username: str, entry: dict, actions: list) ->
]
try:
iam.tag_user(UserName=username, Tags=tags)
- actions.append({
- "action": "tag_user",
- "target": username,
- "tags": {t["Key"]: t["Value"] for t in tags},
- "timestamp": _now(),
- })
+ actions.append(
+ {
+ "action": "tag_user",
+ "target": username,
+ "tags": {t["Key"]: t["Value"] for t in tags},
+ "timestamp": _now(),
+ }
+ )
except Exception:
logger.warning("Failed to tag user %s before deletion", username)
@@ -408,12 +430,14 @@ def _write_audit(record: dict) -> None:
try:
dynamodb = boto3.resource("dynamodb")
table = dynamodb.Table(AUDIT_TABLE)
- table.put_item(Item={
- "pk": f"AUDIT#{record['account_id']}#{record['iam_username']}",
- "sk": record["audit_timestamp"],
- **{k: v for k, v in record.items() if v is not None and v != ""},
- "actions_taken": json.dumps(record.get("actions_taken", [])),
- })
+ table.put_item(
+ Item={
+ "pk": f"AUDIT#{record['account_id']}#{record['iam_username']}",
+ "sk": record["audit_timestamp"],
+ **{k: v for k, v in record.items() if v is not None and v != ""},
+ "actions_taken": json.dumps(record.get("actions_taken", [])),
+ }
+ )
except Exception:
logger.exception("Failed to write DynamoDB audit record")
diff --git a/skills/iam-departures-remediation/src/reconciler/__init__.py b/skills/iam-departures-remediation/src/reconciler/__init__.py
index 2b76ae6..3e72970 100644
--- a/skills/iam-departures-remediation/src/reconciler/__init__.py
+++ b/skills/iam-departures-remediation/src/reconciler/__init__.py
@@ -10,6 +10,8 @@
T1087.004 Account Discovery: Cloud Account — enumerates IAM users per account
"""
+from reconciler.change_detect import ChangeDetector
+from reconciler.export import S3Exporter
from reconciler.sources import (
ClickHouseSource,
DatabricksSource,
@@ -17,8 +19,6 @@
SnowflakeSource,
WorkdayAPISource,
)
-from reconciler.change_detect import ChangeDetector
-from reconciler.export import S3Exporter
__all__ = [
"HRSource",
diff --git a/skills/iam-departures-remediation/src/reconciler/sources.py b/skills/iam-departures-remediation/src/reconciler/sources.py
index 257d151..29421a6 100644
--- a/skills/iam-departures-remediation/src/reconciler/sources.py
+++ b/skills/iam-departures-remediation/src/reconciler/sources.py
@@ -422,10 +422,7 @@ def fetch_departures(self) -> list[DepartureRecord]:
client = self._get_client()
result = client.query(query)
columns = [col.lower() for col in result.column_names]
- return [
- self._row_to_record(dict(zip(columns, row)))
- for row in result.result_rows
- ]
+ return [self._row_to_record(dict(zip(columns, row))) for row in result.result_rows]
def _row_to_record(self, row: dict) -> DepartureRecord:
rehire_date = row.get("rehire_date")
diff --git a/skills/iam-departures-remediation/tests/test_cross_cloud_workers.py b/skills/iam-departures-remediation/tests/test_cross_cloud_workers.py
index 551d212..14bab53 100644
--- a/skills/iam-departures-remediation/tests/test_cross_cloud_workers.py
+++ b/skills/iam-departures-remediation/tests/test_cross_cloud_workers.py
@@ -11,10 +11,8 @@
from __future__ import annotations
import asyncio
-from unittest.mock import AsyncMock, MagicMock, patch
import pytest
-
from src.lambda_worker.clouds import (
CloudProvider,
RemediationResult,
@@ -22,7 +20,6 @@
RemediationStep,
)
-
# ── Shared fixtures ──────────────────────────────────────────────
@@ -87,9 +84,7 @@ class TestAzureEntra:
def test_dry_run_produces_all_steps(self):
from src.lambda_worker.clouds import azure_entra
- result = asyncio.get_event_loop().run_until_complete(
- azure_entra.remediate_user("user-id-123", "tenant-abc", dry_run=True)
- )
+ result = asyncio.get_event_loop().run_until_complete(azure_entra.remediate_user("user-id-123", "tenant-abc", dry_run=True))
assert result.status == RemediationStatus.DRY_RUN
assert result.cloud == CloudProvider.AZURE
assert len(result.steps) == 6
@@ -111,9 +106,7 @@ def test_required_permissions(self):
def test_identity_type_is_entra_user(self):
from src.lambda_worker.clouds import azure_entra
- result = asyncio.get_event_loop().run_until_complete(
- azure_entra.remediate_user("user@domain.com", "tenant-123", dry_run=True)
- )
+ result = asyncio.get_event_loop().run_until_complete(azure_entra.remediate_user("user@domain.com", "tenant-123", dry_run=True))
assert result.identity_type == "entra_user"
@@ -142,9 +135,7 @@ def test_sa_dry_run_produces_all_steps(self):
def test_workspace_user_dry_run(self):
from src.lambda_worker.clouds import gcp_iam
- result = asyncio.get_event_loop().run_until_complete(
- gcp_iam.remediate_workspace_user("user@domain.com", dry_run=True)
- )
+ result = asyncio.get_event_loop().run_until_complete(gcp_iam.remediate_workspace_user("user@domain.com", dry_run=True))
assert result.status == RemediationStatus.DRY_RUN
assert result.identity_type == "workspace_user"
assert len(result.steps) == 2
@@ -164,9 +155,7 @@ class TestSnowflake:
def test_dry_run_produces_all_steps(self):
from src.lambda_worker.clouds import snowflake_user
- result = asyncio.get_event_loop().run_until_complete(
- snowflake_user.remediate_user("departing_user", "myaccount", dry_run=True)
- )
+ result = asyncio.get_event_loop().run_until_complete(snowflake_user.remediate_user("departing_user", "myaccount", dry_run=True))
assert result.status == RemediationStatus.DRY_RUN
assert result.cloud == CloudProvider.SNOWFLAKE
assert len(result.steps) == 6
@@ -180,9 +169,7 @@ def test_dry_run_produces_all_steps(self):
def test_identity_type_is_snowflake_user(self):
from src.lambda_worker.clouds import snowflake_user
- result = asyncio.get_event_loop().run_until_complete(
- snowflake_user.remediate_user("test_user", "acct", dry_run=True)
- )
+ result = asyncio.get_event_loop().run_until_complete(snowflake_user.remediate_user("test_user", "acct", dry_run=True))
assert result.identity_type == "snowflake_user"
def test_implicit_roles_skipped(self):
@@ -208,9 +195,7 @@ class TestDatabricks:
def test_dry_run_produces_all_steps(self):
from src.lambda_worker.clouds import databricks_scim
- result = asyncio.get_event_loop().run_until_complete(
- databricks_scim.remediate_user("user@company.com", dry_run=True)
- )
+ result = asyncio.get_event_loop().run_until_complete(databricks_scim.remediate_user("user@company.com", dry_run=True))
assert result.status == RemediationStatus.DRY_RUN
assert result.cloud == CloudProvider.DATABRICKS
assert len(result.steps) == 4
@@ -239,9 +224,7 @@ def test_required_permissions(self):
def test_identity_type(self):
from src.lambda_worker.clouds import databricks_scim
- result = asyncio.get_event_loop().run_until_complete(
- databricks_scim.remediate_user("user@co.com", dry_run=True)
- )
+ result = asyncio.get_event_loop().run_until_complete(databricks_scim.remediate_user("user@co.com", dry_run=True))
assert result.identity_type == "databricks_user"
diff --git a/skills/iam-departures-remediation/tests/test_parser_lambda.py b/skills/iam-departures-remediation/tests/test_parser_lambda.py
index 0cfe2a1..c600d5f 100644
--- a/skills/iam-departures-remediation/tests/test_parser_lambda.py
+++ b/skills/iam-departures-remediation/tests/test_parser_lambda.py
@@ -8,11 +8,9 @@
from datetime import datetime, timedelta, timezone
from unittest.mock import MagicMock, patch
-import pytest
-
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "src"))
-from lambda_parser.handler import handler, _validate_entry
+from lambda_parser.handler import _validate_entry, handler
def _now_iso() -> str:
@@ -152,11 +150,7 @@ def test_handler_processes_manifest(self, mock_boto3, mock_iam):
}
mock_s3 = MagicMock()
- mock_s3.get_object.return_value = {
- "Body": MagicMock(
- read=MagicMock(return_value=json.dumps(manifest).encode())
- )
- }
+ mock_s3.get_object.return_value = {"Body": MagicMock(read=MagicMock(return_value=json.dumps(manifest).encode()))}
mock_boto3.client.return_value = mock_s3
result = handler(
diff --git a/skills/iam-departures-remediation/tests/test_reconciler.py b/skills/iam-departures-remediation/tests/test_reconciler.py
index b06f49f..58e8d34 100644
--- a/skills/iam-departures-remediation/tests/test_reconciler.py
+++ b/skills/iam-departures-remediation/tests/test_reconciler.py
@@ -3,29 +3,29 @@
from __future__ import annotations
import json
+import os
+
+# We test the source code directly by adding the src dir to path
+import sys
from datetime import datetime, timedelta, timezone
from unittest.mock import MagicMock, patch
import pytest
-# We test the source code directly by adding the src dir to path
-import sys
-import os
-
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "src"))
+from reconciler.change_detect import ChangeDetector
+from reconciler.export import S3Exporter
from reconciler.sources import (
DepartureRecord,
RemediationStatus,
TerminationSource,
get_source,
)
-from reconciler.change_detect import ChangeDetector
-from reconciler.export import S3Exporter
-
# ── Fixtures ────────────────────────────────────────────────────────
+
def _now() -> datetime:
return datetime.now(timezone.utc)
@@ -63,6 +63,7 @@ def _make_record(
# ── DepartureRecord Tests ──────────────────────────────────────────
+
class TestDepartureRecord:
"""Test the core DepartureRecord data model."""
@@ -180,6 +181,7 @@ def test_to_dict_serializable(self):
# ── ChangeDetector Tests ────────────────────────────────────────────
+
class TestChangeDetector:
"""Test change detection via content hashing."""
@@ -208,9 +210,7 @@ def test_same_data_not_changed(self):
expected_hash = detector.compute_hash(records)
# Mock S3 returning the same hash
- s3.get_object.return_value = {
- "Body": MagicMock(read=MagicMock(return_value=expected_hash.encode()))
- }
+ s3.get_object.return_value = {"Body": MagicMock(read=MagicMock(return_value=expected_hash.encode()))}
changed, _ = detector.has_changed(records)
assert changed is False
@@ -218,9 +218,7 @@ def test_same_data_not_changed(self):
def test_different_data_changed(self):
"""Different data → different hash → changed."""
s3 = MagicMock()
- s3.get_object.return_value = {
- "Body": MagicMock(read=MagicMock(return_value=b"old_hash_value_here"))
- }
+ s3.get_object.return_value = {"Body": MagicMock(read=MagicMock(return_value=b"old_hash_value_here"))}
detector = ChangeDetector(s3, "my-bucket")
records = [_make_record()]
@@ -253,6 +251,7 @@ def test_store_hash_uses_kms(self):
# ── S3Exporter Tests ────────────────────────────────────────────────
+
class TestS3Exporter:
"""Test S3 manifest export."""
@@ -300,6 +299,7 @@ def test_skip_reasons_categorized(self):
# ── Source Factory Tests ────────────────────────────────────────────
+
class TestSourceFactory:
"""Test the get_source factory."""
@@ -307,26 +307,35 @@ def test_unknown_source_raises(self):
with pytest.raises(ValueError, match="Unknown HR source"):
get_source("oracle")
- @patch.dict(os.environ, {
- "SNOWFLAKE_ACCOUNT": "test",
- "SNOWFLAKE_USER": "user",
- "SNOWFLAKE_PASSWORD": "pass",
- })
+ @patch.dict(
+ os.environ,
+ {
+ "SNOWFLAKE_ACCOUNT": "test",
+ "SNOWFLAKE_USER": "user",
+ "SNOWFLAKE_PASSWORD": "pass",
+ },
+ )
def test_snowflake_source_creation(self):
source = get_source("snowflake")
assert source.__class__.__name__ == "SnowflakeSource"
- @patch.dict(os.environ, {
- "DATABRICKS_HOST": "test.cloud.databricks.com",
- "DATABRICKS_TOKEN": "token",
- })
+ @patch.dict(
+ os.environ,
+ {
+ "DATABRICKS_HOST": "test.cloud.databricks.com",
+ "DATABRICKS_TOKEN": "token",
+ },
+ )
def test_databricks_source_creation(self):
source = get_source("databricks")
assert source.__class__.__name__ == "DatabricksSource"
- @patch.dict(os.environ, {
- "CLICKHOUSE_HOST": "test.clickhouse.cloud",
- })
+ @patch.dict(
+ os.environ,
+ {
+ "CLICKHOUSE_HOST": "test.clickhouse.cloud",
+ },
+ )
def test_clickhouse_source_creation(self):
source = get_source("clickhouse")
assert source.__class__.__name__ == "ClickHouseSource"
diff --git a/skills/iam-departures-remediation/tests/test_worker_lambda.py b/skills/iam-departures-remediation/tests/test_worker_lambda.py
index 2f58c4d..802c435 100644
--- a/skills/iam-departures-remediation/tests/test_worker_lambda.py
+++ b/skills/iam-departures-remediation/tests/test_worker_lambda.py
@@ -4,21 +4,18 @@
import os
import sys
-from datetime import datetime, timezone
-from unittest.mock import MagicMock, patch, call
-
-import pytest
+from unittest.mock import MagicMock, patch
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "src"))
from lambda_worker.handler import (
- handler,
_deactivate_access_keys,
- _delete_login_profile,
- _remove_from_groups,
- _detach_managed_policies,
_delete_inline_policies,
+ _delete_login_profile,
_delete_mfa_devices,
+ _detach_managed_policies,
+ _remove_from_groups,
+ handler,
)
@@ -48,10 +45,12 @@ def test_deactivate_access_keys(self):
"""Should deactivate then delete all access keys."""
iam = MagicMock()
iam.get_paginator.return_value.paginate.return_value = [
- {"AccessKeyMetadata": [
- {"AccessKeyId": "AKIA111", "Status": "Active"},
- {"AccessKeyId": "AKIA222", "Status": "Active"},
- ]}
+ {
+ "AccessKeyMetadata": [
+ {"AccessKeyId": "AKIA111", "Status": "Active"},
+ {"AccessKeyId": "AKIA222", "Status": "Active"},
+ ]
+ }
]
actions = []
_deactivate_access_keys(iam, "jane", actions)
@@ -87,10 +86,12 @@ def test_delete_login_profile_not_found(self):
def test_remove_from_groups(self):
iam = MagicMock()
iam.get_paginator.return_value.paginate.return_value = [
- {"Groups": [
- {"GroupName": "developers"},
- {"GroupName": "admin"},
- ]}
+ {
+ "Groups": [
+ {"GroupName": "developers"},
+ {"GroupName": "admin"},
+ ]
+ }
]
actions = []
_remove_from_groups(iam, "jane", actions)
@@ -101,9 +102,11 @@ def test_remove_from_groups(self):
def test_detach_managed_policies(self):
iam = MagicMock()
iam.get_paginator.return_value.paginate.return_value = [
- {"AttachedPolicies": [
- {"PolicyName": "ReadOnly", "PolicyArn": "arn:aws:iam::aws:policy/ReadOnlyAccess"},
- ]}
+ {
+ "AttachedPolicies": [
+ {"PolicyName": "ReadOnly", "PolicyArn": "arn:aws:iam::aws:policy/ReadOnlyAccess"},
+ ]
+ }
]
actions = []
_detach_managed_policies(iam, "jane", actions)
@@ -113,9 +116,7 @@ def test_detach_managed_policies(self):
def test_delete_inline_policies(self):
iam = MagicMock()
- iam.get_paginator.return_value.paginate.return_value = [
- {"PolicyNames": ["custom-policy-1", "custom-policy-2"]}
- ]
+ iam.get_paginator.return_value.paginate.return_value = [{"PolicyNames": ["custom-policy-1", "custom-policy-2"]}]
actions = []
_delete_inline_policies(iam, "jane", actions)
@@ -125,9 +126,11 @@ def test_delete_inline_policies(self):
def test_delete_mfa_devices(self):
iam = MagicMock()
iam.get_paginator.return_value.paginate.return_value = [
- {"MFADevices": [
- {"SerialNumber": "arn:aws:iam::123:mfa/jane"},
- ]}
+ {
+ "MFADevices": [
+ {"SerialNumber": "arn:aws:iam::123:mfa/jane"},
+ ]
+ }
]
actions = []
_delete_mfa_devices(iam, "jane", actions)
@@ -156,12 +159,21 @@ def test_successful_remediation(self, mock_iam, mock_audit):
# Simplify — mock each paginator to return empty
def mock_paginate(*args, **kwargs):
paginator = MagicMock()
- paginator.paginate.return_value = iter([
- {"AccessKeyMetadata": [], "Groups": [], "AttachedPolicies": [],
- "PolicyNames": [], "MFADevices": [], "Certificates": [],
- "SSHPublicKeys": []}
- ])
+ paginator.paginate.return_value = iter(
+ [
+ {
+ "AccessKeyMetadata": [],
+ "Groups": [],
+ "AttachedPolicies": [],
+ "PolicyNames": [],
+ "MFADevices": [],
+ "Certificates": [],
+ "SSHPublicKeys": [],
+ }
+ ]
+ )
return paginator
+
iam.get_paginator.side_effect = mock_paginate
iam.list_service_specific_credentials.return_value = {"ServiceSpecificCredentials": []}
iam.exceptions.NoSuchEntityException = type("E", (Exception,), {})
diff --git a/skills/vuln-remediation-pipeline/src/lambda_patcher/handler.py b/skills/vuln-remediation-pipeline/src/lambda_patcher/handler.py
index e8f5d94..d45f46d 100644
--- a/skills/vuln-remediation-pipeline/src/lambda_patcher/handler.py
+++ b/skills/vuln-remediation-pipeline/src/lambda_patcher/handler.py
@@ -107,9 +107,7 @@ def patch_dependency(finding: dict[str, Any], mode: str = "pr") -> PatchResult:
return _create_pr(vuln_id, pkg, fixed, ecosystem, cmd)
-def _apply_direct(
- vuln_id: str, pkg: str, version: str, cmd: str
-) -> PatchResult:
+def _apply_direct(vuln_id: str, pkg: str, version: str, cmd: str) -> PatchResult:
"""Apply fix directly to the working tree (P0 only)."""
try:
result = subprocess.run(
@@ -144,9 +142,7 @@ def _apply_direct(
)
-def _create_pr(
- vuln_id: str, pkg: str, version: str, ecosystem: str, cmd: str
-) -> PatchResult:
+def _create_pr(vuln_id: str, pkg: str, version: str, ecosystem: str, cmd: str) -> PatchResult:
"""Create a PR with the dependency upgrade."""
branch = f"security/{vuln_id}-{pkg}-{version}".replace(":", "-").lower()
title = f"fix({ecosystem}): upgrade {pkg} to {version} [{vuln_id}]"
@@ -203,9 +199,7 @@ def _create_pr(
# ---------------------------------------------------------------------------
-def rotate_credential(
- credential_type: str, credential_id: str, server_name: str
-) -> PatchResult:
+def rotate_credential(credential_type: str, credential_id: str, server_name: str) -> PatchResult:
"""Rotate an exposed credential via Secrets Manager.
Deactivates old credential (does NOT delete) for rollback window.
@@ -216,9 +210,7 @@ def rotate_credential(
if credential_type == "aws_access_key":
return _rotate_aws_key(vuln_id, credential_id, server_name)
# All other types: use Secrets Manager rotation
- return _rotate_via_secrets_manager(
- vuln_id, credential_type, credential_id, server_name
- )
+ return _rotate_via_secrets_manager(vuln_id, credential_type, credential_id, server_name)
except Exception as e:
return PatchResult(
vuln_id=vuln_id,
@@ -229,9 +221,7 @@ def rotate_credential(
)
-def _rotate_aws_key(
- vuln_id: str, access_key_id: str, server_name: str
-) -> PatchResult:
+def _rotate_aws_key(vuln_id: str, access_key_id: str, server_name: str) -> PatchResult:
"""Rotate an AWS access key: create new → deactivate old."""
iam = boto3.client("iam")
@@ -253,9 +243,7 @@ def _rotate_aws_key(
new_key_id = new_key["AccessKey"]["AccessKeyId"]
# Deactivate old key (NOT delete — rollback window)
- iam.update_access_key(
- UserName=username, AccessKeyId=access_key_id, Status="Inactive"
- )
+ iam.update_access_key(UserName=username, AccessKeyId=access_key_id, Status="Inactive")
# Store new key in Secrets Manager for retrieval
sm = boto3.client("secretsmanager")
@@ -301,9 +289,7 @@ def _rotate_aws_key(
)
-def _rotate_via_secrets_manager(
- vuln_id: str, cred_type: str, cred_id: str, server_name: str
-) -> PatchResult:
+def _rotate_via_secrets_manager(vuln_id: str, cred_type: str, cred_id: str, server_name: str) -> PatchResult:
"""Trigger Secrets Manager rotation for non-AWS credentials."""
sm = boto3.client("secretsmanager")
secret_name = f"vuln-remediation/{server_name}/{cred_type}"
@@ -332,9 +318,7 @@ def _rotate_via_secrets_manager(
# ---------------------------------------------------------------------------
-def quarantine_server(
- server_name: str, vuln_id: str, reason: str
-) -> PatchResult:
+def quarantine_server(server_name: str, vuln_id: str, reason: str) -> PatchResult:
"""Quarantine an MCP server by tagging its config and logging the action.
Quarantine is reversible — when a fix becomes available, the pipeline
@@ -494,9 +478,7 @@ def handler(event: dict[str, Any], context: Any) -> dict[str, Any]:
# Handle credential findings if present
for cred in event.get("credentials", []):
- result = rotate_credential(
- cred["type"], cred["id"], cred["server_name"]
- )
+ result = rotate_credential(cred["type"], cred["id"], cred["server_name"])
results.append(result.to_dict())
_log_result(result, timestamp)
diff --git a/skills/vuln-remediation-pipeline/src/lambda_triage/handler.py b/skills/vuln-remediation-pipeline/src/lambda_triage/handler.py
index d5efaa1..13203a4 100644
--- a/skills/vuln-remediation-pipeline/src/lambda_triage/handler.py
+++ b/skills/vuln-remediation-pipeline/src/lambda_triage/handler.py
@@ -18,9 +18,7 @@
REMEDIATION_TABLE = os.environ.get("REMEDIATION_TABLE", "vuln-remediation-audit")
FINDINGS_BUCKET = os.environ.get("FINDINGS_BUCKET", "vuln-remediation-findings")
GRACE_PERIOD_HOURS = int(os.environ.get("GRACE_PERIOD_HOURS", "2"))
-PROTECTED_PACKAGES_SSM = os.environ.get(
- "PROTECTED_PACKAGES_SSM", "/vuln-remediation/protected-packages"
-)
+PROTECTED_PACKAGES_SSM = os.environ.get("PROTECTED_PACKAGES_SSM", "/vuln-remediation/protected-packages")
class Tier(str, Enum):
@@ -172,9 +170,7 @@ def _is_already_remediated(vuln_id: str, package_name: str) -> bool:
try:
ddb = boto3.resource("dynamodb")
table = ddb.Table(REMEDIATION_TABLE)
- resp = table.get_item(
- Key={"vuln_id": vuln_id, "package_name": package_name}
- )
+ resp = table.get_item(Key={"vuln_id": vuln_id, "package_name": package_name})
item = resp.get("Item")
if not item:
return False
@@ -275,9 +271,7 @@ def handler(event: dict[str, Any], context: Any) -> dict[str, Any]:
actionable = [t for t in triaged if t.tier != Tier.SKIP]
skipped = [t for t in triaged if t.tier == Tier.SKIP]
- logger.info(
- "Triage complete: %d actionable, %d skipped", len(actionable), len(skipped)
- )
+ logger.info("Triage complete: %d actionable, %d skipped", len(actionable), len(skipped))
# Log skipped to DynamoDB
_log_skipped(skipped)
diff --git a/skills/vuln-remediation-pipeline/src/orchestrator/ingest.py b/skills/vuln-remediation-pipeline/src/orchestrator/ingest.py
index 6fcb547..005b970 100644
--- a/skills/vuln-remediation-pipeline/src/orchestrator/ingest.py
+++ b/skills/vuln-remediation-pipeline/src/orchestrator/ingest.py
@@ -32,7 +32,7 @@ def load_findings(path: str | Path) -> dict[str, Any]:
path = Path(path)
if not path.exists():
raise FileNotFoundError(f"Findings file not found: {path}")
- if not path.suffix.lower() in (".json", ".sarif"):
+ if path.suffix.lower() not in (".json", ".sarif"):
raise ValueError(f"Unsupported file format: {path.suffix}")
with open(path) as f: