Compare commits
44 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 8bc2dd71b2 | |||
| 4dd5baadf6 | |||
| 934d995661 | |||
| ccdc01ed7b | |||
| d84099e87a | |||
| cf55f7eb43 | |||
| 9293c7b58d | |||
| a883bb30d4 | |||
| e476bbde2d | |||
| 7f3dcdf02f | |||
| 132e79df89 | |||
| b2ed9ee221 | |||
| def2d3d188 | |||
| 1090ed59b7 | |||
| 67e4b1a082 | |||
| 7478ec9420 | |||
| a30b6623ed | |||
| 15bc317ec4 | |||
| 1536102784 | |||
| 1b99550572 | |||
| 6dfa135755 | |||
| 80482da1cb | |||
| 9cedbd3582 | |||
| c3d1c5c5f7 | |||
| 1fd6c51af6 | |||
| adbfc0bcd1 | |||
| 8f041f6f52 | |||
| 1b0e12ec51 | |||
| 759f7b84d6 | |||
| 0b26c1a39c | |||
| fc7fbddfe7 | |||
| 500b395125 | |||
| a1961d6d5f | |||
| a7e988c361 | |||
| 02cdcb29db | |||
| 6e0d7866cd | |||
| 4b71f37c91 | |||
| cdfbe5b2e3 | |||
| 1b6a459df4 | |||
| 73c0305dc4 | |||
| 0e01e67257 | |||
| 1ad329f9cf | |||
| d03d1d2393 | |||
| 832516be2a |
@@ -145,7 +145,7 @@ SENTRY_RELEASE=local
|
||||
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}
|
||||
|
||||
#### Prowler release version ####
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.26.0
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.27.0
|
||||
|
||||
# Social login credentials
|
||||
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
|
||||
|
||||
@@ -26,6 +26,10 @@ inputs:
|
||||
description: 'Whether to enable Poetry dependency caching via actions/setup-python'
|
||||
required: false
|
||||
default: 'true'
|
||||
extras:
|
||||
description: 'Comma-separated list of project extras to install (e.g. "okta"). Pass "all" to install every extra.'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
@@ -87,8 +91,20 @@ runs:
|
||||
if: inputs.install-dependencies == 'true'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
env:
|
||||
INPUTS_EXTRAS: ${{ inputs.extras }}
|
||||
run: |
|
||||
poetry install --no-root
|
||||
if [ "$INPUTS_EXTRAS" = "all" ]; then
|
||||
poetry install --no-root --all-extras
|
||||
elif [ -n "$INPUTS_EXTRAS" ]; then
|
||||
EXTRAS_ARGS=""
|
||||
for extra in ${INPUTS_EXTRAS//,/ }; do
|
||||
EXTRAS_ARGS="$EXTRAS_ARGS -E $extra"
|
||||
done
|
||||
poetry install --no-root $EXTRAS_ARGS
|
||||
else
|
||||
poetry install --no-root
|
||||
fi
|
||||
poetry run pip list
|
||||
|
||||
- name: Update Prowler Cloud API Client
|
||||
|
||||
@@ -72,6 +72,11 @@ provider/vercel:
|
||||
- any-glob-to-any-file: "prowler/providers/vercel/**"
|
||||
- any-glob-to-any-file: "tests/providers/vercel/**"
|
||||
|
||||
provider/okta:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/okta/**"
|
||||
- any-glob-to-any-file: "tests/providers/okta/**"
|
||||
|
||||
github_actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: ".github/workflows/*"
|
||||
@@ -109,6 +114,8 @@ mutelist:
|
||||
- any-glob-to-any-file: "tests/providers/googleworkspace/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/vercel/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/vercel/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/okta/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/okta/lib/mutelist/**"
|
||||
|
||||
integration/s3:
|
||||
- changed-files:
|
||||
|
||||
@@ -76,6 +76,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: all
|
||||
|
||||
- name: Check Poetry lock file
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
|
||||
@@ -92,6 +92,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: '3.12'
|
||||
extras: all
|
||||
|
||||
- name: Security scan with Bandit
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
|
||||
@@ -97,6 +97,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: all
|
||||
|
||||
# AWS Provider
|
||||
- name: Check if AWS files changed
|
||||
@@ -324,6 +325,30 @@ jobs:
|
||||
flags: prowler-py${{ matrix.python-version }}-github
|
||||
files: ./github_coverage.xml
|
||||
|
||||
# Okta Provider
|
||||
- name: Check if Okta files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-okta
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/okta/**
|
||||
./tests/**/okta/**
|
||||
./poetry.lock
|
||||
|
||||
- name: Run Okta tests
|
||||
if: steps.changed-okta.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/okta --cov-report=xml:okta_coverage.xml tests/providers/okta
|
||||
|
||||
- name: Upload Okta coverage to Codecov
|
||||
if: steps.changed-okta.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: prowler-py${{ matrix.python-version }}-okta
|
||||
files: ./okta_coverage.xml
|
||||
|
||||
# NHN Provider
|
||||
- name: Check if NHN files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
name: 'UI: Tests'
|
||||
name: "UI: Tests"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
- "master"
|
||||
- "v5.*"
|
||||
pull_request:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
- "master"
|
||||
- "v5.*"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
@@ -16,7 +16,7 @@ concurrency:
|
||||
|
||||
env:
|
||||
UI_WORKING_DIR: ./ui
|
||||
NODE_VERSION: '24.13.0'
|
||||
NODE_VERSION: "24.13.0"
|
||||
|
||||
permissions: {}
|
||||
|
||||
@@ -42,6 +42,9 @@ jobs:
|
||||
fonts.gstatic.com:443
|
||||
api.github.com:443
|
||||
release-assets.githubusercontent.com:443
|
||||
cdn.playwright.dev:443
|
||||
objects.githubusercontent.com:443
|
||||
playwright.download.prss.microsoft.com:443
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
@@ -133,7 +136,7 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed == 'true'
|
||||
run: |
|
||||
echo "Critical paths changed - running ALL unit tests"
|
||||
pnpm run test:run
|
||||
pnpm run test:unit
|
||||
|
||||
- name: Run unit tests (related to changes only)
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed != 'true' && steps.changed-source.outputs.all_changed_files != ''
|
||||
@@ -142,7 +145,7 @@ jobs:
|
||||
echo "${STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES}"
|
||||
# Convert space-separated to vitest related format (remove ui/ prefix for relative paths)
|
||||
CHANGED_FILES=$(echo "${STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES}" | tr ' ' '\n' | sed 's|^ui/||' | tr '\n' ' ')
|
||||
pnpm exec vitest related $CHANGED_FILES --run
|
||||
pnpm exec vitest related $CHANGED_FILES --run --project unit
|
||||
env:
|
||||
STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES: ${{ steps.changed-source.outputs.all_changed_files }}
|
||||
|
||||
@@ -150,7 +153,25 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed != 'true' && steps.changed-source.outputs.all_changed_files == ''
|
||||
run: |
|
||||
echo "Only test files changed - running ALL unit tests"
|
||||
pnpm run test:run
|
||||
pnpm run test:unit
|
||||
|
||||
- name: Cache Playwright browsers
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: playwright-cache
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-chromium-${{ hashFiles('ui/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-playwright-chromium-
|
||||
|
||||
- name: Install Playwright Chromium browser
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.playwright-cache.outputs.cache-hit != 'true'
|
||||
run: pnpm exec playwright install chromium
|
||||
|
||||
- name: Run browser tests
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: pnpm run test:browser
|
||||
|
||||
- name: Build application
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
|
||||
@@ -44,7 +44,9 @@ repos:
|
||||
rev: v1.24.1
|
||||
hooks:
|
||||
- id: zizmor
|
||||
files: ^\.github/
|
||||
# zizmor only audits workflows, composite actions and dependabot
|
||||
# config; broader paths trip exit 3 ("no audit was performed").
|
||||
files: ^\.github/(workflows|actions)/.+\.ya?ml$|^\.github/dependabot\.ya?ml$
|
||||
priority: 30
|
||||
|
||||
## BASH
|
||||
|
||||
@@ -15,7 +15,7 @@ Use these skills for detailed patterns on-demand:
|
||||
|-------|-------------|-----|
|
||||
| `typescript` | Const types, flat interfaces, utility types | [SKILL.md](skills/typescript/SKILL.md) |
|
||||
| `react-19` | No useMemo/useCallback, React Compiler | [SKILL.md](skills/react-19/SKILL.md) |
|
||||
| `nextjs-15` | App Router, Server Actions, streaming | [SKILL.md](skills/nextjs-15/SKILL.md) |
|
||||
| `nextjs-16` | App Router, Server Actions, proxy.ts, streaming | [SKILL.md](skills/nextjs-16/SKILL.md) |
|
||||
| `tailwind-4` | cn() utility, no var() in className | [SKILL.md](skills/tailwind-4/SKILL.md) |
|
||||
| `playwright` | Page Object Model, MCP workflow, selectors | [SKILL.md](skills/playwright/SKILL.md) |
|
||||
| `pytest` | Fixtures, mocking, markers, parametrize | [SKILL.md](skills/pytest/SKILL.md) |
|
||||
@@ -60,11 +60,14 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
|--------|-------|
|
||||
| Add changelog entry for a PR or feature | `prowler-changelog` |
|
||||
| Adding DRF pagination or permissions | `django-drf` |
|
||||
| Adding a compliance output formatter (per-provider class + table dispatcher) | `prowler-compliance` |
|
||||
| Adding indexes or constraints to database tables | `django-migration-psql` |
|
||||
| Adding new providers | `prowler-provider` |
|
||||
| Adding privilege escalation detection queries | `prowler-attack-paths-query` |
|
||||
| Adding services to existing providers | `prowler-provider` |
|
||||
| After creating/modifying a skill | `skill-sync` |
|
||||
| App Router / Server Actions | `nextjs-15` |
|
||||
| App Router / Server Actions | `nextjs-16` |
|
||||
| Auditing check-to-requirement mappings as a cloud auditor | `prowler-compliance` |
|
||||
| Building AI chat features | `ai-sdk-5` |
|
||||
| Committing changes | `prowler-commit` |
|
||||
| Configuring MCP servers in agentic workflows | `gh-aw` |
|
||||
@@ -78,6 +81,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Creating a git commit | `prowler-commit` |
|
||||
| Creating new checks | `prowler-sdk-check` |
|
||||
| Creating new skills | `skill-creator` |
|
||||
| Creating or reviewing Django migrations | `django-migration-psql` |
|
||||
| Creating/modifying Prowler UI components | `prowler-ui` |
|
||||
| Creating/modifying models, views, serializers | `prowler-api` |
|
||||
| Creating/updating compliance frameworks | `prowler-compliance` |
|
||||
@@ -85,6 +89,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Debugging gh-aw compilation errors | `gh-aw` |
|
||||
| Fill .github/pull_request_template.md (Context/Description/Steps to review/Checklist) | `prowler-pr` |
|
||||
| Fixing bug | `tdd` |
|
||||
| Fixing compliance JSON bugs (duplicate IDs, empty Section, stale refs) | `prowler-compliance` |
|
||||
| General Prowler development questions | `prowler` |
|
||||
| Implementing JSON:API endpoints | `django-drf` |
|
||||
| Implementing feature | `tdd` |
|
||||
@@ -102,6 +107,8 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Review changelog format and conventions | `prowler-changelog` |
|
||||
| Reviewing JSON:API compliance | `jsonapi` |
|
||||
| Reviewing compliance framework PRs | `prowler-compliance-review` |
|
||||
| Running makemigrations or pgmakemigrations | `django-migration-psql` |
|
||||
| Syncing compliance framework with upstream catalog | `prowler-compliance` |
|
||||
| Testing RLS tenant isolation | `prowler-test-api` |
|
||||
| Testing hooks or utilities | `vitest` |
|
||||
| Troubleshoot why a skill is missing from AGENTS.md auto-invoke | `skill-sync` |
|
||||
@@ -129,6 +136,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Writing React components | `react-19` |
|
||||
| Writing TypeScript types/interfaces | `typescript` |
|
||||
| Writing Vitest tests | `vitest` |
|
||||
| Writing data backfill or data migration | `django-migration-psql` |
|
||||
| Writing documentation | `prowler-docs` |
|
||||
| Writing unit tests for UI | `vitest` |
|
||||
|
||||
@@ -142,7 +150,7 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,
|
||||
|-----------|----------|------------|
|
||||
| SDK | `prowler/` | Python 3.10+, Poetry 2.3+ |
|
||||
| API | `api/` | Django 5.1, DRF, Celery |
|
||||
| UI | `ui/` | Next.js 15, React 19, Tailwind 4 |
|
||||
| UI | `ui/` | Next.js 16, React 19, Tailwind 4 |
|
||||
| MCP Server | `mcp_server/` | FastMCP, Python 3.12+ |
|
||||
| Dashboard | `dashboard/` | Dash, Plotly |
|
||||
|
||||
|
||||
@@ -1,11 +1,34 @@
|
||||
# Do you want to learn on how to...
|
||||
|
||||
- Contribute with your code or fixes to Prowler
|
||||
- Create a new check for a provider
|
||||
- Create a new security compliance framework
|
||||
- Add a custom output format
|
||||
- Add a new integration
|
||||
- Contribute with documentation
|
||||
- [Contribute with your code or fixes to Prowler](https://docs.prowler.com/developer-guide/introduction)
|
||||
- [Create a new provider](https://docs.prowler.com/developer-guide/provider)
|
||||
- [Create a new service](https://docs.prowler.com/developer-guide/services)
|
||||
- [Create a new check for a provider](https://docs.prowler.com/developer-guide/checks)
|
||||
- [Create a new security compliance framework](https://docs.prowler.com/developer-guide/security-compliance-framework)
|
||||
- [Add a custom output format](https://docs.prowler.com/developer-guide/outputs)
|
||||
- [Add a new integration](https://docs.prowler.com/developer-guide/integrations)
|
||||
- [Contribute with documentation](https://docs.prowler.com/developer-guide/documentation)
|
||||
- [Write unit tests](https://docs.prowler.com/developer-guide/unit-testing)
|
||||
- [Write integration tests](https://docs.prowler.com/developer-guide/integration-testing)
|
||||
- [Write end-to-end tests](https://docs.prowler.com/developer-guide/end2end-testing)
|
||||
- [Debug Prowler](https://docs.prowler.com/developer-guide/debugging)
|
||||
- [Configure checks](https://docs.prowler.com/developer-guide/configurable-checks)
|
||||
- [Rename checks](https://docs.prowler.com/developer-guide/renaming-checks)
|
||||
- [Follow the check metadata guidelines](https://docs.prowler.com/developer-guide/check-metadata-guidelines)
|
||||
- [Extend the MCP server](https://docs.prowler.com/developer-guide/mcp-server)
|
||||
- [Extend Lighthouse AI](https://docs.prowler.com/developer-guide/lighthouse-architecture)
|
||||
- [Add AI skills](https://docs.prowler.com/developer-guide/ai-skills)
|
||||
|
||||
Provider-specific developer notes:
|
||||
|
||||
- [AWS](https://docs.prowler.com/developer-guide/aws-details)
|
||||
- [Azure](https://docs.prowler.com/developer-guide/azure-details)
|
||||
- [Google Cloud](https://docs.prowler.com/developer-guide/gcp-details)
|
||||
- [Alibaba Cloud](https://docs.prowler.com/developer-guide/alibabacloud-details)
|
||||
- [Kubernetes](https://docs.prowler.com/developer-guide/kubernetes-details)
|
||||
- [Microsoft 365](https://docs.prowler.com/developer-guide/m365-details)
|
||||
- [GitHub](https://docs.prowler.com/developer-guide/github-details)
|
||||
- [LLM](https://docs.prowler.com/developer-guide/llm-details)
|
||||
|
||||
Want some swag as appreciation for your contribution?
|
||||
|
||||
|
||||
@@ -89,7 +89,7 @@ ENV PATH="${HOME}/.local/bin:${PATH}"
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry==2.3.4
|
||||
|
||||
RUN poetry install --compile && \
|
||||
RUN poetry install --compile --all-extras && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
# Install PowerShell modules
|
||||
|
||||
@@ -117,9 +117,10 @@ Every AWS provider scan will enqueue an Attack Paths ingestion job automatically
|
||||
| MongoDB Atlas | 10 | 3 | 0 | 8 | Official | UI, API, CLI |
|
||||
| LLM | [See `promptfoo` docs.](https://www.promptfoo.dev/docs/red-team/plugins/) | N/A | N/A | N/A | Official | CLI |
|
||||
| Image | N/A | N/A | N/A | N/A | Official | CLI, API |
|
||||
| Google Workspace | 25 | 4 | 2 | 4 | Official | CLI |
|
||||
| Google Workspace | 25 | 4 | 2 | 4 | Official | UI, API, CLI |
|
||||
| OpenStack | 34 | 5 | 0 | 9 | Official | UI, API, CLI |
|
||||
| Vercel | 26 | 6 | 0 | 5 | Official | CLI |
|
||||
| Vercel | 26 | 6 | 0 | 5 | Official | UI, API, CLI |
|
||||
| Okta | 1 | 1 | 0 | 1 | Official | CLI |
|
||||
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |
|
||||
|
||||
> [!Note]
|
||||
|
||||
@@ -2,14 +2,44 @@
|
||||
|
||||
All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
## [1.27.0] (Prowler UNRELEASED)
|
||||
## [1.28.0] (Prowler UNRELEASED)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- GIN index on `findings(categories, resource_services, resource_regions, resource_types)` to speed up `/api/v1/finding-groups` array filters [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- Remove orphaned `gin_resources_search_idx` declaration from `Resource.Meta.indexes` (DB index dropped in `0072_drop_unused_indexes`) [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.2] (Prowler UNRELEASED)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- Attack Paths: BEDROCK-001 and BEDROCK-002 now target roles trusting `bedrock-agentcore.amazonaws.com` instead of `bedrock.amazonaws.com`, eliminating false positives against regular Bedrock service roles (Agents, Knowledge Bases, model invocation) [(#11141)](https://github.com/prowler-cloud/prowler/pull/11141)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.1] (Prowler v5.26.1)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- `POST /api/v1/scans` was intermittently failing with `Scan matching query does not exist` in the `scan-perform` worker; the Celery task is now published via `transaction.on_commit` so the worker cannot read the Scan before the dispatch-wide transaction commits [(#11122)](https://github.com/prowler-cloud/prowler/pull/11122)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.0] (Prowler v5.26.0)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- New `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929)
|
||||
- ASD Essential Eight (AWS) compliance framework support [(#10982)](https://github.com/prowler-cloud/prowler/pull/10982)
|
||||
- `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- ASD Essential Eight (AWS) compliance framework support [(#10982)](https://github.com/prowler-cloud/prowler/pull/10982)
|
||||
|
||||
### 🔐 Security
|
||||
|
||||
- `trivy` binary from 0.69.2 to 0.70.0 and `cryptography` from 46.0.6 to 46.0.7 (transitive via prowler SDK) in the API image for CVE-2026-33186 and CVE-2026-39892 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)
|
||||
|
||||
@@ -50,7 +50,7 @@ name = "prowler-api"
|
||||
package-mode = false
|
||||
# Needed for the SDK compatibility
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.27.0"
|
||||
version = "1.28.0"
|
||||
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
@@ -484,8 +484,8 @@ AWS_BEDROCK_PRIVESC_PASSROLE_CODE_INTERPRETER = AttackPathsQueryDefinition(
|
||||
OR action = '*'
|
||||
)
|
||||
|
||||
// Find roles that trust Bedrock service (can be passed to Bedrock)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
|
||||
// Find roles that trust the Bedrock AgentCore service (can be passed to a code interpreter)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
|
||||
WHERE any(resource IN stmt_passrole.resource WHERE
|
||||
resource = '*'
|
||||
OR target_role.arn CONTAINS resource
|
||||
@@ -536,8 +536,8 @@ AWS_BEDROCK_PRIVESC_INVOKE_CODE_INTERPRETER = AttackPathsQueryDefinition(
|
||||
OR action = '*'
|
||||
)
|
||||
|
||||
// Find roles that trust Bedrock service (already attached to existing code interpreters)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
|
||||
// Find roles that trust the Bedrock AgentCore service (already attached to existing code interpreters)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
|
||||
|
||||
WITH collect(path_principal) + collect(path_target) AS paths
|
||||
UNWIND paths AS p
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0090_attack_paths_cleanup_priority"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_arrays_idx",
|
||||
columns="categories, resource_services, resource_regions, resource_types",
|
||||
method="GIN",
|
||||
all_partitions=True,
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_arrays_idx",
|
||||
),
|
||||
)
|
||||
]
|
||||
@@ -0,0 +1,73 @@
|
||||
import django.contrib.postgres.indexes
|
||||
from django.db import migrations
|
||||
|
||||
INDEX_NAME = "gin_find_arrays_idx"
|
||||
PARENT_TABLE = "findings"
|
||||
|
||||
|
||||
def create_parent_and_attach(apps, schema_editor):
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
# Idempotent: the parent index may already exist if it was created
|
||||
# manually on an environment before this migration ran.
|
||||
cursor.execute(
|
||||
f"CREATE INDEX IF NOT EXISTS {INDEX_NAME} ON ONLY {PARENT_TABLE} "
|
||||
f"USING gin (categories, resource_services, resource_regions, resource_types)"
|
||||
)
|
||||
cursor.execute(
|
||||
"SELECT inhrelid::regclass::text "
|
||||
"FROM pg_inherits "
|
||||
"WHERE inhparent = %s::regclass",
|
||||
[PARENT_TABLE],
|
||||
)
|
||||
for (partition,) in cursor.fetchall():
|
||||
child_idx = f"{partition.replace('.', '_')}_{INDEX_NAME}"
|
||||
# ALTER INDEX ... ATTACH PARTITION has no IF NOT ATTACHED clause,
|
||||
# so check pg_inherits first to keep the migration re-runnable.
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT 1
|
||||
FROM pg_inherits i
|
||||
JOIN pg_class p ON p.oid = i.inhparent
|
||||
JOIN pg_class c ON c.oid = i.inhrelid
|
||||
WHERE p.relname = %s AND c.relname = %s
|
||||
""",
|
||||
[INDEX_NAME, child_idx],
|
||||
)
|
||||
if cursor.fetchone() is None:
|
||||
cursor.execute(f"ALTER INDEX {INDEX_NAME} ATTACH PARTITION {child_idx}")
|
||||
|
||||
|
||||
def drop_parent_index(apps, schema_editor):
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute(f"DROP INDEX IF EXISTS {INDEX_NAME}")
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0091_findings_arrays_gin_index_partitions"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.SeparateDatabaseAndState(
|
||||
state_operations=[
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
fields=[
|
||||
"categories",
|
||||
"resource_services",
|
||||
"resource_regions",
|
||||
"resource_types",
|
||||
],
|
||||
name=INDEX_NAME,
|
||||
),
|
||||
),
|
||||
],
|
||||
database_operations=[
|
||||
migrations.RunPython(
|
||||
create_parent_and_attach,
|
||||
reverse_code=drop_parent_index,
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -946,7 +946,6 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
OpClass(Upper("name"), name="gin_trgm_ops"),
|
||||
name="res_name_trgm_idx",
|
||||
),
|
||||
GinIndex(fields=["text_search"], name="gin_resources_search_idx"),
|
||||
models.Index(fields=["tenant_id", "id"], name="resources_tenant_id_idx"),
|
||||
models.Index(
|
||||
fields=["tenant_id", "provider_id"],
|
||||
@@ -1152,6 +1151,15 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
fields=["tenant_id", "scan_id", "check_id"],
|
||||
name="find_tenant_scan_check_idx",
|
||||
),
|
||||
GinIndex(
|
||||
fields=[
|
||||
"categories",
|
||||
"resource_services",
|
||||
"resource_regions",
|
||||
"resource_types",
|
||||
],
|
||||
name="gin_find_arrays_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
openapi: 3.0.3
|
||||
info:
|
||||
title: Prowler API
|
||||
version: 1.27.0
|
||||
version: 1.28.0
|
||||
description: |-
|
||||
Prowler API specification.
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import uuid
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
@@ -16,7 +17,7 @@ from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
|
||||
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
|
||||
from allauth.socialaccount.providers.saml.views import FinishACSView, LoginView
|
||||
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
|
||||
from celery import chain
|
||||
from celery import chain, states
|
||||
from celery.result import AsyncResult
|
||||
from config.custom_logging import BackendLogger
|
||||
from config.env import env
|
||||
@@ -60,6 +61,7 @@ from django.utils.dateparse import parse_date
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.cache import cache_control
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from django_celery_results.models import TaskResult
|
||||
from drf_spectacular.settings import spectacular_settings
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import (
|
||||
@@ -422,7 +424,7 @@ class SchemaView(SpectacularAPIView):
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
spectacular_settings.TITLE = "Prowler API"
|
||||
spectacular_settings.VERSION = "1.27.0"
|
||||
spectacular_settings.VERSION = "1.28.0"
|
||||
spectacular_settings.DESCRIPTION = (
|
||||
"Prowler API specification.\n\nThis file is auto-generated."
|
||||
)
|
||||
@@ -2534,28 +2536,45 @@ class ScanViewSet(BaseRLSViewSet):
|
||||
def create(self, request, *args, **kwargs):
|
||||
input_serializer = self.get_serializer(data=request.data)
|
||||
input_serializer.is_valid(raise_exception=True)
|
||||
|
||||
# Broker publish is deferred to on_commit so the worker cannot read
|
||||
# Scan before BaseRLSViewSet's dispatch-wide atomic commits.
|
||||
pre_task_id = str(uuid.uuid4())
|
||||
|
||||
with transaction.atomic():
|
||||
scan = input_serializer.save()
|
||||
with transaction.atomic():
|
||||
task = perform_scan_task.apply_async(
|
||||
kwargs={
|
||||
"tenant_id": self.request.tenant_id,
|
||||
"scan_id": str(scan.id),
|
||||
"provider_id": str(scan.provider_id),
|
||||
# Disabled for now
|
||||
# checks_to_execute=scan.scanner_args.get("checks_to_execute")
|
||||
},
|
||||
scan.task_id = pre_task_id
|
||||
scan.save(update_fields=["task_id"])
|
||||
|
||||
attack_paths_db_utils.create_attack_paths_scan(
|
||||
tenant_id=self.request.tenant_id,
|
||||
scan_id=str(scan.id),
|
||||
provider_id=str(scan.provider_id),
|
||||
)
|
||||
|
||||
attack_paths_db_utils.create_attack_paths_scan(
|
||||
tenant_id=self.request.tenant_id,
|
||||
scan_id=str(scan.id),
|
||||
provider_id=str(scan.provider_id),
|
||||
)
|
||||
task_result, _ = TaskResult.objects.get_or_create(
|
||||
task_id=pre_task_id,
|
||||
defaults={"status": states.PENDING, "task_name": "scan-perform"},
|
||||
)
|
||||
prowler_task, _ = Task.objects.update_or_create(
|
||||
id=pre_task_id,
|
||||
tenant_id=self.request.tenant_id,
|
||||
defaults={"task_runner_task": task_result},
|
||||
)
|
||||
|
||||
prowler_task = Task.objects.get(id=task.id)
|
||||
scan.task_id = task.id
|
||||
scan.save(update_fields=["task_id"])
|
||||
scan_kwargs = {
|
||||
"tenant_id": self.request.tenant_id,
|
||||
"scan_id": str(scan.id),
|
||||
"provider_id": str(scan.provider_id),
|
||||
# Disabled for now
|
||||
# checks_to_execute=scan.scanner_args.get("checks_to_execute")
|
||||
}
|
||||
|
||||
transaction.on_commit(
|
||||
lambda: perform_scan_task.apply_async(
|
||||
kwargs=scan_kwargs, task_id=pre_task_id
|
||||
)
|
||||
)
|
||||
|
||||
self.response_serializer_class = TaskSerializer
|
||||
output_serializer = self.get_serializer(prowler_task)
|
||||
|
||||
@@ -0,0 +1,335 @@
|
||||
# AWS Inventory Connectivity Graph
|
||||
|
||||
A community-contributed tool that generates interactive connectivity graphs from Prowler AWS scans, visualizing relationships between AWS resources with zero additional API calls.
|
||||
|
||||
## Overview
|
||||
|
||||
This tool extends Prowler by producing two artifacts after a scan completes:
|
||||
|
||||
- **`<output>.inventory.json`** – Machine-readable graph (nodes + edges)
|
||||
- **`<output>.inventory.html`** – Interactive D3.js force-directed visualization
|
||||
|
||||
### Why?
|
||||
|
||||
Prowler's existing outputs (CSV, ASFF, OCSF, HTML) report individual check findings but provide no cross-service topology view. Security engineers need to understand **how** resources are connected—which Lambda functions sit inside which VPC, which IAM roles can be assumed by which services, which event sources trigger which functions—before they can reason about attack paths, blast-radius, or lateral-movement risk.
|
||||
|
||||
This tool fills that gap by building a connectivity graph from the service clients that are already loaded during a Prowler scan.
|
||||
|
||||
## Features
|
||||
|
||||
### Supported AWS Services
|
||||
|
||||
The tool currently extracts connectivity information from:
|
||||
|
||||
- **Lambda** – Functions, VPC/subnet/SG edges, event source mappings, layers, DLQ, KMS
|
||||
- **EC2** – Instances, security groups, subnet/VPC edges
|
||||
- **VPC** – VPCs, subnets, peering connections
|
||||
- **RDS** – DB instances, VPC/SG/cluster/KMS edges
|
||||
- **ELBv2** – ALB/NLB load balancers, SG and VPC edges
|
||||
- **S3** – Buckets, replication targets, logging buckets, KMS keys
|
||||
- **IAM** – Roles, trust-relationship edges (who can assume what)
|
||||
|
||||
### Edge Semantic Types
|
||||
|
||||
Edges are typed for downstream filtering and attack-path analysis:
|
||||
|
||||
- `network` – Resources share a network path (VPC/subnet/SG)
|
||||
- `iam` – IAM trust or permission relationship
|
||||
- `triggers` – One resource can invoke another (event source → Lambda)
|
||||
- `data_flow` – Data is written/read (Lambda → SQS dead-letter queue)
|
||||
- `depends_on` – Soft dependency (Lambda layer, subnet belongs to VPC)
|
||||
- `routes_to` – Traffic routing (LB → target)
|
||||
- `replicates_to` – S3 replication
|
||||
- `encrypts` – KMS key encrypts the resource
|
||||
- `logs_to` – Logging relationship
|
||||
|
||||
### Interactive HTML Graph Features
|
||||
|
||||
- Force-directed layout with drag-and-drop node pinning
|
||||
- Zoom / pan (mouse wheel + click-drag on background)
|
||||
- Per-service color-coded nodes with a legend
|
||||
- Hover tooltips showing ARN + all metadata properties
|
||||
- Service filter dropdown (show only Lambda, EC2, RDS, etc.)
|
||||
- Adjustable link-distance and charge-strength physics sliders
|
||||
- Edge labels on every arrow
|
||||
|
||||
## Installation
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.9.1 or higher
|
||||
- Prowler installed and configured (see [Prowler documentation](https://docs.prowler.com/))
|
||||
|
||||
### Setup
|
||||
|
||||
1. Clone or download this directory to your local machine
|
||||
2. Ensure Prowler is installed and working
|
||||
3. No additional dependencies required beyond Prowler's existing requirements
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Usage
|
||||
|
||||
Run Prowler with your desired checks, then use the inventory graph script:
|
||||
|
||||
```bash
|
||||
# Run Prowler scan (example)
|
||||
prowler aws --output-formats csv
|
||||
|
||||
# Generate inventory graph from the scan
|
||||
python contrib/inventory-graph/inventory_graph.py --output-directory ./output
|
||||
```
|
||||
|
||||
### Command-Line Options
|
||||
|
||||
```bash
|
||||
python contrib/inventory-graph/inventory_graph.py [OPTIONS]
|
||||
|
||||
Options:
|
||||
--output-directory DIR Directory to save output files (default: ./output)
|
||||
--output-filename NAME Base filename without extension (default: prowler-inventory-<timestamp>)
|
||||
--help Show this help message and exit
|
||||
```
|
||||
|
||||
### Example Workflow
|
||||
|
||||
```bash
|
||||
# 1. Run a Prowler scan on your AWS account
|
||||
prowler aws --profile my-aws-profile --output-formats csv html
|
||||
|
||||
# 2. Generate the inventory graph
|
||||
python contrib/inventory-graph/inventory_graph.py \
|
||||
--output-directory ./output \
|
||||
--output-filename my-aws-inventory
|
||||
|
||||
# 3. Open the HTML file in your browser
|
||||
open output/my-aws-inventory.inventory.html
|
||||
```
|
||||
|
||||
### Integration with Prowler Scans
|
||||
|
||||
The tool reads from already-loaded AWS service clients in memory (via `sys.modules`). This means:
|
||||
|
||||
- **Zero extra AWS API calls** – Uses data already collected during the Prowler scan
|
||||
- **Graceful degradation** – Services not scanned are silently skipped
|
||||
- **Flexible** – Works with any subset of Prowler checks
|
||||
|
||||
## Output Files
|
||||
|
||||
### JSON Output (`*.inventory.json`)
|
||||
|
||||
Machine-readable graph structure:
|
||||
|
||||
```json
|
||||
{
|
||||
"generated_at": "2026-03-19T12:34:56Z",
|
||||
"nodes": [
|
||||
{
|
||||
"id": "arn:aws:lambda:us-east-1:123456789012:function:my-function",
|
||||
"type": "lambda_function",
|
||||
"name": "my-function",
|
||||
"service": "lambda",
|
||||
"region": "us-east-1",
|
||||
"account_id": "123456789012",
|
||||
"properties": {
|
||||
"runtime": "python3.9",
|
||||
"vpc_id": "vpc-abc123"
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"source_id": "arn:aws:lambda:...",
|
||||
"target_id": "arn:aws:ec2:...:vpc/vpc-abc123",
|
||||
"edge_type": "network",
|
||||
"label": "in-vpc"
|
||||
}
|
||||
],
|
||||
"stats": {
|
||||
"node_count": 42,
|
||||
"edge_count": 87
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### HTML Output (`*.inventory.html`)
|
||||
|
||||
Self-contained interactive visualization that opens in any modern browser. No server or build step required.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Design Decisions
|
||||
|
||||
| Decision | Rationale |
|
||||
|----------|-----------|
|
||||
| **Read from sys.modules** | Zero extra AWS API calls; services not scanned are silently skipped |
|
||||
| **Self-contained HTML** | D3.js v7 via CDN; no server, no build step; opens in any browser |
|
||||
| **One extractor per service** | Each extractor is independently testable; adding a new service = one new file + one line in the registry |
|
||||
| **Typed edges** | Semantic types allow downstream consumers (attack-path tools, Neo4j import) to filter by relationship class |
|
||||
|
||||
### Project Structure
|
||||
|
||||
```
|
||||
contrib/inventory-graph/
|
||||
├── README.md # This file
|
||||
├── inventory_graph.py # Main entry point script
|
||||
├── lib/
|
||||
│ ├── __init__.py
|
||||
│ ├── models.py # ResourceNode, ResourceEdge, ConnectivityGraph dataclasses
|
||||
│ ├── graph_builder.py # Reads loaded service clients from sys.modules
|
||||
│ ├── inventory_output.py # write_json(), write_html()
|
||||
│ └── extractors/
|
||||
│ ├── __init__.py
|
||||
│ ├── lambda_extractor.py # Lambda functions → VPC/subnet/SG/event-sources/layers/DLQ/KMS
|
||||
│ ├── ec2_extractor.py # EC2 instances + security groups → subnet/VPC
|
||||
│ ├── vpc_extractor.py # VPCs, subnets, peering connections
|
||||
│ ├── rds_extractor.py # RDS instances → VPC/SG/cluster/KMS
|
||||
│ ├── elbv2_extractor.py # ALB/NLB load balancers → SG/VPC
|
||||
│ ├── s3_extractor.py # S3 buckets → replication targets/logging buckets/KMS keys
|
||||
│ └── iam_extractor.py # IAM roles + trust-relationship edges
|
||||
└── examples/
|
||||
└── sample_output.html # Example output (optional)
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
### Smoke Test (No AWS Credentials Needed)
|
||||
|
||||
```python
|
||||
import sys
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
# Wire a fake Lambda client
|
||||
mock_module = MagicMock()
|
||||
mock_fn = MagicMock()
|
||||
mock_fn.arn = "arn:aws:lambda:us-east-1:123:function:test"
|
||||
mock_fn.name = "test"
|
||||
mock_fn.region = "us-east-1"
|
||||
mock_fn.vpc_id = "vpc-abc"
|
||||
mock_fn.security_groups = ["sg-111"]
|
||||
mock_fn.subnet_ids = {"subnet-aaa"}
|
||||
mock_fn.environment = None
|
||||
mock_fn.kms_key_arn = None
|
||||
mock_fn.layers = []
|
||||
mock_fn.dead_letter_config = None
|
||||
mock_fn.event_source_mappings = []
|
||||
mock_module.awslambda_client.functions = {mock_fn.arn: mock_fn}
|
||||
mock_module.awslambda_client.audited_account = "123"
|
||||
sys.modules["prowler.providers.aws.services.awslambda.awslambda_client"] = mock_module
|
||||
|
||||
from contrib.inventory_graph.lib.graph_builder import build_graph
|
||||
from contrib.inventory_graph.lib.inventory_output import write_json, write_html
|
||||
|
||||
graph = build_graph()
|
||||
write_json(graph, "/tmp/test.inventory.json")
|
||||
write_html(graph, "/tmp/test.inventory.html")
|
||||
# Open /tmp/test.inventory.html in a browser
|
||||
```
|
||||
|
||||
## Extending
|
||||
|
||||
### Adding a New Service
|
||||
|
||||
1. Create a new extractor file in `lib/extractors/` (e.g., `dynamodb_extractor.py`)
|
||||
2. Implement the `extract(client)` function that returns `(nodes, edges)`
|
||||
3. Register it in `lib/graph_builder.py` in the `_SERVICE_REGISTRY` tuple
|
||||
|
||||
Example extractor template:
|
||||
|
||||
```python
|
||||
from typing import List, Tuple
|
||||
from prowler.lib.outputs.inventory.models import ResourceNode, ResourceEdge
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
|
||||
"""Extract DynamoDB tables and their relationships."""
|
||||
nodes = []
|
||||
edges = []
|
||||
|
||||
for table in client.tables:
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=table.arn,
|
||||
type="dynamodb_table",
|
||||
name=table.name,
|
||||
service="dynamodb",
|
||||
region=table.region,
|
||||
account_id=client.audited_account,
|
||||
properties={"billing_mode": table.billing_mode}
|
||||
)
|
||||
)
|
||||
|
||||
# Add edges for KMS encryption, streams, etc.
|
||||
if table.kms_key_arn:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=table.kms_key_arn,
|
||||
target_id=table.arn,
|
||||
edge_type="encrypts",
|
||||
label="encrypts"
|
||||
)
|
||||
)
|
||||
|
||||
return nodes, edges
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### No nodes discovered
|
||||
|
||||
**Problem:** The tool reports "no nodes discovered" after running.
|
||||
|
||||
**Solution:** Ensure you've run a Prowler scan first. The tool reads from in-memory service clients loaded during the scan. If no services were scanned, no nodes will be discovered.
|
||||
|
||||
### Missing services in the graph
|
||||
|
||||
**Problem:** Some AWS services are not appearing in the graph.
|
||||
|
||||
**Solution:** The tool only includes services that have been scanned by Prowler. Run Prowler with the services you want to include, or run without service filters to scan all available services.
|
||||
|
||||
### HTML file doesn't display properly
|
||||
|
||||
**Problem:** The HTML visualization doesn't load or shows errors.
|
||||
|
||||
**Solution:**
|
||||
- Ensure you're opening the file in a modern browser (Chrome, Firefox, Safari, Edge)
|
||||
- Check your browser's console for JavaScript errors
|
||||
- Verify the file was generated completely (check file size > 0)
|
||||
- The HTML requires internet access to load D3.js from CDN
|
||||
|
||||
## Roadmap
|
||||
|
||||
Potential future enhancements:
|
||||
|
||||
- [ ] Support for additional AWS services (DynamoDB, SQS, SNS, etc.)
|
||||
- [ ] Export to Neo4j / Cartography format
|
||||
- [ ] Attack path analysis integration
|
||||
- [ ] Multi-account/multi-region aggregation
|
||||
- [ ] Custom edge type filtering in HTML UI
|
||||
- [ ] Graph diff between two scans
|
||||
|
||||
## Contributing
|
||||
|
||||
This is a community contribution. If you'd like to enhance it:
|
||||
|
||||
1. Fork the Prowler repository
|
||||
2. Make your changes in `contrib/inventory-graph/`
|
||||
3. Test thoroughly
|
||||
4. Submit a pull request with a clear description
|
||||
|
||||
## License
|
||||
|
||||
This tool is part of the Prowler project and is licensed under the Apache License 2.0.
|
||||
|
||||
## Credits
|
||||
|
||||
- **Author:** [@sandiyochristan](https://github.com/sandiyochristan)
|
||||
- **Related PR:** [#10382](https://github.com/prowler-cloud/prowler/pull/10382)
|
||||
- **Prowler Project:** [prowler-cloud/prowler](https://github.com/prowler-cloud/prowler)
|
||||
|
||||
## Support
|
||||
|
||||
For issues or questions:
|
||||
|
||||
- Open an issue in the [Prowler repository](https://github.com/prowler-cloud/prowler/issues)
|
||||
- Join the [Prowler Community Slack](https://goto.prowler.com/slack)
|
||||
- Tag your issue with `contrib:inventory-graph`
|
||||
@@ -0,0 +1,181 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Example: Generate AWS Inventory Graph with Mock Data
|
||||
|
||||
This example demonstrates how to use the inventory graph tool with mock AWS data.
|
||||
No AWS credentials required.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
# Add parent directory to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from lib.graph_builder import build_graph
|
||||
from lib.inventory_output import write_json, write_html
|
||||
|
||||
|
||||
def create_mock_lambda_client():
|
||||
"""Create a mock Lambda client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock Lambda function
|
||||
mock_fn = MagicMock()
|
||||
mock_fn.arn = "arn:aws:lambda:us-east-1:123456789012:function:my-test-function"
|
||||
mock_fn.name = "my-test-function"
|
||||
mock_fn.region = "us-east-1"
|
||||
mock_fn.vpc_id = "vpc-abc123"
|
||||
mock_fn.security_groups = ["sg-111222"]
|
||||
mock_fn.subnet_ids = {"subnet-aaa111", "subnet-bbb222"}
|
||||
mock_fn.environment = {"Variables": {"ENV": "production"}}
|
||||
mock_fn.kms_key_arn = (
|
||||
"arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012"
|
||||
)
|
||||
mock_fn.layers = []
|
||||
mock_fn.dead_letter_config = None
|
||||
mock_fn.event_source_mappings = []
|
||||
|
||||
mock_module.awslambda_client.functions = {mock_fn.arn: mock_fn}
|
||||
mock_module.awslambda_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def create_mock_ec2_client():
|
||||
"""Create a mock EC2 client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock EC2 instance
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.arn = (
|
||||
"arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0"
|
||||
)
|
||||
mock_instance.id = "i-1234567890abcdef0"
|
||||
mock_instance.region = "us-east-1"
|
||||
mock_instance.vpc_id = "vpc-abc123"
|
||||
mock_instance.subnet_id = "subnet-aaa111"
|
||||
mock_instance.security_groups = [MagicMock(id="sg-111222")]
|
||||
mock_instance.state = "running"
|
||||
mock_instance.type = "t3.micro"
|
||||
mock_instance.tags = [{"Key": "Name", "Value": "test-instance"}]
|
||||
|
||||
# Create a mock security group
|
||||
mock_sg = MagicMock()
|
||||
mock_sg.arn = "arn:aws:ec2:us-east-1:123456789012:security-group/sg-111222"
|
||||
mock_sg.id = "sg-111222"
|
||||
mock_sg.name = "test-security-group"
|
||||
mock_sg.region = "us-east-1"
|
||||
mock_sg.vpc_id = "vpc-abc123"
|
||||
|
||||
mock_module.ec2_client.instances = [mock_instance]
|
||||
mock_module.ec2_client.security_groups = [mock_sg]
|
||||
mock_module.ec2_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def create_mock_vpc_client():
|
||||
"""Create a mock VPC client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock VPC
|
||||
mock_vpc = MagicMock()
|
||||
mock_vpc.arn = "arn:aws:ec2:us-east-1:123456789012:vpc/vpc-abc123"
|
||||
mock_vpc.id = "vpc-abc123"
|
||||
mock_vpc.region = "us-east-1"
|
||||
mock_vpc.cidr_block = "10.0.0.0/16"
|
||||
mock_vpc.tags = [{"Key": "Name", "Value": "test-vpc"}]
|
||||
|
||||
# Create mock subnets
|
||||
mock_subnet1 = MagicMock()
|
||||
mock_subnet1.arn = "arn:aws:ec2:us-east-1:123456789012:subnet/subnet-aaa111"
|
||||
mock_subnet1.id = "subnet-aaa111"
|
||||
mock_subnet1.region = "us-east-1"
|
||||
mock_subnet1.vpc_id = "vpc-abc123"
|
||||
mock_subnet1.cidr_block = "10.0.1.0/24"
|
||||
mock_subnet1.availability_zone = "us-east-1a"
|
||||
|
||||
mock_subnet2 = MagicMock()
|
||||
mock_subnet2.arn = "arn:aws:ec2:us-east-1:123456789012:subnet/subnet-bbb222"
|
||||
mock_subnet2.id = "subnet-bbb222"
|
||||
mock_subnet2.region = "us-east-1"
|
||||
mock_subnet2.vpc_id = "vpc-abc123"
|
||||
mock_subnet2.cidr_block = "10.0.2.0/24"
|
||||
mock_subnet2.availability_zone = "us-east-1b"
|
||||
|
||||
mock_module.vpc_client.vpcs = [mock_vpc]
|
||||
mock_module.vpc_client.subnets = [mock_subnet1, mock_subnet2]
|
||||
mock_module.vpc_client.vpc_peering_connections = []
|
||||
mock_module.vpc_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def main():
|
||||
"""Main function to demonstrate the inventory graph generation."""
|
||||
print("=" * 70)
|
||||
print("AWS Inventory Graph - Mock Data Example")
|
||||
print("=" * 70)
|
||||
print()
|
||||
|
||||
# Create mock clients and inject them into sys.modules
|
||||
print("Creating mock AWS service clients...")
|
||||
sys.modules["prowler.providers.aws.services.awslambda.awslambda_client"] = (
|
||||
create_mock_lambda_client()
|
||||
)
|
||||
sys.modules["prowler.providers.aws.services.ec2.ec2_client"] = (
|
||||
create_mock_ec2_client()
|
||||
)
|
||||
sys.modules["prowler.providers.aws.services.vpc.vpc_client"] = (
|
||||
create_mock_vpc_client()
|
||||
)
|
||||
print("✓ Mock clients created")
|
||||
print()
|
||||
|
||||
# Build the graph
|
||||
print("Building connectivity graph...")
|
||||
graph = build_graph()
|
||||
print(f"✓ Graph built: {len(graph.nodes)} nodes, {len(graph.edges)} edges")
|
||||
print()
|
||||
|
||||
# Display discovered nodes
|
||||
print("Discovered nodes:")
|
||||
for node in graph.nodes:
|
||||
print(f" - {node.type}: {node.name} ({node.region})")
|
||||
print()
|
||||
|
||||
# Display discovered edges
|
||||
print("Discovered edges:")
|
||||
for edge in graph.edges:
|
||||
source_node = next((n for n in graph.nodes if n.id == edge.source_id), None)
|
||||
target_node = next((n for n in graph.nodes if n.id == edge.target_id), None)
|
||||
source_name = source_node.name if source_node else edge.source_id
|
||||
target_name = target_node.name if target_node else edge.target_id
|
||||
print(f" - {source_name} --[{edge.edge_type}]--> {target_name}")
|
||||
print()
|
||||
|
||||
# Write outputs
|
||||
output_dir = Path(__file__).parent
|
||||
json_path = output_dir / "example_output.inventory.json"
|
||||
html_path = output_dir / "example_output.inventory.html"
|
||||
|
||||
print("Writing output files...")
|
||||
write_json(graph, str(json_path))
|
||||
write_html(graph, str(html_path))
|
||||
print(f"✓ JSON written to: {json_path}")
|
||||
print(f"✓ HTML written to: {html_path}")
|
||||
print()
|
||||
|
||||
print("=" * 70)
|
||||
print("✓ Example complete!")
|
||||
print("=" * 70)
|
||||
print()
|
||||
print(f"Open the HTML file to view the interactive graph:")
|
||||
print(f" open {html_path}")
|
||||
print()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
AWS Inventory Connectivity Graph Generator
|
||||
|
||||
A standalone tool that generates interactive connectivity graphs from Prowler AWS scans.
|
||||
This tool reads from already-loaded AWS service clients in memory and produces:
|
||||
- JSON graph (nodes + edges)
|
||||
- Interactive HTML visualization
|
||||
|
||||
Usage:
|
||||
python inventory_graph.py --output-directory ./output --output-filename my-inventory
|
||||
|
||||
For more information, see README.md
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# Add the contrib directory to the path so we can import the lib modules
|
||||
CONTRIB_DIR = Path(__file__).parent
|
||||
sys.path.insert(0, str(CONTRIB_DIR))
|
||||
|
||||
from lib.graph_builder import build_graph
|
||||
from lib.inventory_output import write_json, write_html
|
||||
|
||||
|
||||
def parse_arguments():
|
||||
"""Parse command-line arguments."""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate AWS inventory connectivity graph from Prowler scan data",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog="""
|
||||
Examples:
|
||||
# Generate graph with default settings
|
||||
python inventory_graph.py
|
||||
|
||||
# Specify custom output directory and filename
|
||||
python inventory_graph.py --output-directory ./my-output --output-filename aws-inventory
|
||||
|
||||
# After running a Prowler scan
|
||||
prowler aws --profile my-profile
|
||||
python inventory_graph.py --output-directory ./output
|
||||
|
||||
For more information, see README.md
|
||||
""",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--output-directory",
|
||||
"-o",
|
||||
default="./output",
|
||||
help="Directory to save output files (default: ./output)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--output-filename",
|
||||
"-f",
|
||||
default=None,
|
||||
help="Base filename without extension (default: prowler-inventory-<timestamp>)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--verbose",
|
||||
"-v",
|
||||
action="store_true",
|
||||
help="Enable verbose output",
|
||||
)
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point for the inventory graph generator."""
|
||||
args = parse_arguments()
|
||||
|
||||
# Set up output paths
|
||||
output_dir = Path(args.output_directory)
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Generate filename with timestamp if not provided
|
||||
if args.output_filename:
|
||||
base_filename = args.output_filename
|
||||
else:
|
||||
timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
|
||||
base_filename = f"prowler-inventory-{timestamp}"
|
||||
|
||||
json_path = output_dir / f"{base_filename}.inventory.json"
|
||||
html_path = output_dir / f"{base_filename}.inventory.html"
|
||||
|
||||
print("=" * 70)
|
||||
print("AWS Inventory Connectivity Graph Generator")
|
||||
print("=" * 70)
|
||||
print()
|
||||
|
||||
# Build the graph from loaded service clients
|
||||
if args.verbose:
|
||||
print("Building connectivity graph from loaded AWS service clients...")
|
||||
|
||||
graph = build_graph()
|
||||
|
||||
# Check if any nodes were discovered
|
||||
if not graph.nodes:
|
||||
print("⚠️ WARNING: No nodes discovered!")
|
||||
print()
|
||||
print("This usually means:")
|
||||
print(" 1. No Prowler scan has been run yet in this Python session")
|
||||
print(" 2. No AWS service clients are loaded in memory")
|
||||
print()
|
||||
print("To fix this:")
|
||||
print(" 1. Run a Prowler scan first: prowler aws --output-formats csv")
|
||||
print(" 2. Then run this script in the same session")
|
||||
print()
|
||||
print(
|
||||
"Alternatively, integrate this tool directly into Prowler's output pipeline."
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
print(f"✓ Discovered {len(graph.nodes)} nodes and {len(graph.edges)} edges")
|
||||
print()
|
||||
|
||||
# Write outputs
|
||||
if args.verbose:
|
||||
print(f"Writing JSON output to: {json_path}")
|
||||
write_json(graph, str(json_path))
|
||||
|
||||
if args.verbose:
|
||||
print(f"Writing HTML output to: {html_path}")
|
||||
write_html(graph, str(html_path))
|
||||
|
||||
print()
|
||||
print("=" * 70)
|
||||
print("✓ Graph generation complete!")
|
||||
print("=" * 70)
|
||||
print()
|
||||
print(f"📄 JSON: {json_path}")
|
||||
print(f"🌐 HTML: {html_path}")
|
||||
print()
|
||||
print(f"Open the HTML file in your browser to explore the interactive graph:")
|
||||
print(f" open {html_path}")
|
||||
print()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
print("\n\nInterrupted by user. Exiting...")
|
||||
sys.exit(130)
|
||||
except Exception as e:
|
||||
print(f"\n❌ Error: {e}", file=sys.stderr)
|
||||
if "--verbose" in sys.argv or "-v" in sys.argv:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
@@ -0,0 +1,94 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
|
||||
"""
|
||||
Extract EC2 instance and security-group nodes with their edges.
|
||||
|
||||
Edges produced:
|
||||
- instance → security-group [network]
|
||||
- instance → subnet [network]
|
||||
- security-group → VPC [network]
|
||||
"""
|
||||
nodes: List[ResourceNode] = []
|
||||
edges: List[ResourceEdge] = []
|
||||
|
||||
# EC2 Instances
|
||||
for instance in client.instances:
|
||||
name = instance.id
|
||||
for tag in instance.tags or []:
|
||||
if tag.get("Key") == "Name":
|
||||
name = tag["Value"]
|
||||
break
|
||||
|
||||
props = {
|
||||
"instance_type": getattr(instance, "type", None),
|
||||
"state": getattr(instance, "state", None),
|
||||
"vpc_id": getattr(instance, "vpc_id", None),
|
||||
"subnet_id": getattr(instance, "subnet_id", None),
|
||||
"public_ip": getattr(instance, "public_ip_address", None),
|
||||
"private_ip": getattr(instance, "private_ip_address", None),
|
||||
}
|
||||
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=instance.arn,
|
||||
type="ec2_instance",
|
||||
name=name,
|
||||
service="ec2",
|
||||
region=instance.region,
|
||||
account_id=client.audited_account,
|
||||
properties={k: v for k, v in props.items() if v is not None},
|
||||
)
|
||||
)
|
||||
|
||||
for sg_id in instance.security_groups or []:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=instance.arn,
|
||||
target_id=sg_id,
|
||||
edge_type="network",
|
||||
label="sg",
|
||||
)
|
||||
)
|
||||
|
||||
if instance.subnet_id:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=instance.arn,
|
||||
target_id=instance.subnet_id,
|
||||
edge_type="network",
|
||||
label="subnet",
|
||||
)
|
||||
)
|
||||
|
||||
# Security Groups
|
||||
for sg in client.security_groups.values():
|
||||
name = (
|
||||
sg.name if hasattr(sg, "name") else sg.id if hasattr(sg, "id") else sg.arn
|
||||
)
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=sg.arn,
|
||||
type="security_group",
|
||||
name=name,
|
||||
service="ec2",
|
||||
region=sg.region,
|
||||
account_id=client.audited_account,
|
||||
properties={"vpc_id": sg.vpc_id},
|
||||
)
|
||||
)
|
||||
|
||||
if sg.vpc_id:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=sg.arn,
|
||||
target_id=sg.vpc_id,
|
||||
edge_type="network",
|
||||
label="in-vpc",
|
||||
)
|
||||
)
|
||||
|
||||
return nodes, edges
|
||||
@@ -0,0 +1,60 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
|
||||
"""
|
||||
Extract ELBv2 (ALB/NLB) load balancer nodes and their edges.
|
||||
|
||||
Edges produced:
|
||||
- load_balancer → security-group [network]
|
||||
- load_balancer → VPC [network]
|
||||
"""
|
||||
nodes: List[ResourceNode] = []
|
||||
edges: List[ResourceEdge] = []
|
||||
|
||||
for lb in client.loadbalancersv2.values():
|
||||
props = {
|
||||
"type": getattr(lb, "type", None),
|
||||
"scheme": getattr(lb, "scheme", None),
|
||||
"dns_name": getattr(lb, "dns", None),
|
||||
"vpc_id": getattr(lb, "vpc_id", None),
|
||||
}
|
||||
|
||||
name = getattr(lb, "name", lb.arn.split("/")[-2] if "/" in lb.arn else lb.arn)
|
||||
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=lb.arn,
|
||||
type="load_balancer",
|
||||
name=name,
|
||||
service="elbv2",
|
||||
region=lb.region,
|
||||
account_id=client.audited_account,
|
||||
properties={k: v for k, v in props.items() if v is not None},
|
||||
)
|
||||
)
|
||||
|
||||
for sg_id in lb.security_groups or []:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=lb.arn,
|
||||
target_id=sg_id,
|
||||
edge_type="network",
|
||||
label="sg",
|
||||
)
|
||||
)
|
||||
|
||||
vpc_id = getattr(lb, "vpc_id", None)
|
||||
if vpc_id:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=lb.arn,
|
||||
target_id=vpc_id,
|
||||
edge_type="network",
|
||||
label="in-vpc",
|
||||
)
|
||||
)
|
||||
|
||||
return nodes, edges
|
||||
@@ -0,0 +1,84 @@
|
||||
import json
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def _parse_trust_principals(assume_role_policy: Any) -> List[str]:
|
||||
"""
|
||||
Return a flat list of principal strings from an IAM assume-role policy document.
|
||||
The policy may be a dict already or a JSON string.
|
||||
"""
|
||||
if not assume_role_policy:
|
||||
return []
|
||||
|
||||
if isinstance(assume_role_policy, str):
|
||||
try:
|
||||
assume_role_policy = json.loads(assume_role_policy)
|
||||
except (json.JSONDecodeError, ValueError):
|
||||
return []
|
||||
|
||||
principals = []
|
||||
for statement in assume_role_policy.get("Statement", []):
|
||||
principal = statement.get("Principal", {})
|
||||
if isinstance(principal, str):
|
||||
principals.append(principal)
|
||||
elif isinstance(principal, dict):
|
||||
for v in principal.values():
|
||||
if isinstance(v, list):
|
||||
principals.extend(v)
|
||||
else:
|
||||
principals.append(v)
|
||||
elif isinstance(principal, list):
|
||||
principals.extend(principal)
|
||||
|
||||
return principals
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
|
||||
"""
|
||||
Extract IAM role nodes and their trust-relationship edges.
|
||||
|
||||
Edges produced:
|
||||
- trusted-principal → role [iam] (who can assume this role)
|
||||
"""
|
||||
nodes: List[ResourceNode] = []
|
||||
edges: List[ResourceEdge] = []
|
||||
|
||||
for role in client.roles:
|
||||
props: Dict[str, Any] = {
|
||||
"path": getattr(role, "path", None),
|
||||
"create_date": str(getattr(role, "create_date", "") or ""),
|
||||
}
|
||||
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=role.arn,
|
||||
type="iam_role",
|
||||
name=role.name,
|
||||
service="iam",
|
||||
region="global",
|
||||
account_id=client.audited_account,
|
||||
properties={k: v for k, v in props.items() if v},
|
||||
)
|
||||
)
|
||||
|
||||
# Trust-relationship edges: principal → role (principal CAN assume role)
|
||||
try:
|
||||
for principal in _parse_trust_principals(role.assume_role_policy):
|
||||
if principal and principal != "*":
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=principal,
|
||||
target_id=role.arn,
|
||||
edge_type="iam",
|
||||
label="can-assume",
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.debug(
|
||||
f"inventory iam_extractor: could not parse trust policy for {role.arn}: {e}"
|
||||
)
|
||||
|
||||
return nodes, edges
|
||||
@@ -0,0 +1,118 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract Lambda function nodes and their edges from an awslambda_client.

    Edges produced:
      - lambda → VPC [network]
      - lambda → subnet [network]
      - lambda → sg [network]
      - event source → lambda [triggers] (from EventSourceMapping)
      - lambda → layer ARN [depends_on]
      - lambda → DLQ target [data_flow]
      - KMS key → lambda [encrypts]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    def _link(src: str, dst: str, kind: str, label: str) -> None:
        # Small helper so every edge append reads as one line.
        edges.append(
            ResourceEdge(source_id=src, target_id=dst, edge_type=kind, label=label)
        )

    for function in client.functions.values():
        properties = {
            "runtime": function.runtime,
            "vpc_id": function.vpc_id,
        }
        if function.environment:
            properties["has_env_vars"] = True
        if function.kms_key_arn:
            properties["kms_key_arn"] = function.kms_key_arn

        nodes.append(
            ResourceNode(
                id=function.arn,
                type="lambda_function",
                name=function.name,
                service="lambda",
                region=function.region,
                account_id=client.audited_account,
                properties=properties,
            )
        )

        # Network placement: VPC, security groups and subnets.
        if function.vpc_id:
            _link(function.arn, function.vpc_id, "network", "in-vpc")
        for sg_id in function.security_groups or []:
            _link(function.arn, sg_id, "network", "sg")
        for subnet_id in function.subnet_ids or set():
            _link(function.arn, subnet_id, "network", "subnet")

        # Event sources trigger the function (source → lambda).
        for mapping in getattr(function, "event_source_mappings", []):
            _link(
                mapping.event_source_arn,
                function.arn,
                "triggers",
                f"esm:{mapping.state}",
            )

        # Layer dependencies.
        for layer in getattr(function, "layers", []):
            _link(function.arn, layer.arn, "depends_on", "layer")

        # Dead-letter queue receives failed-invocation payloads.
        dlq = getattr(function, "dead_letter_config", None)
        if dlq and dlq.target_arn:
            _link(function.arn, dlq.target_arn, "data_flow", "dlq")

        # KMS key encrypting the function's environment (key → lambda).
        if function.kms_key_arn:
            _link(function.kms_key_arn, function.arn, "encrypts", "kms")

    return nodes, edges
|
||||
@@ -0,0 +1,86 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract RDS DB instance nodes and their edges.

    Edges produced:
      - db_instance → security-group [network]
      - db_instance → VPC [network]
      - db_instance → cluster [depends_on]
      - KMS key → db_instance [encrypts]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    for instance in client.db_instances.values():
        metadata = {
            "engine": getattr(instance, "engine", None),
            "engine_version": getattr(instance, "engine_version", None),
            "instance_class": getattr(instance, "db_instance_class", None),
            "vpc_id": getattr(instance, "vpc_id", None),
            "multi_az": getattr(instance, "multi_az", None),
            "publicly_accessible": getattr(instance, "publicly_accessible", None),
            "storage_encrypted": getattr(instance, "storage_encrypted", None),
        }

        nodes.append(
            ResourceNode(
                id=instance.arn,
                type="rds_instance",
                name=instance.id,
                service="rds",
                region=instance.region,
                account_id=client.audited_account,
                # Keep falsy-but-meaningful values (False/0); drop only None.
                properties={k: v for k, v in metadata.items() if v is not None},
            )
        )

        # Security-group membership — entries may be plain ids or objects.
        for group in getattr(instance, "security_groups", []):
            group_id = (
                group if isinstance(group, str) else getattr(group, "id", str(group))
            )
            edges.append(
                ResourceEdge(
                    source_id=instance.arn,
                    target_id=group_id,
                    edge_type="network",
                    label="sg",
                )
            )

        vpc_id = getattr(instance, "vpc_id", None)
        if vpc_id:
            edges.append(
                ResourceEdge(
                    source_id=instance.arn,
                    target_id=vpc_id,
                    edge_type="network",
                    label="in-vpc",
                )
            )

        cluster_arn = getattr(instance, "cluster_arn", None)
        if cluster_arn:
            edges.append(
                ResourceEdge(
                    source_id=instance.arn,
                    target_id=cluster_arn,
                    edge_type="depends_on",
                    label="cluster-member",
                )
            )

        kms_key_id = getattr(instance, "kms_key_id", None)
        if kms_key_id:
            edges.append(
                ResourceEdge(
                    source_id=kms_key_id,
                    target_id=instance.arn,
                    edge_type="encrypts",
                    label="kms",
                )
            )

    return nodes, edges
|
||||
@@ -0,0 +1,92 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract S3 bucket nodes and their edges.

    Edges produced:
      - bucket → replication-target bucket [replicates_to]
      - KMS key → bucket [encrypts]
      - bucket → logging bucket [logs_to]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    def _as_bucket_arn(value: str) -> str:
        # Replication/logging targets may be a bare bucket name or a full ARN.
        return value if value.startswith("arn:") else f"arn:aws:s3:::{value}"

    for bucket in client.buckets.values():
        encryption = getattr(bucket, "encryption", None)
        versioning = getattr(bucket, "versioning_enabled", None)
        logging = getattr(bucket, "logging", None)
        # NOTE: the previous version also read public_access_block but never
        # used it — the unused local has been removed.

        props = {}
        if versioning is not None:
            props["versioning"] = versioning
        if encryption:
            # Encryption may be a plain string or an object exposing .type.
            props["encryption"] = getattr(encryption, "type", str(encryption))

        nodes.append(
            ResourceNode(
                id=bucket.arn,
                type="s3_bucket",
                name=bucket.name,
                service="s3",
                region=bucket.region,
                account_id=client.audited_account,
                properties=props,
            )
        )

        # Replication edges: bucket → destination bucket.
        for rule in getattr(bucket, "replication_rules", None) or []:
            dest_bucket = getattr(rule, "destination_bucket", None)
            if dest_bucket:
                edges.append(
                    ResourceEdge(
                        source_id=bucket.arn,
                        target_id=_as_bucket_arn(dest_bucket),
                        edge_type="replicates_to",
                        label="s3-replication",
                    )
                )

        # Access-logging edge: bucket → log-target bucket.
        if logging:
            target_bucket = getattr(logging, "target_bucket", None)
            if target_bucket:
                edges.append(
                    ResourceEdge(
                        source_id=bucket.arn,
                        target_id=_as_bucket_arn(target_bucket),
                        edge_type="logs_to",
                        label="access-logs",
                    )
                )

        # KMS encryption edge: key → bucket.
        if encryption:
            kms_arn = getattr(encryption, "kms_master_key_id", None)
            if kms_arn:
                edges.append(
                    ResourceEdge(
                        source_id=kms_arn,
                        target_id=bucket.arn,
                        edge_type="encrypts",
                        label="kms",
                    )
                )

    return nodes, edges
|
||||
@@ -0,0 +1,92 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract VPC and subnet nodes with their edges.

    Edges produced:
      - subnet → VPC [depends_on]
      - peering connection between VPCs [network]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    def _display_name(resource) -> str:
        # Prefer the "Name" tag; fall back to the resource id, then its ARN.
        for tag in getattr(resource, "tags", None) or []:
            if isinstance(tag, dict) and tag.get("Key") == "Name":
                return tag["Value"]
        return resource.id if hasattr(resource, "id") else resource.arn

    # VPC id → ARN lookup built once; the previous per-subnet linear scan over
    # all VPCs made subnet processing O(subnets × vpcs).
    vpc_arn_by_id = {
        vpc.id: vpc.arn for vpc in client.vpcs.values() if hasattr(vpc, "id")
    }

    # VPCs
    for vpc in client.vpcs.values():
        nodes.append(
            ResourceNode(
                id=vpc.arn,
                type="vpc",
                name=_display_name(vpc),
                service="vpc",
                region=vpc.region,
                account_id=client.audited_account,
                properties={
                    "cidr_block": getattr(vpc, "cidr_block", None),
                    "is_default": getattr(vpc, "is_default", None),
                },
            )
        )

    # VPC Subnets
    for subnet in client.vpc_subnets.values():
        nodes.append(
            ResourceNode(
                id=subnet.arn,
                type="subnet",
                name=_display_name(subnet),
                service="vpc",
                region=subnet.region,
                account_id=client.audited_account,
                properties={
                    "vpc_id": getattr(subnet, "vpc_id", None),
                    "cidr_block": getattr(subnet, "cidr_block", None),
                    "availability_zone": getattr(subnet, "availability_zone", None),
                    "public": getattr(subnet, "public", None),
                },
            )
        )

        vpc_id = getattr(subnet, "vpc_id", None)
        if vpc_id:
            edges.append(
                ResourceEdge(
                    source_id=subnet.arn,
                    # Fall back to the raw vpc id when no matching VPC ARN exists.
                    target_id=vpc_arn_by_id.get(vpc_id, vpc_id),
                    edge_type="depends_on",
                    label="subnet-of",
                )
            )

    # VPC Peering Connections
    for peering in getattr(client, "vpc_peering_connections", {}).values():
        edges.append(
            ResourceEdge(
                source_id=peering.arn,
                # NOTE(review): accepter_vpc_id looks like a VPC id, not an ARN,
                # so this target may not match any VPC node id — confirm intent.
                target_id=getattr(peering, "accepter_vpc_id", peering.arn),
                edge_type="network",
                label="vpc-peer",
            )
        )

    return nodes, edges
|
||||
@@ -0,0 +1,106 @@
|
||||
"""
|
||||
graph_builder.py
|
||||
----------------
|
||||
Builds a ConnectivityGraph by reading already-loaded AWS service clients from
|
||||
sys.modules. Only services that were actually scanned (i.e. whose client
|
||||
module is already imported) contribute nodes and edges. Unknown / unloaded
|
||||
services are silently skipped, so the output degrades gracefully when only a
|
||||
subset of checks has been run.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from typing import Tuple
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ConnectivityGraph
|
||||
|
||||
# Registry: (sys.modules key, attribute name inside that module, extractor module path)
# Each entry maps a Prowler AWS service-client module (only present in
# sys.modules once that service has been scanned) to the extractor module that
# converts it into graph nodes/edges. Services absent from sys.modules are
# skipped by build_graph, so this list is safe to extend freely.
_SERVICE_REGISTRY: Tuple[Tuple[str, str, str], ...] = (
    (
        "prowler.providers.aws.services.awslambda.awslambda_client",
        "awslambda_client",
        "lib.extractors.lambda_extractor",
    ),
    (
        "prowler.providers.aws.services.ec2.ec2_client",
        "ec2_client",
        "lib.extractors.ec2_extractor",
    ),
    (
        "prowler.providers.aws.services.vpc.vpc_client",
        "vpc_client",
        "lib.extractors.vpc_extractor",
    ),
    (
        "prowler.providers.aws.services.rds.rds_client",
        "rds_client",
        "lib.extractors.rds_extractor",
    ),
    (
        "prowler.providers.aws.services.elbv2.elbv2_client",
        "elbv2_client",
        "lib.extractors.elbv2_extractor",
    ),
    (
        "prowler.providers.aws.services.s3.s3_client",
        "s3_client",
        "lib.extractors.s3_extractor",
    ),
    (
        "prowler.providers.aws.services.iam.iam_client",
        "iam_client",
        "lib.extractors.iam_extractor",
    ),
)
|
||||
|
||||
|
||||
def build_graph() -> ConnectivityGraph:
    """
    Walk the service registry, pick up every already-loaded client module and
    run the matching extractor against it.

    Returns a ConnectivityGraph with all discovered nodes and edges.
    Duplicate node IDs are silently deduplicated (first occurrence wins).
    """
    graph = ConnectivityGraph()
    known_ids: set = set()

    for module_key, attr_name, extractor_key in _SERVICE_REGISTRY:
        loaded_module = sys.modules.get(module_key)
        if loaded_module is None:
            # Service was never scanned → nothing to extract.
            continue

        service_client = getattr(loaded_module, attr_name, None)
        if service_client is None:
            continue

        extractor = sys.modules.get(extractor_key)
        if extractor is None:
            try:
                import importlib

                extractor = importlib.import_module(extractor_key)
            except ImportError as e:
                logger.debug(
                    f"inventory graph_builder: cannot import extractor {extractor_key}: {e}"
                )
                continue

        try:
            nodes, edges = extractor.extract(service_client)
        except Exception as e:
            # One broken extractor must not abort the whole graph build.
            logger.error(
                f"inventory graph_builder: extractor {extractor_key} failed: "
                f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
            )
            continue

        for node in nodes:
            if node.id in known_ids:
                continue
            known_ids.add(node.id)
            graph.add_node(node)

        for edge in edges:
            graph.add_edge(edge)

    return graph
|
||||
@@ -0,0 +1,502 @@
|
||||
"""
|
||||
inventory_output.py
|
||||
-------------------
|
||||
Writes the ConnectivityGraph produced by graph_builder to two files:
|
||||
|
||||
<output_path>.inventory.json – machine-readable graph (nodes + edges)
|
||||
<output_path>.inventory.html – interactive D3.js force-directed graph
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from dataclasses import asdict
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ConnectivityGraph
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# JSON output
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def write_json(graph: ConnectivityGraph, file_path: str) -> None:
    """Serialise the graph to a JSON file.

    Args:
        graph: the fully-built connectivity graph.
        file_path: destination path; parent directories are created on demand.

    Errors are logged instead of raised so inventory output never aborts a scan.
    """
    try:
        parent_dir = os.path.dirname(file_path)
        # os.makedirs("") raises FileNotFoundError, so only create a parent
        # directory when the path actually contains one (the old code failed
        # for bare filenames and silently skipped writing the file).
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)
        data = {
            # NOTE: datetime.utcnow() is deprecated since Python 3.12; kept to
            # preserve the exact "<iso>Z" timestamp format.
            "generated_at": datetime.utcnow().isoformat() + "Z",
            "nodes": [asdict(n) for n in graph.nodes],
            "edges": [asdict(e) for e in graph.edges],
            "stats": {
                "node_count": len(graph.nodes),
                "edge_count": len(graph.edges),
            },
        }
        with open(file_path, "w", encoding="utf-8") as fh:
            json.dump(data, fh, indent=2, default=str)
        logger.info(f"Inventory graph JSON written to {file_path}")
    except Exception as e:
        logger.error(
            f"inventory_output.write_json: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# HTML output (self-contained, D3.js CDN)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Colour palette per node type
# Keys must match the `type` values emitted by the extractors; unknown types
# fall back to the "default" entry (see nodeColour in the HTML template).
_NODE_COLOURS = {
    "lambda_function": "#f59e0b",
    "ec2_instance": "#3b82f6",
    "security_group": "#6366f1",
    "vpc": "#10b981",
    "subnet": "#34d399",
    "rds_instance": "#ef4444",
    "load_balancer": "#8b5cf6",
    "s3_bucket": "#06b6d4",
    "iam_role": "#f97316",
    "default": "#94a3b8",
}

# Edge stroke colours per edge type
# Keys must match the `edge_type` values produced by the extractors.
_EDGE_COLOURS = {
    "network": "#64748b",
    "iam": "#f97316",
    "triggers": "#a855f7",
    "data_flow": "#0ea5e9",
    "depends_on": "#94a3b8",
    "routes_to": "#22c55e",
    "replicates_to": "#ec4899",
    "encrypts": "#eab308",
    "logs_to": "#78716c",
}
|
||||
|
||||
_HTML_TEMPLATE = """\
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
|
||||
<title>Prowler – AWS Connectivity Graph</title>
|
||||
<script src="https://d3js.org/d3.v7.min.js"></script>
|
||||
<style>
|
||||
*, *::before, *::after {{ box-sizing: border-box; }}
|
||||
body {{
|
||||
margin: 0;
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
background: #0f172a;
|
||||
color: #e2e8f0;
|
||||
}}
|
||||
#header {{
|
||||
padding: 12px 20px;
|
||||
background: #1e293b;
|
||||
border-bottom: 1px solid #334155;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
}}
|
||||
#header h1 {{ margin: 0; font-size: 18px; font-weight: 700; }}
|
||||
#header .stats {{ font-size: 13px; color: #94a3b8; }}
|
||||
#controls {{
|
||||
padding: 8px 20px;
|
||||
background: #1e293b;
|
||||
border-bottom: 1px solid #334155;
|
||||
display: flex;
|
||||
gap: 12px;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
}}
|
||||
#controls label {{ font-size: 12px; color: #94a3b8; }}
|
||||
#controls select, #controls input[type=range] {{
|
||||
background: #0f172a;
|
||||
color: #e2e8f0;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 4px;
|
||||
padding: 3px 6px;
|
||||
font-size: 12px;
|
||||
}}
|
||||
#graph-container {{ width: 100%; height: calc(100vh - 100px); position: relative; }}
|
||||
svg {{ width: 100%; height: 100%; }}
|
||||
.node circle {{
|
||||
stroke: #1e293b;
|
||||
stroke-width: 1.5px;
|
||||
cursor: pointer;
|
||||
transition: r 0.15s;
|
||||
}}
|
||||
.node circle:hover {{ stroke-width: 3px; }}
|
||||
.node text {{
|
||||
font-size: 10px;
|
||||
fill: #e2e8f0;
|
||||
pointer-events: none;
|
||||
text-shadow: 0 0 4px #0f172a;
|
||||
}}
|
||||
.link {{
|
||||
stroke-opacity: 0.6;
|
||||
stroke-width: 1.5px;
|
||||
}}
|
||||
.link-label {{
|
||||
font-size: 8px;
|
||||
fill: #94a3b8;
|
||||
pointer-events: none;
|
||||
}}
|
||||
#tooltip {{
|
||||
position: fixed;
|
||||
background: #1e293b;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 6px;
|
||||
padding: 10px 14px;
|
||||
font-size: 12px;
|
||||
pointer-events: none;
|
||||
max-width: 320px;
|
||||
word-break: break-all;
|
||||
z-index: 9999;
|
||||
display: none;
|
||||
}}
|
||||
#tooltip strong {{ color: #f8fafc; }}
|
||||
#tooltip .prop {{ color: #94a3b8; margin-top: 4px; }}
|
||||
#legend {{
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
right: 10px;
|
||||
background: rgba(30,41,59,0.9);
|
||||
border: 1px solid #334155;
|
||||
border-radius: 6px;
|
||||
padding: 10px 14px;
|
||||
font-size: 11px;
|
||||
}}
|
||||
#legend h3 {{ margin: 0 0 6px; font-size: 12px; }}
|
||||
.legend-row {{ display: flex; align-items: center; gap: 6px; margin: 3px 0; }}
|
||||
.legend-dot {{ width: 12px; height: 12px; border-radius: 50%; flex-shrink: 0; }}
|
||||
.legend-line {{ width: 20px; height: 2px; flex-shrink: 0; }}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="header">
|
||||
<h1>🔗 AWS Connectivity Graph</h1>
|
||||
<span class="stats" id="stat-label">Generated: {generated_at}</span>
|
||||
</div>
|
||||
<div id="controls">
|
||||
<label>Filter service:
|
||||
<select id="filter-service">
|
||||
<option value="">All services</option>
|
||||
</select>
|
||||
</label>
|
||||
<label>Link distance:
|
||||
<input type="range" id="link-distance" min="40" max="300" value="120"/>
|
||||
</label>
|
||||
<label>Charge strength:
|
||||
<input type="range" id="charge-strength" min="-800" max="-20" value="-250"/>
|
||||
</label>
|
||||
<span class="stats" id="visible-count"></span>
|
||||
</div>
|
||||
<div id="graph-container">
|
||||
<svg id="graph-svg"></svg>
|
||||
<div id="tooltip"></div>
|
||||
<div id="legend">
|
||||
<h3>Node types</h3>
|
||||
{legend_nodes_html}
|
||||
<h3 style="margin-top:8px">Edge types</h3>
|
||||
{legend_edges_html}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const RAW_NODES = {nodes_json};
|
||||
const RAW_EDGES = {edges_json};
|
||||
const NODE_COLOURS = {node_colours_json};
|
||||
const EDGE_COLOURS = {edge_colours_json};
|
||||
|
||||
// ── helpers ──────────────────────────────────────────────────────────────
|
||||
function nodeColour(d) {{
|
||||
return NODE_COLOURS[d.type] || NODE_COLOURS["default"];
|
||||
}}
|
||||
function edgeColour(d) {{
|
||||
return EDGE_COLOURS[d.edge_type] || "#94a3b8";
|
||||
}}
|
||||
function nodeRadius(d) {{
|
||||
const base = {{
|
||||
lambda_function: 9, ec2_instance: 10, vpc: 14, subnet: 8,
|
||||
security_group: 7, rds_instance: 11, load_balancer: 12,
|
||||
s3_bucket: 9, iam_role: 9
|
||||
}};
|
||||
return base[d.type] || 8;
|
||||
}}
|
||||
|
||||
// ── filter controls ───────────────────────────────────────────────────────
|
||||
const services = [...new Set(RAW_NODES.map(n => n.service))].sort();
|
||||
const sel = document.getElementById("filter-service");
|
||||
services.forEach(s => {{
|
||||
const o = document.createElement("option");
|
||||
o.value = s; o.textContent = s;
|
||||
sel.appendChild(o);
|
||||
}});
|
||||
|
||||
// ── D3 setup ──────────────────────────────────────────────────────────────
|
||||
const svg = d3.select("#graph-svg");
|
||||
const container = svg.append("g");
|
||||
|
||||
// zoom
|
||||
svg.call(
|
||||
d3.zoom().scaleExtent([0.05, 8])
|
||||
.on("zoom", e => container.attr("transform", e.transform))
|
||||
);
|
||||
|
||||
// arrowhead marker
|
||||
const defs = svg.append("defs");
|
||||
defs.append("marker")
|
||||
.attr("id", "arrow")
|
||||
.attr("viewBox", "0 -5 10 10")
|
||||
.attr("refX", 20).attr("refY", 0)
|
||||
.attr("markerWidth", 6).attr("markerHeight", 6)
|
||||
.attr("orient", "auto")
|
||||
.append("path")
|
||||
.attr("d", "M0,-5L10,0L0,5")
|
||||
.attr("fill", "#94a3b8");
|
||||
|
||||
// tooltip
|
||||
const tooltip = document.getElementById("tooltip");
|
||||
|
||||
// ── simulation ────────────────────────────────────────────────────────────
|
||||
let simulation, linkSel, nodeSel, labelSel;
|
||||
|
||||
function buildGraph(nodeFilter) {{
|
||||
// Determine which nodes to show
|
||||
const visibleNodes = nodeFilter
|
||||
? RAW_NODES.filter(n => n.service === nodeFilter)
|
||||
: RAW_NODES;
|
||||
const visibleIds = new Set(visibleNodes.map(n => n.id));
|
||||
|
||||
// Only show edges where BOTH endpoints are visible
|
||||
const visibleEdges = RAW_EDGES.filter(
|
||||
e => visibleIds.has(e.source_id) && visibleIds.has(e.target_id)
|
||||
);
|
||||
|
||||
document.getElementById("visible-count").textContent =
|
||||
`Showing ${{visibleNodes.length}} nodes · ${{visibleEdges.length}} edges`;
|
||||
|
||||
container.selectAll("*").remove();
|
||||
|
||||
if (simulation) simulation.stop();
|
||||
|
||||
const nodes = visibleNodes.map(n => ({{ ...n }}));
|
||||
const nodeIndex = Object.fromEntries(nodes.map(n => [n.id, n]));
|
||||
|
||||
const links = visibleEdges.map(e => ({{
|
||||
...e,
|
||||
source: nodeIndex[e.source_id] || e.source_id,
|
||||
target: nodeIndex[e.target_id] || e.target_id,
|
||||
}}));
|
||||
|
||||
const dist = +document.getElementById("link-distance").value;
|
||||
const charge = +document.getElementById("charge-strength").value;
|
||||
|
||||
simulation = d3.forceSimulation(nodes)
|
||||
.force("link", d3.forceLink(links).id(d => d.id).distance(dist))
|
||||
.force("charge", d3.forceManyBody().strength(charge))
|
||||
.force("center", d3.forceCenter(
|
||||
document.getElementById("graph-container").clientWidth / 2,
|
||||
document.getElementById("graph-container").clientHeight / 2
|
||||
))
|
||||
.force("collision", d3.forceCollide().radius(d => nodeRadius(d) + 6));
|
||||
|
||||
// Edges
|
||||
linkSel = container.append("g").attr("class", "links")
|
||||
.selectAll("line")
|
||||
.data(links)
|
||||
.join("line")
|
||||
.attr("class", "link")
|
||||
.attr("stroke", edgeColour)
|
||||
.attr("marker-end", "url(#arrow)");
|
||||
|
||||
// Edge labels
|
||||
labelSel = container.append("g").attr("class", "link-labels")
|
||||
.selectAll("text")
|
||||
.data(links)
|
||||
.join("text")
|
||||
.attr("class", "link-label")
|
||||
.text(d => d.label || "");
|
||||
|
||||
// Nodes
|
||||
nodeSel = container.append("g").attr("class", "nodes")
|
||||
.selectAll("g")
|
||||
.data(nodes)
|
||||
.join("g")
|
||||
.attr("class", "node")
|
||||
.call(
|
||||
d3.drag()
|
||||
.on("start", (event, d) => {{
|
||||
if (!event.active) simulation.alphaTarget(0.3).restart();
|
||||
d.fx = d.x; d.fy = d.y;
|
||||
}})
|
||||
.on("drag", (event, d) => {{ d.fx = event.x; d.fy = event.y; }})
|
||||
.on("end", (event, d) => {{
|
||||
if (!event.active) simulation.alphaTarget(0);
|
||||
d.fx = null; d.fy = null;
|
||||
}})
|
||||
)
|
||||
.on("mouseover", (event, d) => {{
|
||||
const props = Object.entries(d.properties || {{}})
|
||||
.map(([k, v]) => `<div class="prop"><b>${{k}}</b>: ${{v}}</div>`)
|
||||
.join("");
|
||||
tooltip.innerHTML = `
|
||||
<strong>${{d.name}}</strong>
|
||||
<div class="prop"><b>type</b>: ${{d.type}}</div>
|
||||
<div class="prop"><b>service</b>: ${{d.service}}</div>
|
||||
<div class="prop"><b>region</b>: ${{d.region}}</div>
|
||||
<div class="prop"><b>account</b>: ${{d.account_id}}</div>
|
||||
<div class="prop" style="word-break:break-all"><b>arn</b>: ${{d.id}}</div>
|
||||
${{props}}
|
||||
`;
|
||||
tooltip.style.display = "block";
|
||||
tooltip.style.left = (event.clientX + 12) + "px";
|
||||
tooltip.style.top = (event.clientY - 10) + "px";
|
||||
}})
|
||||
.on("mousemove", event => {{
|
||||
tooltip.style.left = (event.clientX + 12) + "px";
|
||||
tooltip.style.top = (event.clientY - 10) + "px";
|
||||
}})
|
||||
.on("mouseout", () => {{ tooltip.style.display = "none"; }});
|
||||
|
||||
nodeSel.append("circle")
|
||||
.attr("r", nodeRadius)
|
||||
.attr("fill", nodeColour);
|
||||
|
||||
nodeSel.append("text")
|
||||
.attr("dx", d => nodeRadius(d) + 3)
|
||||
.attr("dy", "0.35em")
|
||||
.text(d => d.name.length > 24 ? d.name.slice(0, 22) + "…" : d.name);
|
||||
|
||||
simulation.on("tick", () => {{
|
||||
linkSel
|
||||
.attr("x1", d => d.source.x)
|
||||
.attr("y1", d => d.source.y)
|
||||
.attr("x2", d => d.target.x)
|
||||
.attr("y2", d => d.target.y);
|
||||
|
||||
labelSel
|
||||
.attr("x", d => (d.source.x + d.target.x) / 2)
|
||||
.attr("y", d => (d.source.y + d.target.y) / 2);
|
||||
|
||||
nodeSel.attr("transform", d => `translate(${{d.x}},${{d.y}})`);
|
||||
}});
|
||||
}}
|
||||
|
||||
// Initial render
|
||||
buildGraph(null);
|
||||
|
||||
// Filter change
|
||||
sel.addEventListener("change", () => buildGraph(sel.value || null));
|
||||
|
||||
// Simulation control sliders — restart on change
|
||||
document.getElementById("link-distance").addEventListener("input", () => buildGraph(sel.value || null));
|
||||
document.getElementById("charge-strength").addEventListener("input", () => buildGraph(sel.value || null));
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
|
||||
def _build_legend_html(colours: dict, shape: str) -> str:
|
||||
rows = []
|
||||
for key, colour in sorted(colours.items()):
|
||||
if shape == "dot":
|
||||
rows.append(
|
||||
f'<div class="legend-row">'
|
||||
f'<div class="legend-dot" style="background:{colour}"></div>'
|
||||
f"<span>{key}</span></div>"
|
||||
)
|
||||
else:
|
||||
rows.append(
|
||||
f'<div class="legend-row">'
|
||||
f'<div class="legend-line" style="background:{colour}"></div>'
|
||||
f"<span>{key}</span></div>"
|
||||
)
|
||||
return "\n".join(rows)
|
||||
|
||||
|
||||
def write_html(graph: ConnectivityGraph, file_path: str) -> None:
    """Render the graph as a self-contained interactive HTML page.

    Args:
        graph: the fully-built connectivity graph.
        file_path: destination path; parent directories are created on demand.

    Errors are logged instead of raised so inventory output never aborts a scan.
    """
    try:
        parent_dir = os.path.dirname(file_path)
        # os.makedirs("") raises FileNotFoundError, so only create a parent
        # directory when the path actually contains one (the old code failed
        # for bare filenames and silently skipped writing the file).
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)

        # Serialise nodes/edges as compact JSON embedded in the page's script.
        nodes_json = json.dumps(
            [
                {
                    "id": n.id,
                    "type": n.type,
                    "name": n.name,
                    "service": n.service,
                    "region": n.region,
                    "account_id": n.account_id,
                    "properties": n.properties,
                }
                for n in graph.nodes
            ],
            indent=None,
            default=str,
        )
        edges_json = json.dumps(
            [
                {
                    "source_id": e.source_id,
                    "target_id": e.target_id,
                    "edge_type": e.edge_type,
                    "label": e.label or "",
                }
                for e in graph.edges
            ],
            indent=None,
            default=str,
        )

        html = _HTML_TEMPLATE.format(
            generated_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M UTC"),
            nodes_json=nodes_json,
            edges_json=edges_json,
            node_colours_json=json.dumps(_NODE_COLOURS),
            edge_colours_json=json.dumps(_EDGE_COLOURS),
            legend_nodes_html=_build_legend_html(_NODE_COLOURS, "dot"),
            legend_edges_html=_build_legend_html(_EDGE_COLOURS, "line"),
        )

        with open(file_path, "w", encoding="utf-8") as fh:
            fh.write(html)

        logger.info(f"Inventory graph HTML written to {file_path}")
    except Exception as e:
        logger.error(
            f"inventory_output.write_html: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Convenience entry-point called from __main__.py
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def generate_inventory_outputs(output_path: str) -> None:
    """
    Build the connectivity graph from currently-loaded service clients and write
    both JSON and HTML outputs.

    Args:
        output_path: base file path WITHOUT extension, e.g.
                     "output/prowler-output-20240101120000".
                     The function appends .inventory.json and .inventory.html.
    """
    # Imported here to avoid a circular import at module load time.
    from lib.graph_builder import build_graph

    connectivity = build_graph()

    if not connectivity.nodes:
        logger.warning(
            "Inventory graph: no nodes discovered. "
            "Make sure at least one AWS service was scanned before generating the inventory."
        )

    # Emit both representations; each writer handles its own errors.
    for writer, extension in (
        (write_json, "inventory.json"),
        (write_html, "inventory.html"),
    ):
        writer(connectivity, f"{output_path}.{extension}")
|
||||
@@ -0,0 +1,71 @@
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
|
||||
@dataclass
class ResourceNode:
    """
    Represents a single AWS resource as a node in the connectivity graph.

    id : globally unique identifier — always the resource ARN
    type : coarse resource type used for grouping/colour, e.g. "lambda_function"
    name : human-readable label shown on the graph
    service : AWS service name, e.g. "lambda", "ec2", "rds"
    region : AWS region the resource lives in ("global" for IAM resources)
    account_id: AWS account ID
    properties: additional resource-specific metadata (runtime, vpc_id, etc.)
    """

    id: str
    type: str
    name: str
    service: str
    region: str
    account_id: str
    # Mutable default handled via field(default_factory=...) so instances
    # never share the same dict.
    properties: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
class ResourceEdge:
    """
    Represents a directional relationship between two resource nodes.

    source_id : ARN of the source node
    target_id : ARN of the target node
    edge_type : semantic type of the relationship, e.g.:
        "network" – resources share a network path (VPC/subnet/SG)
        "iam" – IAM trust or permission relationship
        "triggers" – one resource can invoke another (event source → Lambda)
        "data_flow" – data is written/read (Lambda → SQS dead-letter queue)
        "depends_on" – soft dependency (Lambda layer, subnet belongs to VPC)
        "routes_to" – traffic routing (LB → target)
        "encrypts" – KMS key encrypts the resource
    label : optional short label rendered on the edge in the HTML graph
    """

    source_id: str
    target_id: str
    edge_type: str
    # Rendered as "" in the HTML output when None (see inventory_output).
    label: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ConnectivityGraph:
|
||||
"""
|
||||
Container for the full inventory connectivity graph.
|
||||
|
||||
nodes: all discovered resource nodes
|
||||
edges: all discovered edges between nodes
|
||||
"""
|
||||
|
||||
nodes: List[ResourceNode] = field(default_factory=list)
|
||||
edges: List[ResourceEdge] = field(default_factory=list)
|
||||
|
||||
def add_node(self, node: ResourceNode) -> None:
|
||||
self.nodes.append(node)
|
||||
|
||||
def add_edge(self, edge: ResourceEdge) -> None:
|
||||
self.edges.append(edge)
|
||||
|
||||
def node_ids(self) -> set:
|
||||
return {n.id for n in self.nodes}
|
||||
@@ -2,47 +2,378 @@
|
||||
title: 'Creating a New Security Compliance Framework in Prowler'
|
||||
---
|
||||
|
||||
This guide explains how to add a new security compliance framework to Prowler, end to end. It covers directory layout, the JSON schema, check mapping conventions, the Pydantic models that validate each framework, the CSV output formatter, local validation, testing, and the pull request process.
|
||||
|
||||
## Introduction
|
||||
|
||||
To create or contribute a custom security framework for Prowler—or to integrate a public framework—you must ensure the necessary checks are available. If they are missing, they must be implemented before proceeding.
|
||||
A compliance framework in Prowler maps a public or custom control catalog (for example CIS, NIST 800-53, PCI DSS, HIPAA, ENS, CCC) to the security checks that Prowler already runs. Each requirement links to zero, one or more Prowler checks. When a scan executes, findings are aggregated per requirement to produce the compliance report rendered by Prowler CLI and Prowler Cloud.
|
||||
|
||||
Each framework is defined in a compliance file per provider. The file should follow the structure used in `prowler/compliance/<provider>/` and be named `<framework>_<version>_<provider>.json`. Follow the format below to create your own.
|
||||
Prowler ships with 85+ compliance frameworks across All Providers. The catalog lives under `prowler/compliance/<provider>/` (or `prowler/compliance/` for universal compliance frameworks)
|
||||
|
||||
## Compliance Framework
|
||||
<Warning>
|
||||
A compliance framework must represent the **complete state** of the source catalog. Every requirement defined by the framework has to be present in the JSON file, even when none of the existing Prowler checks can automate it. In that case, leave `Checks` as an empty array, but do not omit the requirement.
|
||||
|
||||
### Compliance Framework Structure
|
||||
Requirement coverage feeds the compliance percentage calculations and the metadata surfaces (dashboards, widgets, exports). Missing requirements skew those metrics and break the report as a faithful snapshot of the framework.
|
||||
</Warning>
|
||||
|
||||
Each compliance framework file consists of structured metadata that identifies the framework and maps security checks to requirements or controls. Please note that a single requirement can be linked to multiple Prowler checks:
|
||||
### Prerequisites
|
||||
|
||||
- `Framework`: string – The distinguished name of the framework (e.g., CIS).
|
||||
- `Provider`: string – The cloud provider where the framework applies (AWS, Azure, OCI).
|
||||
- `Version`: string – The framework version (e.g., 1.4 for CIS).
|
||||
- `Requirements`: array of objects. – Defines security requirements and their mapping to Prowler checks. All requirements or controls are to be included with the mapping to Prowler.
|
||||
- `Requirements_Id`: string – A unique identifier for each requirement within the framework
|
||||
- `Requirements_Description`: string – The requirement description as specified in the framework.
|
||||
- `Requirements_Attributes`: array of objects. – Contains relevant metadata such as security levels, sections, and any additional data needed for reporting with the result of the findings. Attributes should be derived directly from the framework’s own terminology, ensuring consistency with its established definitions.
|
||||
- `Requirements_Checks`: array. The Prowler checks that are needed to prove this requirement. It can be one or multiple checks. In case automation is not feasible, this can be empty.
|
||||
Before adding a new framework, complete the following checks:
|
||||
|
||||
- **Verify the framework is not already supported.** Inspect `prowler/compliance/<provider>/` for an existing JSON file matching the name and version.
|
||||
- **Confirm the required checks exist.** Every requirement that can be automated must point to one or more existing Prowler checks. For each missing check, implement it first by following the [Prowler Checks](/developer-guide/checks) guide.
|
||||
- **Review a reference framework.** Use an existing framework with a similar structure as your template. `cis_2.0_aws.json` is the canonical reference for CIS-style frameworks. `ccc_aws.json`, `ens_rd2022_aws.json`, and `nist_800_53_revision_5_aws.json` illustrate other attribute shapes.
|
||||
|
||||
## Four-Layer Architecture
|
||||
|
||||
A compliance framework spans four layers. A complete contribution must touch each layer that applies.
|
||||
|
||||
- **Layer 1 – Schema validation:** The Pydantic models in `prowler/lib/check/compliance_models.py` define the canonical schema for each attribute shape (CIS, ENS, Mitre, CCC, C5, CSA CCM, ISO 27001, KISA ISMS-P, AWS Well-Architected, Prowler ThreatScore, and a generic fallback).
|
||||
- **Layer 2 – JSON catalog:** The framework JSON file in `prowler/compliance/<provider>/` lists every requirement and maps it to checks.
|
||||
- **Layer 3 – Output formatter:** The Python module in `prowler/lib/outputs/compliance/<framework>/` builds the CSV row model, the per-provider transformer, and the CLI summary table.
|
||||
- **Layer 4 – Output dispatchers:** The dispatchers in `prowler/lib/outputs/compliance/compliance.py` and `prowler/lib/outputs/compliance/compliance_output.py` route findings to the right formatter based on the framework identifier.
|
||||
|
||||
The rest of this guide walks each layer in order.
|
||||
|
||||
## Directory Structure and File Naming
|
||||
|
||||
Compliance frameworks live at:
|
||||
|
||||
```
|
||||
prowler/compliance/<provider>/<framework>_<version>_<provider>.json
|
||||
```
|
||||
|
||||
The filename conventions are:
|
||||
|
||||
- All lowercase, words separated with underscores.
|
||||
- `<provider>` is a supported provider identifier: `aws`, `azure`, `gcp`, `kubernetes`, `m365`, `github`, `googleworkspace`, `alibabacloud`, `oraclecloud`, `cloudflare`, `mongodbatlas`, `nhn`, `openstack`, `iac`, `llm`.
|
||||
- `<version>` is optional. Omit it when the framework has no versioning, as in `ccc_aws.json`.
|
||||
- The file basename (without `.json`) is the framework key that Prowler CLI accepts via `--compliance`.
|
||||
|
||||
Examples:
|
||||
|
||||
- `prowler/compliance/aws/cis_2.0_aws.json`
|
||||
- `prowler/compliance/aws/nist_800_53_revision_5_aws.json`
|
||||
- `prowler/compliance/azure/ens_rd2022_azure.json`
|
||||
- `prowler/compliance/kubernetes/cis_1.10_kubernetes.json`
|
||||
- `prowler/compliance/aws/ccc_aws.json`
|
||||
|
||||
The output formatter directory mirrors the framework name:
|
||||
|
||||
```
|
||||
prowler/lib/outputs/compliance/<framework>/
|
||||
├── <framework>.py # CLI summary-table dispatcher
|
||||
├── <framework>_<provider>.py # Per-provider transformer class
|
||||
├── models.py # Pydantic CSV row model
|
||||
└── __init__.py
|
||||
```
|
||||
|
||||
## JSON Schema Reference
|
||||
|
||||
Every compliance file is a JSON document with the following top-level keys.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|---|---|---|---|
|
||||
| `Framework` | string | Yes | Canonical framework identifier, for example `CIS`, `NIST-800-53-Revision-5`, `ENS`, `CCC`. |
|
||||
| `Name` | string | Yes | Human-readable framework name displayed by Prowler App. |
|
||||
| `Version` | string | Yes | Framework version, for example `2.0`. Use an empty string only for frameworks without versioning. See [Version Handling](#version-handling). |
|
||||
| `Provider` | string | Yes | Upper-cased provider identifier: `AWS`, `AZURE`, `GCP`, `KUBERNETES`, `M365`, `GITHUB`, `GOOGLEWORKSPACE`, and so on. |
|
||||
| `Description` | string | Yes | Short description of the framework's scope and purpose. |
|
||||
| `Requirements` | array | Yes | List of [requirement objects](#requirement-object). |
|
||||
|
||||
### Requirement Object
|
||||
|
||||
Each entry in `Requirements` describes one control or requirement.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|---|---|---|---|
|
||||
| `Id` | string | Yes | Unique identifier within the framework, for example `1.10` or `CCC.Core.CN01.AR01`. |
|
||||
| `Name` | string | No | Optional human-readable name used by frameworks that distinguish control name from description, such as NIST. |
|
||||
| `Description` | string | Yes | Verbatim description from the source framework. |
|
||||
| `Attributes` | array | Yes | List of [attribute objects](#attribute-objects). The shape depends on the framework. |
|
||||
| `Checks` | array of strings | Yes | Prowler check identifiers that automate the requirement. Leave the list empty when the control cannot be automated. |
|
||||
|
||||
### Attribute Objects
|
||||
|
||||
Attributes carry the metadata that Prowler App and the CSV output display for each requirement. The object shape is framework-specific and is validated by a dedicated Pydantic model in `prowler/lib/check/compliance_models.py`. The most common shapes are summarized below.
|
||||
|
||||
#### CIS_Requirement_Attribute
|
||||
|
||||
Used by every CIS benchmark.
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `Section` | string | Yes | Top-level section, for example `1 Identity and Access Management`. |
|
||||
| `SubSection` | string | No | Optional second-level grouping. |
|
||||
| `Profile` | enum | Yes | One of `Level 1`, `Level 2`, `E3 Level 1`, `E3 Level 2`, `E5 Level 1`, `E5 Level 2`. |
|
||||
| `AssessmentStatus` | enum | Yes | `Manual` or `Automated`. |
|
||||
| `Description` | string | Yes | Control description. |
|
||||
| `RationaleStatement` | string | Yes | Reason the control exists. |
|
||||
| `ImpactStatement` | string | Yes | Impact of non-compliance. |
|
||||
| `RemediationProcedure` | string | Yes | Remediation steps. |
|
||||
| `AuditProcedure` | string | Yes | Audit steps. |
|
||||
| `AdditionalInformation` | string | Yes | Free-form notes. |
|
||||
| `DefaultValue` | string | No | Default configuration value, when relevant. |
|
||||
| `References` | string | Yes | Colon-separated list of reference URLs. |
|
||||
|
||||
#### ENS_Requirement_Attribute
|
||||
|
||||
Used by the Spanish ENS (Esquema Nacional de Seguridad) frameworks.
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `IdGrupoControl` | string | Yes | Control group identifier. |
|
||||
| `Marco` | string | Yes | Framework block (`operacional`, `organizativo`, `proteccion`). |
|
||||
| `Categoria` | string | Yes | Control category. |
|
||||
| `DescripcionControl` | string | Yes | Control description in Spanish. |
|
||||
| `Tipo` | enum | Yes | `refuerzo`, `requisito`, `recomendacion`, `medida`. |
|
||||
| `Nivel` | enum | Yes | `opcional`, `bajo`, `medio`, `alto`. |
|
||||
| `Dimensiones` | array of enum | Yes | Subset of `confidencialidad`, `integridad`, `trazabilidad`, `autenticidad`, `disponibilidad`. |
|
||||
| `ModoEjecucion` | string | Yes | Execution mode (`manual`, `automático`, `híbrido`). |
|
||||
| `Dependencias` | array of strings | Yes | Ids of prerequisite controls. Empty list when none. |
|
||||
|
||||
#### CCC_Requirement_Attribute
|
||||
|
||||
Used by the Common Cloud Controls Catalog.
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `FamilyName` | string | Yes | Control family, for example `Data`. |
|
||||
| `FamilyDescription` | string | Yes | Description of the family. |
|
||||
| `Section` | string | Yes | Section title. |
|
||||
| `SubSection` | string | Yes | Subsection title, or empty string. |
|
||||
| `SubSectionObjective` | string | Yes | Stated objective for the subsection. |
|
||||
| `Applicability` | array of strings | Yes | Applicability tags such as `tlp-green`, `tlp-amber`, `tlp-red`. |
|
||||
| `Recommendation` | string | Yes | Implementation recommendation. |
|
||||
| `SectionThreatMappings` | array of objects | Yes | Each entry has `ReferenceId` and `Identifiers`. |
|
||||
| `SectionGuidelineMappings` | array of objects | Yes | Each entry has `ReferenceId` and `Identifiers`. |
|
||||
|
||||
#### Generic_Compliance_Requirement_Attribute
|
||||
|
||||
The fallback attribute model used when no framework-specific schema applies (for example NIST 800-53, PCI DSS, GDPR, HIPAA).
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `ItemId` | string | No | Item identifier. |
|
||||
| `Section` | string | No | Section name. |
|
||||
| `SubSection` | string | No | Subsection name. |
|
||||
| `SubGroup` | string | No | Subgroup name. |
|
||||
| `Service` | string | No | Affected service, for example `aws`, `iam`. |
|
||||
| `Type` | string | No | Control type. |
|
||||
| `Comment` | string | No | Free-form comment. |
|
||||
|
||||
Additional per-framework attribute models exist for `AWS_Well_Architected_Requirement_Attribute`, `ISO27001_2013_Requirement_Attribute`, `Mitre_Requirement_Attribute_<Provider>`, `KISA_ISMSP_Requirement_Attribute`, `Prowler_ThreatScore_Requirement_Attribute`, `C5Germany_Requirement_Attribute`, and `CSA_CCM_Requirement_Attribute`. Consult `prowler/lib/check/compliance_models.py` for their full field sets.
|
||||
|
||||
<Note>
|
||||
The `Attributes` field is a Pydantic `Union`. The generic attribute model must remain the last element of that Union, otherwise Pydantic v1 silently coerces every framework into the generic shape and your specialized fields are dropped.
|
||||
</Note>
|
||||
|
||||
## Minimal Working Example
|
||||
|
||||
The following snippet is a complete, valid framework file named `my_framework_1.0_aws.json`, saved at `prowler/compliance/aws/my_framework_1.0_aws.json`. It uses the generic attribute shape for simplicity.
|
||||
|
||||
```json title="prowler/compliance/aws/my_framework_1.0_aws.json"
|
||||
{
|
||||
"Framework": "<framework>-<provider>",
|
||||
"Version": "<version>",
|
||||
"Framework": "My-Framework",
|
||||
"Name": "My Framework 1.0 for AWS",
|
||||
"Version": "1.0",
|
||||
"Provider": "AWS",
|
||||
"Description": "Internal baseline for AWS accounts.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "<unique-id>",
|
||||
"Description": "Full description of the requirement",
|
||||
"Checks": [
|
||||
"Here is the prowler check or checks that will be executed"
|
||||
],
|
||||
"Id": "MF-1.1",
|
||||
"Description": "Root account must have multi-factor authentication enabled.",
|
||||
"Attributes": [
|
||||
{
|
||||
<Add here your custom attributes.>
|
||||
"ItemId": "MF-1.1",
|
||||
"Section": "Identity and Access Management",
|
||||
"SubSection": "Root Account",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_root_hardware_mfa_enabled"
|
||||
]
|
||||
},
|
||||
...
|
||||
{
|
||||
"Id": "MF-2.1",
|
||||
"Description": "S3 buckets must block public access at the account level.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "MF-2.1",
|
||||
"Section": "Data Protection",
|
||||
"Service": "s3"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_account_level_public_access_blocks"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`. Also, you need to add a new conditional in `prowler/lib/outputs/file_descriptors.py` if creating a new CSV model.
|
||||
## Mapping Checks to Requirements
|
||||
|
||||
Each requirement links to the Prowler checks that, together, produce a PASS or FAIL verdict for that control.
|
||||
|
||||
- **Include every requirement from the source catalog.** The framework file must mirror the full control list, one-to-one. Compliance percentages, dashboards, and exported metadata are computed against the total requirement count, so omitting an unmappable control inflates coverage and misrepresents the framework.
|
||||
- List every check by its canonical identifier, the value of `CheckID` inside the check's `.metadata.json` file.
|
||||
- One requirement can reference multiple checks. The requirement is evaluated as FAIL when any referenced check produces a FAIL finding for a resource in scope.
|
||||
- Leave `Checks` as an empty array when the requirement cannot be automated. The requirement still appears in the report, contributes to the total, and resolves to `MANUAL`. An empty mapping is valid; a missing requirement is not.
|
||||
- Reuse checks across requirements when the same control applies in multiple places. Do not duplicate check logic to match framework structure.
|
||||
- Avoid referencing checks from a different provider. A compliance file is bound to one provider, and cross-provider checks will never match findings in the scan.
|
||||
|
||||
To discover available checks, run:
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> --list-checks
|
||||
```
|
||||
|
||||
## Supporting Multiple Providers
|
||||
|
||||
Each compliance file targets a single provider. To cover several providers with the same framework (for example CIS across AWS, Azure, and GCP), ship one JSON file per provider:
|
||||
|
||||
```
|
||||
prowler/compliance/aws/cis_2.0_aws.json
|
||||
prowler/compliance/azure/cis_2.0_azure.json
|
||||
prowler/compliance/gcp/cis_2.0_gcp.json
|
||||
```
|
||||
|
||||
Keep the `Framework` and `Version` values identical across the files so the dispatcher matches them, and change only the `Provider`, `Checks`, and provider-specific metadata.
|
||||
|
||||
The CIS output formatter already supports every provider listed above. For a brand-new framework that spans several providers, add one transformer per provider in `prowler/lib/outputs/compliance/<framework>/` and extend the summary-table dispatcher accordingly. See [Output Formatter](#output-formatter).
|
||||
|
||||
## Output Formatter
|
||||
|
||||
Prowler renders every compliance framework in two forms: a detailed CSV report written to disk, and a summary table printed in the CLI. Both are produced by the output formatter package for the framework.
|
||||
|
||||
For a new framework named `my_framework`, create:
|
||||
|
||||
```
|
||||
prowler/lib/outputs/compliance/my_framework/
|
||||
├── __init__.py
|
||||
├── my_framework.py # CLI summary table dispatcher
|
||||
├── my_framework_aws.py # Per-provider transformer
|
||||
└── models.py # CSV row Pydantic model
|
||||
```
|
||||
|
||||
### Step 1 – Define the CSV Row Model
|
||||
|
||||
In `models.py`, declare a Pydantic v1 model with one field per CSV column. Use existing models such as `AWSCISModel` in `prowler/lib/outputs/compliance/cis/models.py` as the reference. Fields typically include `Provider`, `Description`, `AccountId`, `Region`, `AssessmentDate`, `Requirements_Id`, `Requirements_Description`, one `Requirements_Attributes_*` field per attribute key, plus the finding fields `Status`, `StatusExtended`, `ResourceId`, `ResourceName`, `CheckId`, `Muted`, `Framework`, `Name`.
|
||||
|
||||
### Step 2 – Implement the Transformer Class
|
||||
|
||||
In `my_framework_aws.py`, subclass `ComplianceOutput` from `prowler.lib.outputs.compliance.compliance_output` and implement `transform(findings, compliance, compliance_name)`. Iterate over `findings`, match each finding to the requirements it satisfies through `finding.compliance.get(compliance_name, [])`, and append one row per attribute to `self._data`.
|
||||
|
||||
### Step 3 – Add the Summary-Table Dispatcher
|
||||
|
||||
In `my_framework.py`, implement `get_my_framework_table(findings, bulk_checks_metadata, compliance_framework, output_filename, output_directory, compliance_overview)` following the pattern in `prowler/lib/outputs/compliance/cis/cis.py`.
|
||||
|
||||
### Step 4 – Register the Framework in the Dispatchers
|
||||
|
||||
- Add the dispatcher call in `prowler/lib/outputs/compliance/compliance.py`, inside `display_compliance_table`, with a branch such as `elif "my_framework" in compliance_framework:`.
|
||||
- Register the CSV model and transformer in `prowler/lib/outputs/compliance/compliance_output.py` so the CSV file is emitted during the scan.
|
||||
|
||||
<Note>
|
||||
For NIST-style catalogs that use `Generic_Compliance_Requirement_Attribute`, no custom formatter is needed. The generic formatter in `prowler/lib/outputs/compliance/generic/` handles them automatically, provided the JSON validates against the generic attribute schema.
|
||||
</Note>
|
||||
|
||||
## Version Handling
|
||||
|
||||
Prowler matches frameworks by concatenating `Framework` and `Version`. A missing or empty `Version` collapses several frameworks to the same key and breaks CLI filtering with `--compliance`.
|
||||
|
||||
- Always set `Version` to a non-empty string, even for frameworks that rename editions rather than version them. Use the edition identifier (for example `RD2022`, `v2025.10`, `4.0`).
|
||||
- When the source catalog has no version, use the first year of adoption or the release date.
|
||||
- Make sure the version substring embedded in the filename matches `Version`, because the CLI dispatcher reads `compliance_framework.split("_")[1]` to select the correct version.
|
||||
|
||||
## Validating the Framework Locally
|
||||
|
||||
Follow the steps below before opening a pull request.
|
||||
|
||||
### 1. Run the Compliance Model Validator
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> --list-compliance
|
||||
```
|
||||
|
||||
The framework must appear in the output. A validation error indicates a schema mismatch between the JSON file and the attribute model.
|
||||
|
||||
### 2. Run a Scan Filtered by the Framework
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> \
|
||||
--compliance <framework>_<version>_<provider> \
|
||||
--log-level ERROR
|
||||
```
|
||||
|
||||
Verify that:
|
||||
|
||||
- Prowler produces a CSV file under `output/compliance/` with the expected name.
|
||||
- The CLI summary table lists every section in the framework.
|
||||
- Findings roll up under the expected requirements.
|
||||
|
||||
### 3. Inspect the CSV Output
|
||||
|
||||
Open the generated CSV and confirm:
|
||||
|
||||
- All columns defined in `models.py` appear.
|
||||
- Every requirement has at least one row per scanned resource.
|
||||
- Values such as `Requirements_Attributes_Section` reflect the JSON content.
|
||||
|
||||
### 4. Verify the Framework in Prowler App
|
||||
|
||||
Launch Prowler App locally (`docker compose up` from the repository root) and run a scan with the new compliance framework. Confirm the compliance page renders the requirements, sections, and status widgets correctly.
|
||||
|
||||
## Testing
|
||||
|
||||
Compliance contributions require two layers of tests.
|
||||
|
||||
- **Schema tests** exercise the Pydantic models. Extend `tests/lib/check/universal_compliance_models_test.py` with a case that loads the new JSON file and asserts the attribute type matches the expected model.
|
||||
- **Output tests** exercise the transformer. Mirror the structure under `tests/lib/outputs/compliance/<framework>/` with fixtures that feed synthetic findings through the transformer and assert the resulting CSV rows.
|
||||
|
||||
Run the suite with:
|
||||
|
||||
```bash
|
||||
poetry run pytest -n auto tests/lib/check/universal_compliance_models_test.py \
|
||||
tests/lib/outputs/compliance/
|
||||
```
|
||||
|
||||
For guidance on writing Prowler SDK tests, refer to [Unit Testing](/developer-guide/unit-testing).
|
||||
|
||||
## Submitting the Pull Request
|
||||
|
||||
Before opening the pull request:
|
||||
|
||||
1. Run the complete QA pipeline:
|
||||
```bash
|
||||
poetry run pre-commit run --all-files
|
||||
poetry run pytest -n auto
|
||||
```
|
||||
2. Add a changelog entry under the `### 🚀 Added` section of `prowler/CHANGELOG.md`, describing the new framework and the providers it covers.
|
||||
3. Follow the [Pull Request Template](https://github.com/prowler-cloud/prowler/blob/master/.github/pull_request_template.md) and set the PR title using Conventional Commits, for example `feat(compliance): add My Framework 1.0 for AWS`.
|
||||
4. Request review from the compliance codeowners listed in `.github/CODEOWNERS`.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
The following issues are the most common when contributing a compliance framework.
|
||||
|
||||
- **`ValidationError: field required` during scan.** The JSON is missing a required attribute field. Re-check the matching Pydantic model in `prowler/lib/check/compliance_models.py`.
|
||||
- **All attributes collapse to `Generic_Compliance_Requirement_Attribute` values.** The Pydantic `Union` is ordered incorrectly, or the JSON matches only the generic shape. Move the generic model to the last Union position and ensure every required field is present in the JSON.
|
||||
- **`--compliance` filter does not find the framework.** The filename does not match the expected pattern `<framework>_<version>_<provider>.json`, the version is empty, or the file lives outside `prowler/compliance/<provider>/`.
|
||||
- **CLI summary table is empty but the CSV is populated.** The dispatcher branch in `prowler/lib/outputs/compliance/compliance.py` is missing or its substring match does not catch the framework key.
|
||||
- **CSV file is missing after the scan.** The transformer class is not registered in `prowler/lib/outputs/compliance/compliance_output.py`, or `transform()` raises silently. Run the scan with `--log-level DEBUG`.
|
||||
- **Findings do not roll up under a requirement.** A check listed in `Checks` either does not exist for that provider or is spelled incorrectly. Run `--list-checks | grep <check_name>` to confirm.
|
||||
|
||||
## Reference Examples
|
||||
|
||||
Use the following files as templates when modeling a new contribution.
|
||||
|
||||
- `prowler/compliance/aws/cis_2.0_aws.json` – CIS attribute shape.
|
||||
- `prowler/compliance/aws/nist_800_53_revision_5_aws.json` – Generic attribute shape.
|
||||
- `prowler/compliance/aws/ccc_aws.json` – CCC attribute shape.
|
||||
- `prowler/compliance/azure/ens_rd2022_azure.json` – ENS attribute shape.
|
||||
- `prowler/lib/check/compliance_models.py` – Canonical Pydantic schemas.
|
||||
- `prowler/lib/outputs/compliance/cis/` – Reference implementation of a multi-provider output formatter.
|
||||
- `prowler/lib/outputs/compliance/generic/` – Reference implementation of a generic output formatter.
|
||||
|
||||
@@ -119,6 +119,7 @@
|
||||
"user-guide/tutorials/prowler-app-multi-tenant",
|
||||
"user-guide/tutorials/prowler-app-api-keys",
|
||||
"user-guide/tutorials/prowler-app-import-findings",
|
||||
"user-guide/tutorials/prowler-app-alerts",
|
||||
{
|
||||
"group": "Mutelist",
|
||||
"expanded": true,
|
||||
@@ -176,7 +177,6 @@
|
||||
"pages": [
|
||||
"user-guide/cli/tutorials/misc",
|
||||
"user-guide/cli/tutorials/reporting",
|
||||
"user-guide/cli/tutorials/compliance",
|
||||
"user-guide/cli/tutorials/dashboard",
|
||||
"user-guide/cli/tutorials/configuration_file",
|
||||
"user-guide/cli/tutorials/logging",
|
||||
@@ -332,12 +332,20 @@
|
||||
"user-guide/providers/vercel/getting-started-vercel",
|
||||
"user-guide/providers/vercel/authentication"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Okta",
|
||||
"pages": [
|
||||
"user-guide/providers/okta/getting-started-okta",
|
||||
"user-guide/providers/okta/authentication"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Compliance",
|
||||
"pages": [
|
||||
"user-guide/compliance/tutorials/compliance",
|
||||
"user-guide/compliance/tutorials/threatscore"
|
||||
]
|
||||
},
|
||||
@@ -503,6 +511,10 @@
|
||||
}
|
||||
},
|
||||
"redirects": [
|
||||
{
|
||||
"source": "/user-guide/cli/tutorials/compliance",
|
||||
"destination": "/user-guide/compliance/tutorials/compliance"
|
||||
},
|
||||
{
|
||||
"source": "/projects/prowler-open-source/en/latest/tutorials/prowler-app-lighthouse",
|
||||
"destination": "/user-guide/tutorials/prowler-app-lighthouse"
|
||||
|
||||
@@ -121,8 +121,8 @@ To update the environment file:
|
||||
Edit the `.env` file and change version values:
|
||||
|
||||
```env
|
||||
PROWLER_UI_VERSION="5.25.3"
|
||||
PROWLER_API_VERSION="5.25.3"
|
||||
PROWLER_UI_VERSION="5.26.1"
|
||||
PROWLER_API_VERSION="5.26.1"
|
||||
```
|
||||
|
||||
<Note>
|
||||
|
||||
|
After Width: | Height: | Size: 38 KiB |
|
After Width: | Height: | Size: 48 KiB |
|
After Width: | Height: | Size: 534 KiB |
|
After Width: | Height: | Size: 659 KiB |
|
After Width: | Height: | Size: 759 KiB |
|
After Width: | Height: | Size: 62 KiB |
|
After Width: | Height: | Size: 534 KiB |
|
After Width: | Height: | Size: 257 KiB |
|
After Width: | Height: | Size: 399 KiB |
|
After Width: | Height: | Size: 425 KiB |
|
After Width: | Height: | Size: 88 KiB |
|
After Width: | Height: | Size: 222 KiB |
@@ -47,11 +47,12 @@ Prowler supports a wide range of providers organized by category:
|
||||
| Provider | Support | Audit Scope/Entities | Interface |
|
||||
| ----------------------------------------------------------------------------------------- | -------- | ---------------------------- | ------------ |
|
||||
| [GitHub](/user-guide/providers/github/getting-started-github) | Official | Organizations / Repositories | UI, API, CLI |
|
||||
| [Google Workspace](/user-guide/providers/googleworkspace/getting-started-googleworkspace) | Official | Domains | CLI |
|
||||
| [Google Workspace](/user-guide/providers/googleworkspace/getting-started-googleworkspace) | Official | Domains | UI, API, CLI |
|
||||
| [LLM](/user-guide/providers/llm/getting-started-llm) | Official | Models | CLI |
|
||||
| [M365](/user-guide/providers/microsoft365/getting-started-m365) | Official | Tenants | UI, API, CLI |
|
||||
| [MongoDB Atlas](/user-guide/providers/mongodbatlas/getting-started-mongodbatlas) | Official | Organizations | UI, API, CLI |
|
||||
| [Vercel](/user-guide/providers/vercel/getting-started-vercel) | Official | Teams / Projects | CLI |
|
||||
| [Okta](/user-guide/providers/okta/getting-started-okta) | Official | Organizations | CLI |
|
||||
| [Vercel](/user-guide/providers/vercel/getting-started-vercel) | Official | Teams / Projects | UI, API, CLI |
|
||||
|
||||
### Kubernetes
|
||||
|
||||
|
||||
@@ -1,12 +1,17 @@
|
||||
export const VersionBadge = ({ version }) => {
|
||||
return (
|
||||
<code className="version-badge-container">
|
||||
<p className="version-badge">
|
||||
<span className="version-badge-label">Added in:</span>
|
||||
<code className="version-badge-version">{version}</code>
|
||||
</p>
|
||||
</code>
|
||||
|
||||
|
||||
<a
|
||||
href={`https://github.com/prowler-cloud/prowler/releases/tag/${version}`}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="version-badge-link"
|
||||
>
|
||||
<span className="version-badge-container">
|
||||
<span className="version-badge">
|
||||
<span className="version-badge-label">Added in:</span>
|
||||
<span className="version-badge-version">{version}</span>
|
||||
</span>
|
||||
</span>
|
||||
</a>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1,4 +1,21 @@
|
||||
/* Version Badge Styling */
|
||||
.version-badge-link,
|
||||
.version-badge-link:hover,
|
||||
.version-badge-link:focus,
|
||||
.version-badge-link:active,
|
||||
.version-badge-link:visited {
|
||||
display: inline-block;
|
||||
text-decoration: none !important;
|
||||
background-image: none !important;
|
||||
border-bottom: none !important;
|
||||
color: inherit;
|
||||
transition: opacity 0.15s ease-in-out;
|
||||
}
|
||||
|
||||
.version-badge-link:hover {
|
||||
opacity: 0.85;
|
||||
}
|
||||
|
||||
.version-badge-container {
|
||||
display: inline-block;
|
||||
margin: 0 0 1rem 0;
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
---
|
||||
title: 'Compliance'
|
||||
---
|
||||
|
||||
Prowler allows you to execute checks based on requirements defined in compliance frameworks. By default, it will execute and give you an overview of the status of each compliance framework:
|
||||
|
||||
<img src="/images/cli/compliance/compliance.png" />
|
||||
|
||||
You can find CSVs containing detailed compliance results in the compliance folder within Prowler's output folder.
|
||||
|
||||
## Execute Prowler based on Compliance Frameworks
|
||||
|
||||
Prowler can analyze your environment based on a specific compliance framework and get more details, to do it, you can use option `--compliance`:
|
||||
|
||||
```sh
|
||||
prowler <provider> --compliance <compliance_framework>
|
||||
```
|
||||
|
||||
Standard results will be shown and additionally the framework information as the sample below for CIS AWS 2.0. For details a CSV file has been generated as well.
|
||||
|
||||
<img src="/images/cli/compliance/compliance-cis-sample1.png" />
|
||||
|
||||
<Note>
|
||||
**If Prowler can't find a resource related to a check from a compliance requirement, this requirement won't appear in the output**
|
||||
</Note>
|
||||
|
||||
## List Available Compliance Frameworks
|
||||
|
||||
To see which compliance frameworks are covered by Prowler, use the `--list-compliance` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance
|
||||
```
|
||||
|
||||
Or you can visit [Prowler Hub](https://hub.prowler.com/compliance).
|
||||
|
||||
## List Requirements of Compliance Frameworks
|
||||
To list requirements for a compliance framework, use the `--list-compliance-requirements` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance-requirements <compliance_framework(s)>
|
||||
```
|
||||
|
||||
Example for the first requirements of CIS 1.5 for AWS:
|
||||
|
||||
```
|
||||
Listing CIS 1.5 AWS Compliance Requirements:
|
||||
|
||||
Requirement Id: 1.1
|
||||
- Description: Maintain current contact details
|
||||
- Checks:
|
||||
account_maintain_current_contact_details
|
||||
|
||||
Requirement Id: 1.2
|
||||
- Description: Ensure security contact information is registered
|
||||
- Checks:
|
||||
account_security_contact_information_is_registered
|
||||
|
||||
Requirement Id: 1.3
|
||||
- Description: Ensure security questions are registered in the AWS account
|
||||
- Checks:
|
||||
account_security_questions_are_registered_in_the_aws_account
|
||||
|
||||
Requirement Id: 1.4
|
||||
- Description: Ensure no 'root' user account access key exists
|
||||
- Checks:
|
||||
iam_no_root_access_key
|
||||
|
||||
Requirement Id: 1.5
|
||||
- Description: Ensure MFA is enabled for the 'root' user account
|
||||
- Checks:
|
||||
iam_root_mfa_enabled
|
||||
|
||||
[redacted]
|
||||
|
||||
```
|
||||
|
||||
## Create and contribute adding other Security Frameworks
|
||||
|
||||
This information is part of the Developer Guide and can be found [here](/developer-guide/security-compliance-framework).
|
||||
@@ -56,6 +56,7 @@ The following list includes all the AWS checks with configurable variables that
|
||||
| `elb_is_in_multiple_az` | `elb_min_azs` | Integer |
|
||||
| `elbv2_is_in_multiple_az` | `elbv2_min_azs` | Integer |
|
||||
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `iam_user_access_not_stale_to_sagemaker` | `max_unused_sagemaker_access_days` | Integer |
|
||||
| `iam_user_accesskey_unused` | `max_unused_access_keys_days` | Integer |
|
||||
| `iam_user_console_access_unused` | `max_console_access_days` | Integer |
|
||||
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
|
||||
@@ -157,6 +158,15 @@ The following list includes all the Vercel checks with configurable variables th
|
||||
| `team_member_role_least_privilege` | `max_owners` | Integer |
|
||||
| `team_no_stale_invitations` | `stale_invitation_threshold_days` | Integer |
|
||||
|
||||
## Okta
|
||||
|
||||
### Configurable Checks
|
||||
The following list includes all the Okta checks with configurable variables that can be changed in the configuration YAML file:
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|------------------------------------|---------|
|
||||
| `signon_global_session_idle_timeout_15min` | `okta_max_session_idle_minutes` | Integer |
|
||||
|
||||
## Config YAML File Structure
|
||||
|
||||
<Note>
|
||||
@@ -186,6 +196,8 @@ aws:
|
||||
max_unused_access_keys_days: 45
|
||||
# aws.iam_user_console_access_unused --> CIS recommends 45 days
|
||||
max_console_access_days: 45
|
||||
# aws.iam_user_access_not_stale_to_sagemaker --> default 90 days
|
||||
max_unused_sagemaker_access_days: 90
|
||||
|
||||
# AWS EC2 Configuration
|
||||
# aws.ec2_elastic_ip_shodan
|
||||
|
||||
@@ -0,0 +1,259 @@
|
||||
---
|
||||
title: 'Compliance'
|
||||
description: 'Run security checks against compliance frameworks, review posture across providers, and download CSV or PDF reports from Prowler Cloud, Prowler App, and Prowler CLI.'
|
||||
---
|
||||
|
||||
Prowler maps every security check to one or more industry-standard compliance frameworks, so a single scan produces both technical findings and framework-aligned evidence. The same evaluation runs identically whether scans are launched from Prowler Cloud, Prowler App, or Prowler CLI.
|
||||
|
||||
Out of the box, Prowler covers frameworks such as CIS Benchmarks, NIST 800-53, NIST CSF, NIS2, ENS RD2022, ISO 27001, PCI-DSS, SOC 2, GDPR, HIPAA, AWS Well-Architected, BSI C5, CSA CCM, MITRE ATT&CK, KISA ISMS-P, FedRAMP, and Prowler ThreatScore. The full catalog is available at [Prowler Hub](https://hub.prowler.com/compliance).
|
||||
|
||||
<Note>
|
||||
For the unified compliance score methodology used across frameworks, see [Prowler ThreatScore Documentation](/user-guide/compliance/tutorials/threatscore).
|
||||
</Note>
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card title="Prowler Cloud" icon="cloud" href="#prowler-cloud">
|
||||
Review compliance posture using Prowler Cloud
|
||||
</Card>
|
||||
<Card title="Prowler CLI" icon="terminal" href="#prowler-cli">
|
||||
Run compliance scans using Prowler CLI
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Prowler Cloud
|
||||
|
||||
The Compliance section in Prowler Cloud and Prowler App centralizes compliance posture across every connected provider. It aggregates scan results, surfaces Prowler ThreatScore, and exposes detailed requirement-level evidence for each supported framework.
|
||||
|
||||
### Accessing the Compliance Section
|
||||
|
||||
To open the compliance overview, follow these steps:
|
||||
|
||||
1. Sign in to Prowler Cloud at [cloud.prowler.com](https://cloud.prowler.com/sign-in) or to a self-hosted Prowler App instance.
|
||||
2. Select **Compliance** from the left navigation.
|
||||
|
||||
The page lists every framework evaluated by the most recent completed scan of the selected provider.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-overview.png" alt="Compliance overview page in Prowler Cloud and App showing filters, the Prowler ThreatScore card, and the framework grid" width="900" />
|
||||
|
||||
<Note>
|
||||
Compliance results require at least one completed scan. If no scan has finished yet, Prowler Cloud and App display a notice prompting you to launch a new scan or wait for one to complete.
|
||||
</Note>
|
||||
|
||||
### Filtering Compliance Results
|
||||
|
||||
The filters bar at the top of the overview controls which scan and which regions feed every card on the page.
|
||||
|
||||
#### Scan Selector
|
||||
|
||||
The scan selector lists completed scans across all connected providers. Each entry includes the provider type, alias, and completion timestamp. Selecting a scan updates the entire page, including ThreatScore and every framework card.
|
||||
|
||||
#### Region Filter
|
||||
|
||||
The region multi-select narrows results to one or more regions detected in the selected scan. Use it to evaluate compliance posture for a specific geography or account boundary. The filter applies to:
|
||||
|
||||
* The framework grid scores and pass/fail counts.
|
||||
* The detailed requirement view inside each framework.
|
||||
|
||||
<Note>
|
||||
Region filters apply only to providers that report a region attribute (for example, AWS, Azure, and Google Cloud). Providers without regions ignore the filter.
|
||||
</Note>
|
||||
|
||||
#### Clearing Filters
|
||||
|
||||
Select **Clear filters** to reset the region filter and any other applied filters to their default state. The scan selector is preserved.
|
||||
|
||||
### Reviewing the Prowler ThreatScore Card
|
||||
|
||||
When the selected scan includes Prowler ThreatScore data, a dedicated card appears at the top of the overview, showing:
|
||||
|
||||
* The overall ThreatScore (0–100) with a color-coded indicator.
|
||||
* A progress bar reflecting current posture.
|
||||
* Per-pillar bars for IAM, Attack Surface, and Logging and Monitoring.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-threatscore-card.png" alt="Prowler ThreatScore badge on the Compliance overview showing the overall score and per-pillar bars" width="900" />
|
||||
|
||||
Selecting the card opens the ThreatScore framework detail page, covered in [Working With the Framework Detail Page](#working-with-the-framework-detail-page).
|
||||
|
||||
For a complete explanation of the methodology, formula, and weighting, see [Prowler ThreatScore Documentation](/user-guide/compliance/tutorials/threatscore).
|
||||
|
||||
### Exploring the Framework Grid
|
||||
|
||||
Below ThreatScore, the framework grid shows one card per supported compliance framework. Each card includes:
|
||||
|
||||
* **Framework logo and name:** Identifies the standard (CIS, NIST, ENS, ISO 27001, PCI-DSS, SOC 2, NIS2, CSA CCM, MITRE ATT&CK, and more).
|
||||
* **Version:** Indicates the framework version applied to the scan.
|
||||
* **Score:** The percentage of passing requirements over the total evaluated.
|
||||
* **Passing Requirements:** A `passed / total` counter for additional context.
|
||||
* **Download dropdown:** Quick access to the CSV report and, when supported, the PDF report.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-card-download.png" alt="Download dropdown on a framework card showing CSV and PDF report options" width="500" />
|
||||
|
||||
Select any card to open the framework detail page.
|
||||
|
||||
<Note>
|
||||
Score color coding follows three thresholds: red for severely low compliance, amber for partial compliance, and green for healthy posture. Hover over the score for the exact percentage.
|
||||
</Note>
|
||||
|
||||
### Working With the Framework Detail Page
|
||||
|
||||
The detail page provides everything needed to evaluate a single framework: aggregate metrics, top failure sections, and a requirement-by-requirement view.
|
||||
|
||||
#### Header, Summary Cards, and Download Actions
|
||||
|
||||
The header shows the framework name, version, the provider scan being reviewed, and CSV / PDF download buttons. Below the header, summary cards condense the framework state at a glance:
|
||||
|
||||
* **Requirements Status:** Donut chart with `Pass`, `Fail`, and `Manual` counts plus the total number of requirements.
|
||||
* **Top Failed Sections:** Ranks the sections or pillars with the highest number of failing requirements.
|
||||
* **ThreatScore Breakdown:** Appears only on the ThreatScore framework. It shows the overall score and per-pillar scores aligned with the ThreatScore pillars (IAM, Attack Surface, Logging and Monitoring, Encryption).
|
||||
|
||||
The same layout applies to every compliance framework. ThreatScore is the only framework that includes the extra Breakdown card on the left; for any other framework, the Requirements Status and Top Failed Sections cards span the full row.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-threatscore-detail.png" alt="Prowler ThreatScore detail page including the extra Breakdown card alongside Requirements Status and Top Failed Sections" width="900" />
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-detail-header.png" alt="CIS framework detail page showing only the Requirements Status donut and the Top Failed Sections card, without the ThreatScore Breakdown" width="900" />
|
||||
|
||||
#### Requirements Accordion
|
||||
|
||||
Below the summary cards, an accordion organizes every requirement of the framework. Expand a section to see:
|
||||
|
||||
* **Requirement ID and title:** Reflect the official identifier from the framework.
|
||||
* **Pass / Fail / Manual badges:** Indicate the status of each requirement based on the underlying checks.
|
||||
* **Custom details panel:** Opens additional context tailored to the framework. For frameworks with custom layouts, the panel surfaces fields such as control objectives, severity, attack tactics, regulatory references, or required evidence.
|
||||
|
||||
Select a requirement to open the detail panel and review the failing checks, the resources affected, and remediation guidance.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-requirements-accordion.png" alt="Expanded CIS requirement showing description, rationale, remediation procedure, audit procedure, profile and assessment tags, references, and the underlying check" width="900" />
|
||||
|
||||
##### Frameworks With Custom Detail Layouts
|
||||
|
||||
Several frameworks include enriched detail panels that highlight fields specific to the standard:
|
||||
|
||||
* ASD Essential Eight
|
||||
* AWS Well-Architected Framework
|
||||
* BSI C5
|
||||
* Cloud Controls Matrix (CSA CCM)
|
||||
* CIS Benchmarks
|
||||
* CCC (Common Cloud Controls)
|
||||
* ENS RD2022
|
||||
* ISO 27001
|
||||
* KISA ISMS-P
|
||||
* MITRE ATT&CK
|
||||
* Prowler ThreatScore
|
||||
|
||||
Frameworks without a custom layout fall back to the generic details panel, which still exposes the official requirement metadata captured by Prowler.
|
||||
|
||||
### Downloading Compliance Reports
|
||||
|
||||
Prowler Cloud and App expose two formats:
|
||||
|
||||
* **CSV report:** Every requirement, every check, and every finding for the selected scan and filters. Available for all supported frameworks.
|
||||
* **PDF report:** Curated executive-style report. Currently supported for Prowler ThreatScore, ENS RD2022, NIS2, and CSA CCM. Additional PDF reports are added in subsequent Prowler releases.
|
||||
|
||||
#### Downloading From the Detail Page
|
||||
|
||||
Inside any framework detail page, the **CSV** and **PDF** buttons in the header trigger the same downloads as the overview dropdown. The PDF button only appears for frameworks that support it.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-detail-download.png" alt="Top of a framework detail page showing the CSV and PDF download buttons in the header" width="900" />
|
||||
|
||||
<Note>
|
||||
Region filters disable the per-card download dropdown to avoid generating partial reports. Open the framework detail page when downloads scoped to a region are required, or remove the region filter to download the full report.
|
||||
</Note>
|
||||
|
||||
#### Downloading the Full Scan Output
|
||||
|
||||
To export every framework, finding, and resource at once, use the **Scan Jobs** section instead. The ZIP archive contains the CSV, JSON-OCSF, and HTML reports plus a `compliance/` subfolder with one CSV per framework. See [Prowler App — Getting Started](/user-guide/tutorials/prowler-app) for details.
|
||||
|
||||
### API Access
|
||||
|
||||
Every report available in the UI is also reachable through the Prowler API. The following endpoints are the most relevant:
|
||||
|
||||
* [Retrieve a scan compliance report as CSV](https://api.prowler.com/api/v1/docs#tag/Scan/operation/scans_compliance_retrieve)
|
||||
* [Download a complete scan output (ZIP)](https://api.prowler.com/api/v1/docs#tag/Scan/operation/scans_report_retrieve)
|
||||
|
||||
Use the API to integrate compliance evidence into ticketing systems, executive dashboards, or downstream pipelines.
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
Prowler CLI evaluates the same compliance frameworks as Prowler Cloud and App, and produces detailed CSV outputs alongside the standard scan results. By default, it runs every supported framework and prints a status summary at the end of the scan:
|
||||
|
||||
<img src="/images/cli/compliance/compliance.png" />
|
||||
|
||||
Detailed compliance results are stored as CSV files under the `compliance/` subfolder of Prowler's output directory.
|
||||
|
||||
### Scan a Specific Compliance Framework
|
||||
|
||||
To scope a scan to a single framework and get the framework-specific summary, use the `--compliance` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --compliance <compliance_framework>
|
||||
```
|
||||
|
||||
Standard results plus the framework breakdown are printed to the terminal. A dedicated CSV is also generated under the `compliance/` output folder. Sample output for CIS AWS 2.0:
|
||||
|
||||
<img src="/images/cli/compliance/compliance-cis-sample1.png" />
|
||||
|
||||
<Note>
|
||||
If Prowler cannot find a resource related to a check from a compliance requirement, that requirement is omitted from the output.
|
||||
</Note>
|
||||
|
||||
### List Available Compliance Frameworks
|
||||
|
||||
To see which compliance frameworks are covered by a given provider, use the `--list-compliance` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance
|
||||
```
|
||||
|
||||
The full catalog is also browsable at [Prowler Hub](https://hub.prowler.com/compliance).
|
||||
|
||||
### List Requirements of a Compliance Framework
|
||||
|
||||
To inspect the requirements that compose a specific framework, use the `--list-compliance-requirements` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance-requirements <compliance_framework(s)>
|
||||
```
|
||||
|
||||
Sample output for the first requirements of CIS 1.5 for AWS:
|
||||
|
||||
```
|
||||
Listing CIS 1.5 AWS Compliance Requirements:
|
||||
|
||||
Requirement Id: 1.1
|
||||
- Description: Maintain current contact details
|
||||
- Checks:
|
||||
account_maintain_current_contact_details
|
||||
|
||||
Requirement Id: 1.2
|
||||
- Description: Ensure security contact information is registered
|
||||
- Checks:
|
||||
account_security_contact_information_is_registered
|
||||
|
||||
Requirement Id: 1.3
|
||||
- Description: Ensure security questions are registered in the AWS account
|
||||
- Checks:
|
||||
account_security_questions_are_registered_in_the_aws_account
|
||||
|
||||
Requirement Id: 1.4
|
||||
- Description: Ensure no 'root' user account access key exists
|
||||
- Checks:
|
||||
iam_no_root_access_key
|
||||
|
||||
Requirement Id: 1.5
|
||||
- Description: Ensure MFA is enabled for the 'root' user account
|
||||
- Checks:
|
||||
iam_root_mfa_enabled
|
||||
|
||||
[redacted]
|
||||
|
||||
```
|
||||
|
||||
## Contributing New Compliance Frameworks
|
||||
|
||||
To request a new framework or contribute one, see [Creating a New Security Compliance Framework in Prowler](/developer-guide/security-compliance-framework). The developer guide covers the Pydantic schema, JSON catalog, output formatter, and PR submission steps required to ship a new framework end to end.
|
||||
|
||||
## Related Documentation
|
||||
|
||||
* [Prowler ThreatScore Documentation](/user-guide/compliance/tutorials/threatscore)
|
||||
* [Creating a New Security Compliance Framework in Prowler](/developer-guide/security-compliance-framework)
|
||||
* [Prowler App — Getting Started](/user-guide/tutorials/prowler-app)
|
||||
@@ -4,7 +4,7 @@ title: 'Check Mapping Prowler v4/v3 to v2'
|
||||
|
||||
Prowler v3 and v4 introduce distinct identifiers while preserving the checks originally implemented in v2. This change was made because, in previous versions, check names were primarily derived from the CIS Benchmark for AWS. Starting with v3 and v4, all checks are independent of any security framework and have unique names and IDs.
|
||||
|
||||
For more details on the updated compliance implementation in Prowler v4 and v3, refer to the [Compliance](/user-guide/cli/tutorials/compliance) section.
|
||||
For more details on the updated compliance implementation in Prowler v4 and v3, refer to the [Compliance](/user-guide/compliance/tutorials/compliance) section.
|
||||
|
||||
```
|
||||
checks_v4_v3_to_v2_mapping = {
|
||||
|
||||
@@ -398,7 +398,7 @@ prowler oci --severity critical high
|
||||
|
||||
### Next Steps
|
||||
|
||||
- Learn about [Compliance Frameworks](/user-guide/cli/tutorials/compliance) in Prowler
|
||||
- Learn about [Compliance Frameworks](/user-guide/compliance/tutorials/compliance) in Prowler
|
||||
- Review [Prowler Output Formats](/user-guide/cli/tutorials/reporting)
|
||||
- Explore [Integrations](/user-guide/cli/tutorials/integrations) with SIEM and ticketing systems
|
||||
|
||||
|
||||
@@ -0,0 +1,186 @@
|
||||
---
|
||||
title: 'Okta Authentication in Prowler'
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
<VersionBadge version="5.27.0" />
|
||||
|
||||
Prowler authenticates to Okta as a **service application** using **OAuth 2.0 with a private-key JWT** (Client Credentials grant). The integration is read-only by scope and follows DISA STIG guidance for least-privilege access.
|
||||
|
||||
## Common Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- An Okta organization. The UI examples below use **Identity Engine** terminology such as **Global Session Policy**; Classic Engine exposes equivalent sign-on policy concepts under older naming.
|
||||
- A **Super Administrator** account on that organization for the one-time service-app setup.
|
||||
- An **API Services** app integration created in the Okta Admin Console.
|
||||
|
||||
### Authentication Method Overview
|
||||
|
||||
| Method | Status | Use Case |
|
||||
|---|---|---|
|
||||
| **OAuth 2.0 (private-key JWT)** | Supported | Production scans, CI/CD, Prowler App. |
|
||||
|
||||
The private-key JWT flow is the only supported authentication method in the initial release. The service application proves possession of a private key on every token request; Okta returns a short-lived access token, refreshed automatically by the SDK.
|
||||
|
||||
<Note>
|
||||
If a different authentication method is needed (SSWS API token, OAuth with user delegation, etc.), please open a [feature request](https://github.com/prowler-cloud/prowler/issues/new?template=feature-request.yml) describing the use case.
|
||||
</Note>
|
||||
|
||||
### Required OAuth Scopes
|
||||
|
||||
For the initial check (`signon_global_session_idle_timeout_15min`) only one scope is required:
|
||||
|
||||
- `okta.policies.read`
|
||||
|
||||
Additional scopes will be needed as more services and checks are added; these are the ones currently needed:
|
||||
|
||||
| Scope | Used by |
|
||||
|---|---|
|
||||
| `okta.policies.read` | Sign-on / password / authentication policies |
|
||||
|
||||
### Required Admin Role
|
||||
|
||||
The service application must be assigned the built-in **Read-Only Administrator** role.
|
||||
|
||||
Okta's Management API enforces a two-layer authorization model: an OAuth **scope** decides which API endpoints the token can call, and an **admin role** decides whether the call returns data. With only a scope granted, the token mint succeeds but every read returns `403 Forbidden`. The Read-Only Administrator role is the minimum that lets the granted `okta.*.read` scopes actually return configuration data to Prowler's checks — without it, the credential probe at provider startup fails and the scan never gets to evaluate any check.
|
||||
|
||||
Read-Only Administrator is intentionally the narrowest role that satisfies this requirement and aligns with the least-privilege guidance in DISA STIG.
|
||||
|
||||
## Step-by-Step Setup
|
||||
|
||||
### 1. Go to the admin console
|
||||
|
||||

|
||||
|
||||
### 2. [Optional] - Disable the privilege-escalation bypass (org-wide, one-time)
|
||||
|
||||
In the Okta Admin Console, go to **Settings → Account → Public client app admins** and ensure it is **off**. When enabled, every API Services app can be auto-assigned the Super Administrator role after scopes are granted, which would invalidate the read-only premise of this integration.
|
||||
|
||||

|
||||
|
||||
### 3. Create the API Services app
|
||||
|
||||
1. Go to **Applications → Applications**.
|
||||
|
||||

|
||||
|
||||
2. **Create App Integration**
|
||||
|
||||

|
||||
|
||||
3. Sign-in method: **API Services**. Click **Next**.
|
||||
4. Name the app (for example, `Prowler Scanner`) and click **Save**.
|
||||
5. Copy the displayed **Client ID** — you'll use it as `OKTA_CLIENT_ID`.
|
||||
|
||||

|
||||
|
||||
### 4. Switch to private-key authentication and generate a keypair
|
||||
|
||||
On the new app's **General** tab, scroll to **Client Credentials**:
|
||||
|
||||
1. Click **Edit**.
|
||||
2. Set **Client authentication** to **Public key / Private key**.
|
||||
3. Under **Public Keys**, click **Add key**.
|
||||
4. In the modal, click **Generate new key**. Okta creates a JWK pair.
|
||||
5. Click the **PEM** tab to switch the displayed format (or keep JWK — Prowler accepts both).
|
||||
6. Copy the entire `-----BEGIN PRIVATE KEY-----` block (or the JWK JSON).
|
||||
7. Click **Done**, then **Save**.
|
||||
|
||||
<Warning>
|
||||
Okta displays the private key **only once**. If you close the modal without copying, you must generate a new key.
|
||||
</Warning>
|
||||
|
||||

|
||||
|
||||
### 5. Grant the required OAuth scopes
|
||||
|
||||
On the app, open the **Okta API Scopes** tab and click **Grant** on every scope Prowler needs. For the initial release, granting only `okta.policies.read` is sufficient.
|
||||
|
||||

|
||||
|
||||
### 6. Assign the Read-Only Administrator role
|
||||
|
||||
On the app, open the **Admin roles** tab and click **Edit assignments → Add assignment**:
|
||||
|
||||
- **Role:** Read-Only Administrator
|
||||
- **Resources:** All resources
|
||||
|
||||
Save the changes.
|
||||
|
||||

|
||||
|
||||
### 7. [Optional] Verify DPoP setting
|
||||
|
||||
Prowler sends DPoP (Demonstrating Proof of Possession) proofs on every token request. The integration works whether the **Require Demonstrating Proof of Possession (DPoP) header in token requests** setting on the service app is on or off — but enabling it is the more secure default.
|
||||
|
||||
## Prowler CLI Authentication
|
||||
|
||||
### Using Environment Variables (Required for Secrets)
|
||||
|
||||
Private key material **must** be supplied via environment variables — Prowler does not accept secrets through CLI flags.
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="YOUR-ORG.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
|
||||
# Either of the two — content takes precedence over file when both are set.
|
||||
export OKTA_PRIVATE_KEY_FILE="/secure/path/to/prowler-okta.pem"
|
||||
# or
|
||||
export OKTA_PRIVATE_KEY="$(cat /secure/path/to/prowler-okta.pem)"
|
||||
|
||||
# Optional — defaults to "okta.policies.read"
|
||||
export OKTA_SCOPES="okta.policies.read"
|
||||
|
||||
poetry run python prowler-cli.py okta
|
||||
```
|
||||
|
||||
### Non-Secret CLI Flags
|
||||
|
||||
Non-secret values are also available as CLI flags for ergonomic overrides:
|
||||
|
||||
| Flag | Equivalent env var |
|
||||
|---|---|
|
||||
| `--okta-org-domain` | `OKTA_ORG_DOMAIN` |
|
||||
| `--okta-client-id` | `OKTA_CLIENT_ID` |
|
||||
| `--okta-scopes` | `OKTA_SCOPES` |
|
||||
|
||||
Run a single check directly:
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py okta --check signon_global_session_idle_timeout_15min
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### `OktaInvalidOrgDomainError`
|
||||
|
||||
The org domain must be `<org>.okta.com` (or `.oktapreview.com` / `.okta-emea.com` / `.okta-gov.com` / `.okta.mil` / `.okta-miltest.com` / `.trex-govcloud.com`). Pass the bare hostname only — no `https://` scheme, no path, no trailing slash. Custom (vanity) domains are not currently accepted.
|
||||
|
||||
### `OktaPrivateKeyFileError`
|
||||
|
||||
The file at `OKTA_PRIVATE_KEY_FILE` is missing, unreadable, or empty. Confirm the path and that the file contains a non-empty PEM block or JWK JSON document.
|
||||
|
||||
### `OktaInvalidCredentialsError` at provider init
|
||||
|
||||
Prowler validates credentials at startup by listing one sign-on policy. This error indicates the credential material itself was rejected:
|
||||
|
||||
- **`invalid_client`** — the public key registered in Okta does not match the private key on disk. Generate a fresh keypair and try again.
|
||||
|
||||
### `OktaInsufficientPermissionsError` at provider init
|
||||
|
||||
Raised when the credential probe succeeds at the OAuth layer but the request is rejected because the service app lacks the required scope or admin role:
|
||||
|
||||
- **`invalid_scope`** — the `okta.policies.read` scope is not granted on the service app. Grant it from **Okta API Scopes**.
|
||||
- **`Forbidden` / `not authorized`** — the **Read-Only Administrator** role is not assigned to the service app. Assign it from **Admin roles**.
|
||||
|
||||
### `invalid_dpop_proof`
|
||||
|
||||
The org or the service app requires DPoP. The provider always sends DPoP proofs, so this error indicates the SDK could not build a valid proof — typically because the private key on disk does not match the public key uploaded to Okta. Regenerate the keypair.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Implement OAuth 2.0 for an Okta service app](https://developer.okta.com/docs/guides/implement-oauth-for-okta-serviceapp/main/)
|
||||
- [Okta Policy API reference](https://developer.okta.com/docs/api/openapi/okta-management/management/tag/Policy/)
|
||||
- [DISA STIG for Okta (V-273186)](https://stigviewer.com/stigs/okta/)
|
||||
@@ -0,0 +1,144 @@
|
||||
---
|
||||
title: 'Getting Started With Okta on Prowler'
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
Prowler for Okta scans an Okta organization for identity and session-management misconfigurations. The provider authenticates as a service application using **OAuth 2.0 with a private-key JWT** (Client Credentials grant) — no end-user login, read-only by scope.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Set up authentication for Okta with the [Okta Authentication](/user-guide/providers/okta/authentication) guide before starting:
|
||||
|
||||
- An Okta organization. The UI examples below use **Identity Engine** terminology such as **Global Session Policy**; Classic Engine exposes the equivalent sign-on policy concepts under older names.
|
||||
- A **Super Administrator** account on that organization for the one-time service-app setup.
|
||||
- An **API Services** app integration in the Okta Admin Console with the `okta.policies.read` scope granted and the **Read-Only Administrator** role assigned.
|
||||
- Python 3.10+ and Prowler 5.27.0 or later installed locally.
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card title="Prowler Cloud" icon="cloud" href="#prowler-cloud">
|
||||
Onboard Okta using Prowler Cloud
|
||||
</Card>
|
||||
<Card title="Prowler CLI" icon="terminal" href="#prowler-cli">
|
||||
Onboard Okta using Prowler CLI
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Prowler Cloud
|
||||
|
||||
<Note>
|
||||
Prowler Cloud onboarding for Okta is coming soon. Track the [Prowler GitHub repository](https://github.com/prowler-cloud/prowler) for release updates. Use the [Prowler CLI](#prowler-cli) workflow below in the meantime.
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
<VersionBadge version="5.27.0" />
|
||||
|
||||
### Step 1: Set Up Authentication
|
||||
|
||||
Follow the [Okta Authentication](/user-guide/providers/okta/authentication) guide to create the service application, generate a keypair, grant scopes, and assign the Read-Only Administrator role. Then export the credentials:
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="acme.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
export OKTA_PRIVATE_KEY_FILE="/secure/path/to/prowler-okta.pem"
|
||||
# Optional — defaults to "okta.policies.read"
|
||||
export OKTA_SCOPES="okta.policies.read"
|
||||
```
|
||||
|
||||
The private key file may contain either a PEM-encoded RSA key or a JWK JSON document.
|
||||
|
||||
#### Supplying the Private Key as Content
|
||||
|
||||
For automated environments where writing the key to disk is not desirable (CI runners, container secrets, etc.), the private key may be passed directly as a string:
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="acme.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
export OKTA_PRIVATE_KEY="$(cat /secure/path/to/prowler-okta.pem)"
|
||||
```
|
||||
|
||||
`OKTA_PRIVATE_KEY` takes precedence over `OKTA_PRIVATE_KEY_FILE` when both are set. The private key is intentionally not exposed as a CLI flag — secrets must be supplied via environment variables only.
|
||||
|
||||
### Step 2: Run the First Scan
|
||||
|
||||
Run a baseline scan after credentials are configured:
|
||||
|
||||
```bash
|
||||
prowler okta
|
||||
```
|
||||
|
||||
Or run a specific check directly:
|
||||
|
||||
```bash
|
||||
prowler okta --check signon_global_session_idle_timeout_15min
|
||||
```
|
||||
|
||||
Prowler prints a summary table; full findings are written to the configured output formats.
|
||||
|
||||
### Step 3: Use a Custom Configuration (Optional)
|
||||
|
||||
Prowler uses a configuration file to customize check thresholds. The Okta configuration currently includes:
|
||||
|
||||
```yaml
|
||||
okta:
|
||||
# okta.signon_global_session_idle_timeout_15min
|
||||
# Defaults to 15 minutes per DISA STIG V-273186.
|
||||
okta_max_session_idle_minutes: 15
|
||||
```
|
||||
|
||||
To use a custom configuration:
|
||||
|
||||
```bash
|
||||
prowler okta --config-file /path/to/config.yaml
|
||||
```
|
||||
|
||||
## Supported Services
|
||||
|
||||
Prowler for Okta includes security checks across the following services:
|
||||
|
||||
| Service | Description |
|
||||
| ----------- | ----------------------------------------------------------------------------------- |
|
||||
| **Sign-On** | Global session policy controls (idle timeout, lifetime, rule priority and ordering) |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### STIG Rule Ordering
|
||||
|
||||
The initial check is mapped to DISA STIG `V-273186` / `OKTA-APP-000020`. Prowler implements the STIG procedure as written: the **Default Policy** must have a **Priority 1** rule that is **not** `Default Rule`, and that rule must set **Maximum Okta global session idle time** to 15 minutes or less.
|
||||
|
||||
This is stricter than simply finding the same timeout value somewhere else in the policy set. A compliant custom rule in another policy, or a compliant timeout on the built-in `Default Rule`, does not satisfy this STIG procedure.
|
||||
|
||||
### Default Scopes
|
||||
|
||||
Prowler requests a fixed set of OAuth scopes on every token exchange. The default is a single scope that covers the bundled initial check:
|
||||
|
||||
- `okta.policies.read`
|
||||
|
||||
The service app must have that scope granted in the **Okta API Scopes** tab. When the granted set is narrower than the requested set, the token request fails with an `invalid_scope` error and the scan stops at provider initialization.
|
||||
|
||||
When additional checks are enabled — or when running against a service app that exposes a different scope set — override the default with `OKTA_SCOPES` (comma-separated string for the env var) or `--okta-scopes` (space-separated list for the CLI):
|
||||
|
||||
```bash
|
||||
# Environment variable — comma-separated
|
||||
export OKTA_SCOPES="okta.policies.read,okta.apps.read,okta.users.read"
|
||||
|
||||
# CLI flag — space-separated
|
||||
prowler okta --okta-scopes okta.policies.read okta.apps.read okta.users.read
|
||||
```
|
||||
|
||||
For the full catalog of OAuth scopes exposed by the Okta Management API, refer to the [Okta OAuth 2.0 scopes documentation](https://developer.okta.com/docs/api/oauth2/).
|
||||
|
||||
<Note>
|
||||
As new services and checks land in the Okta provider, the default scope list grows alongside them. Re-check the granted scopes on the service app after each Prowler upgrade and grant any newly required `okta.*.read` scopes in the Admin Console.
|
||||
</Note>
|
||||
|
||||
### Common Errors
|
||||
|
||||
- **`OktaInvalidOrgDomainError`** — the org domain must be `<org>.okta.com` (or `.oktapreview.com` / `.okta-emea.com` / `.okta-gov.com` / `.okta.mil` / `.okta-miltest.com` / `.trex-govcloud.com`). Pass the bare hostname only — no `https://` scheme, no path, no trailing slash.
|
||||
- **`OktaPrivateKeyFileError`** — confirm the file is readable and contains a non-empty PEM or JWK body.
|
||||
- **`OktaInsufficientPermissionsError`** — the credential probe reached Okta but the service app cannot perform the request. The error string carries `invalid_scope`, `Forbidden`, `not authorized`, or `permission`. Fix by granting the missing `okta.*.read` scope from **Okta API Scopes** and confirming the **Read-Only Administrator** role is assigned to the service app.
|
||||
- **`OktaInvalidCredentialsError`** — the credential probe reached Okta but Okta rejected the JWT. Typically the private key on disk does not match the public JWK uploaded to the service app, or the JWT signing parameters are wrong. Regenerate the keypair and re-upload the public JWK.
|
||||
- **Token requests failing for an unknown scope** — the app was granted a narrower scope set than `OKTA_SCOPES` requests. Either narrow `OKTA_SCOPES` or grant the missing scopes in the Admin Console.
|
||||
|
After Width: | Height: | Size: 159 KiB |
|
After Width: | Height: | Size: 134 KiB |
|
After Width: | Height: | Size: 173 KiB |
|
After Width: | Height: | Size: 127 KiB |
|
After Width: | Height: | Size: 83 KiB |
|
After Width: | Height: | Size: 78 KiB |
|
After Width: | Height: | Size: 216 KiB |
|
After Width: | Height: | Size: 56 KiB |
@@ -0,0 +1,146 @@
|
||||
---
|
||||
title: 'Alerts'
|
||||
description: 'Create email alerts from Prowler Cloud findings to monitor relevant security changes after scans or in daily digests.'
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
<VersionBadge version="5.26.0" />
|
||||
|
||||
Alerts notify recipients by email when security findings match saved filter conditions. Use Alerts to track high-priority findings, monitor specific providers or services, and keep teams informed about scan results that match defined criteria.
|
||||
|
||||
<Note>
|
||||
This feature is available exclusively in **Prowler Cloud** with a paid subscription.
|
||||
</Note>
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before creating Alerts, ensure that:
|
||||
|
||||
* At least one scan has completed and produced findings.
|
||||
* The user role includes the `manage_alerts` permission.
|
||||
|
||||
The `manage_alerts` permission is required to create, edit, test, enable, disable, and delete Alerts. See [RBAC Administrative Permissions](/user-guide/tutorials/prowler-app-rbac#rbac-administrative-permissions) for details.
|
||||
|
||||
## How Alerts Work
|
||||
|
||||
Alerts are created from Findings filters. When an Alert runs, Prowler Cloud evaluates the saved conditions against findings and sends an email digest when matching findings exist.
|
||||
|
||||
<Note>
|
||||
Alerts evaluate findings with status `FAIL` only. Findings with status `PASS` or `MANUAL`, and muted findings, never trigger an Alert regardless of the saved filters.
|
||||
</Note>
|
||||
|
||||
Alerts run on one of three schedules:
|
||||
|
||||
| Frequency | Description |
|
||||
|-----------|-------------|
|
||||
| After each scan | Evaluates the Alert after each completed scan. |
|
||||
| Daily digest | Evaluates the Alert once per day and sends a digest when findings match. |
|
||||
| After each scan and daily | Evaluates the Alert after every scan and in the daily digest. |
|
||||
|
||||
## Creating an Alert From Findings
|
||||
|
||||
To create an Alert:
|
||||
|
||||
1. Navigate to **Findings** in Prowler Cloud.
|
||||
2. Apply at least one [Alert-compatible filter](#alert-compatible-filters) to define the findings that should trigger the Alert.
|
||||
3. Click **Create Alert**.
|
||||
|
||||

|
||||
|
||||
4. Configure the Alert settings:
|
||||
* **Name:** Add a short, descriptive name.
|
||||
* **Description:** Add optional context for the Alert.
|
||||
* **Frequency:** Select when Prowler Cloud should evaluate the Alert.
|
||||
* **Recipients:** Select the recipients who should receive the email digest.
|
||||
|
||||

|
||||
|
||||
5. Click **Create**.
|
||||
|
||||
After the Alert is created, Prowler Cloud evaluates it based on the selected frequency.
|
||||
|
||||
## Alert-Compatible Filters
|
||||
|
||||
An **Alert-compatible filter** is a Findings-page filter that the Alert condition language can evaluate when the Alert runs. The Findings page exposes many filters, but only a specific subset can be saved into an Alert. Filters outside this subset, such as **Status**, free-text search, sort, or pagination, are ignored when seeding an Alert from the current Findings view.
|
||||
|
||||
When **Create Alert** is clicked on the Findings page, Prowler Cloud takes the active filters, keeps only the Alert-compatible ones, and uses them to build the Alert condition.
|
||||
|
||||
The following filters are Alert-compatible:
|
||||
|
||||
* Provider type
|
||||
* Provider
|
||||
* Severity
|
||||
* Delta (new findings since the previous scan)
|
||||
* Region
|
||||
* Service
|
||||
* Resource type
|
||||
* Category
|
||||
* Resource group
|
||||
|
||||
If only the **Status** filter is applied on the Findings page, Prowler Cloud uses all severities as the condition base so the Alert can still be created. Status itself never becomes part of the Alert condition.
|
||||
|
||||
## Managing Alerts
|
||||
|
||||
Navigate to **Alerts** to review and manage existing Alerts.
|
||||
|
||||

|
||||
|
||||
Each Alert provides these actions:
|
||||
|
||||
| Action | Description |
|
||||
|--------|-------------|
|
||||
| Edit | Update name, description, recipients, frequency, or filters. |
|
||||
| Enable/Disable | Start or stop Alert evaluation without deleting the Alert. |
|
||||
| Delete | Permanently remove the Alert. |
|
||||
|
||||
## Testing Alert Filters
|
||||
|
||||
When editing an Alert, click **Test** to preview whether the current filters match existing findings.
|
||||
|
||||
The test result indicates whether the filters match findings and includes a summary of the matching results.
|
||||
|
||||

|
||||
|
||||
<Warning>
|
||||
**The Test result is a snapshot, not a guarantee of future Alert triggers.**
|
||||
|
||||
The Test evaluates the current filters against existing findings at the moment **Test** is clicked. It does not predict whether the Alert will trigger on its next evaluation. The Alert trigger depends on the state at evaluation time:
|
||||
|
||||
* **After each scan:** The Alert is evaluated against the findings produced by that scan only. If the next scan produces no findings that match the filters, the Alert will not trigger, even if a Test run earlier in the day showed matches.
|
||||
* **Daily digest:** The Alert is evaluated against the findings present on the digest day. If no matching findings exist for that day, the Alert will not trigger, even if previous days had matches.
|
||||
|
||||
The reverse is also true: a Test showing no matches does not guarantee the Alert will stay silent. Future scans may produce matching findings.
|
||||
|
||||
Use **Test** to validate that the filters are well-formed and target the intended findings, not to forecast future Alert behavior.
|
||||
</Warning>
|
||||
|
||||
## Recipients
|
||||
|
||||
Alert recipients are selected from the email addresses available in the tenant. Recipients receive an email digest each time an Alert evaluates and matches findings.
|
||||
|
||||
<Note>
|
||||
By default, the **organization owner** receives a **daily digest** for **critical findings**. Adjust the recipient, frequency, or filters in the Alert configuration to change this behavior.
|
||||
</Note>
|
||||
|
||||
If a recipient unsubscribes from Alerts, that address stops receiving digests until it is reconfirmed.
|
||||
|
||||
## Email Notifications
|
||||
|
||||
When an Alert matches findings, Prowler Cloud sends a security alert email that summarizes the matching findings. The email includes:
|
||||
|
||||
* The scan name and evaluation time.
|
||||
* The total number of matching findings.
|
||||
* The number of Alert rules that triggered.
|
||||
* A preview of the affected findings, grouped by severity, with resource details and the originating rule.
|
||||
* A direct link to view all matching findings in Prowler Cloud.
|
||||
|
||||

|
||||
|
||||
## Best Practices
|
||||
|
||||
* **Start with focused filters:** Create Alerts for specific high-priority scopes, such as critical findings, production providers, or important services.
|
||||
* **Use clear names:** Choose names that explain the intent of the Alert.
|
||||
* **Review recipients regularly:** Keep recipient lists aligned with current ownership.
|
||||
* **Test before saving edits:** Use **Test** after changing filters to confirm that the Alert matches the expected findings.
|
||||
* **Disable instead of deleting during tuning:** Disable Alerts temporarily when adjusting filters or recipients.
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
Prowler v3 and v4 introduce distinct identifiers while preserving the checks originally implemented in v2. This change was made because, in previous versions, check names were primarily derived from the CIS Benchmark for AWS. Starting with v3 and v4, all checks are independent of any security framework and have unique names and IDs.
|
||||
|
||||
For more details on the updated compliance implementation in Prowler v4 and v3, refer to the [Compliance](/user-guide/cli/tutorials/compliance) section.
|
||||
For more details on the updated compliance implementation in Prowler v4 and v3, refer to the [Compliance](/user-guide/compliance/tutorials/compliance) section.
|
||||
|
||||
```
|
||||
checks_v4_v3_to_v2_mapping = {
|
||||
|
||||
@@ -1009,7 +1009,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.5"
|
||||
version = "2.33.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
@@ -1017,9 +1017,9 @@ dependencies = [
|
||||
{ name = "idna" },
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -12,6 +12,20 @@ files = [
|
||||
{file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aenum"
|
||||
version = "3.1.17"
|
||||
description = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants"
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"okta\""
|
||||
files = [
|
||||
{file = "aenum-3.1.17-py2-none-any.whl", hash = "sha256:0dad0421b2fbe30e3fb623b2a0a23eff823407df53829d6a72595e7f76f3d872"},
|
||||
{file = "aenum-3.1.17-py3-none-any.whl", hash = "sha256:8b883a37a04e74cc838ac442bdd28c266eae5bbf13e1342c7ef123ed25230139"},
|
||||
{file = "aenum-3.1.17.tar.gz", hash = "sha256:a969a4516b194895de72c875ece355f17c0d272146f7fda346ef74f93cf4d5ba"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aiofiles"
|
||||
version = "24.1.0"
|
||||
@@ -3144,6 +3158,23 @@ files = [
|
||||
[package.dependencies]
|
||||
referencing = ">=0.31.0"
|
||||
|
||||
[[package]]
|
||||
name = "jwcrypto"
|
||||
version = "1.5.7"
|
||||
description = "Implementation of JOSE Web standards"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"okta\""
|
||||
files = [
|
||||
{file = "jwcrypto-1.5.7-py3-none-any.whl", hash = "sha256:729463fefe28b6de5cf1ebfda3e94f1a1b41d2799148ef98a01cb9678ebe2bb0"},
|
||||
{file = "jwcrypto-1.5.7.tar.gz", hash = "sha256:70204d7cca406eda8c82352e3c41ba2d946610dafd19e54403f0a1f4f18633c6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = ">=3.4"
|
||||
typing_extensions = ">=4.5.0"
|
||||
|
||||
[[package]]
|
||||
name = "keystoneauth1"
|
||||
version = "5.13.0"
|
||||
@@ -4112,6 +4143,36 @@ urllib3 = {version = ">=2.6.3", markers = "python_version >= \"3.10.0\""}
|
||||
[package.extras]
|
||||
adk = ["docstring-parser (>=0.16) ; python_version >= \"3.10\" and python_version < \"4\"", "mcp (>=1.6.0) ; python_version >= \"3.10\" and python_version < \"4\"", "pydantic (>=2.10.6) ; python_version >= \"3.10\" and python_version < \"4\"", "rich (>=13.9.4) ; python_version >= \"3.10\" and python_version < \"4\""]
|
||||
|
||||
[[package]]
|
||||
name = "okta"
|
||||
version = "3.4.2"
|
||||
description = "Python SDK for the Okta Management API"
|
||||
optional = true
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"okta\""
|
||||
files = [
|
||||
{file = "okta-3.4.2-py3-none-any.whl", hash = "sha256:b67bcff31de65223c5848894a202153236d0c99e3a8541a54bf7065f81676637"},
|
||||
{file = "okta-3.4.2.tar.gz", hash = "sha256:b05201056f3f028c5d2d16394f9b47024a689080f5a993c11d4d80f0e1b5ba1e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aenum = ">=3.1.16"
|
||||
aiohttp = ">=3.13.4"
|
||||
blinker = ">=1.9.0"
|
||||
jwcrypto = ">=1.5.6"
|
||||
pycryptodomex = ">=3.23.0"
|
||||
pydantic = ">=2.11.3"
|
||||
pydash = ">=8.0.6"
|
||||
PyJWT = ">=2.12.0"
|
||||
python-dateutil = ">=2.9.0.post0"
|
||||
PyYAML = ">=6.0.3"
|
||||
requests = ">=2.33.0"
|
||||
xmltodict = ">=1.0.2"
|
||||
|
||||
[package.extras]
|
||||
images = ["pillow (>=9.0.0,<12)"]
|
||||
|
||||
[[package]]
|
||||
name = "openapi-schema-validator"
|
||||
version = "0.6.3"
|
||||
@@ -4752,6 +4813,58 @@ files = [
|
||||
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycryptodomex"
|
||||
version = "3.23.0"
|
||||
description = "Cryptographic library for Python"
|
||||
optional = true
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"okta\""
|
||||
files = [
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:add243d204e125f189819db65eed55e6b4713f70a7e9576c043178656529cec7"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1c6d919fc8429e5cb228ba8c0d4d03d202a560b421c14867a65f6042990adc8e"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:1c3a65ad441746b250d781910d26b7ed0a396733c6f2dbc3327bd7051ec8a541"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27m-win32.whl", hash = "sha256:47f6d318fe864d02d5e59a20a18834819596c4ed1d3c917801b22b92b3ffa648"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:d9825410197a97685d6a1fa2a86196430b01877d64458a20e95d4fd00d739a08"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:267a3038f87a8565bd834317dbf053a02055915acf353bf42ededb9edaf72010"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:7b37e08e3871efe2187bc1fd9320cc81d87caf19816c648f24443483005ff886"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:91979028227543010d7b2ba2471cf1d1e398b3f183cb105ac584df0c36dac28d"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8962204c47464d5c1c4038abeadd4514a133b28748bcd9fa5b6d62e3cec6fa"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a33986a0066860f7fcf7c7bd2bc804fa90e434183645595ae7b33d01f3c91ed8"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7947ab8d589e3178da3d7cdeabe14f841b391e17046954f2fbcd941705762b5"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c25e30a20e1b426e1f0fa00131c516f16e474204eee1139d1603e132acffc314"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:da4fa650cef02db88c2b98acc5434461e027dce0ae8c22dd5a69013eaf510006"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58b851b9effd0d072d4ca2e4542bf2a4abcf13c82a29fd2c93ce27ee2a2e9462"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:a9d446e844f08299236780f2efa9898c818fe7e02f17263866b8550c7d5fb328"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bc65bdd9fc8de7a35a74cab1c898cab391a4add33a8fe740bda00f5976ca4708"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c885da45e70139464f082018ac527fdaad26f1657a99ee13eecdce0f0ca24ab4"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:06698f957fe1ab229a99ba2defeeae1c09af185baa909a31a5d1f9d42b1aaed6"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2c2537863eccef2d41061e82a881dcabb04944c5c06c5aa7110b577cc487545"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43c446e2ba8df8889e0e16f02211c25b4934898384c1ec1ec04d7889c0333587"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f489c4765093fb60e2edafdf223397bc716491b2b69fe74367b70d6999257a5c"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdc69d0d3d989a1029df0eed67cc5e8e5d968f3724f4519bd03e0ec68df7543c"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bbcb1dd0f646484939e142462d9e532482bc74475cecf9c4903d4e1cd21f003"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:8a4fcd42ccb04c31268d1efeecfccfd1249612b4de6374205376b8f280321744"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:55ccbe27f049743a4caf4f4221b166560d3438d0b1e5ab929e07ae1702a4d6fd"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-win32.whl", hash = "sha256:189afbc87f0b9f158386bf051f720e20fa6145975f1e76369303d0f31d1a8d7c"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:52e5ca58c3a0b0bd5e100a9fbc8015059b05cffc6c66ce9d98b4b45e023443b9"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:02d87b80778c171445d67e23d1caef279bf4b25c3597050ccd2e13970b57fd51"},
|
||||
{file = "pycryptodomex-3.23.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:febec69c0291efd056c65691b6d9a339f8b4bc43c6635b8699471248fe897fea"},
|
||||
{file = "pycryptodomex-3.23.0-pp27-pypy_73-win32.whl", hash = "sha256:c84b239a1f4ec62e9c789aafe0543f0594f0acd90c8d9e15bcece3efe55eca66"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ebfff755c360d674306e5891c564a274a47953562b42fb74a5c25b8fc1fb1cb5"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eca54f4bb349d45afc17e3011ed4264ef1cc9e266699874cdd1349c504e64798"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2596e643d4365e14d0879dc5aafe6355616c61c2176009270f3048f6d9a61f"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdfac7cda115bca3a5abb2f9e43bc2fb66c2b65ab074913643803ca7083a79ea"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7de1e40a41a5d7f1ac42b6569b10bcdded34339950945948529067d8426d2785"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bffc92138d75664b6d543984db7893a628559b9e78658563b0395e2a5fb47ed9"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df027262368334552db2c0ce39706b3fb32022d1dce34673d0f9422df004b96a"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e79f1aaff5a3a374e92eb462fa9e598585452135012e2945f96874ca6eeb1ff"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:27e13c80ac9a0a1d050ef0a7e0a18cc04c8850101ec891815b6c5a0375e8a245"},
|
||||
{file = "pycryptodomex-3.23.0.tar.gz", hash = "sha256:71909758f010c82bc99b0abf4ea12012c98962fbf0583c2164f8b84533c2e4da"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.12.5"
|
||||
@@ -4908,6 +5021,25 @@ files = [
|
||||
[package.dependencies]
|
||||
typing-extensions = ">=4.14.1"
|
||||
|
||||
[[package]]
|
||||
name = "pydash"
|
||||
version = "8.0.6"
|
||||
description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library."
|
||||
optional = true
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"okta\""
|
||||
files = [
|
||||
{file = "pydash-8.0.6-py3-none-any.whl", hash = "sha256:ee70a81a5b292c007f28f03a4ee8e75c1f5d7576df5457b836ec7ab2839cc5d0"},
|
||||
{file = "pydash-8.0.6.tar.gz", hash = "sha256:b2821547e9723f69cf3a986be4db64de41730be149b2641947ecd12e1e11025a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = ">3.10,<4.6.0 || >4.6.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["build", "coverage", "furo", "invoke", "mypy", "pytest", "pytest-cov", "pytest-mypy-testing", "ruff", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"]
|
||||
|
||||
[[package]]
|
||||
name = "pyflakes"
|
||||
version = "3.2.0"
|
||||
@@ -5240,65 +5372,85 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.2"
|
||||
version = "6.0.3"
|
||||
description = "YAML parser and emitter for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
|
||||
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"},
|
||||
{file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6447,15 +6599,19 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "xmltodict"
|
||||
version = "0.14.2"
|
||||
version = "1.0.4"
|
||||
description = "Makes working with XML feel like you are working with JSON"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["dev"]
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"},
|
||||
{file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"},
|
||||
{file = "xmltodict-1.0.4-py3-none-any.whl", hash = "sha256:a4a00d300b0e1c59fc2bfccb53d7b2e88c32f200df138a0dd2229f842497026a"},
|
||||
{file = "xmltodict-1.0.4.tar.gz", hash = "sha256:6d94c9f834dd9e44514162799d344d815a3a4faec913717a9ecbfa5be1bb8e61"},
|
||||
]
|
||||
markers = {main = "extra == \"okta\""}
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "yarl"
|
||||
@@ -6732,7 +6888,10 @@ files = [
|
||||
{file = "zstd-1.5.7.2.tar.gz", hash = "sha256:6d8684c69009be49e1b18ec251a5eb0d7e24f93624990a8a124a1da66a92fc8a"},
|
||||
]
|
||||
|
||||
[extras]
|
||||
okta = ["okta"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.13"
|
||||
content-hash = "d7e2ad41783a864bb845f63ccc10c88ae1e4ac36d61993ea106bbb4a5f58a843"
|
||||
content-hash = "95bcc1e65c79519df1fa78351a85986d1891d08f615fc7afaed754f268a0c944"
|
||||
|
||||
@@ -14,15 +14,19 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Action | Skill |
|
||||
|--------|-------|
|
||||
| Add changelog entry for a PR or feature | `prowler-changelog` |
|
||||
| Adding a compliance output formatter (per-provider class + table dispatcher) | `prowler-compliance` |
|
||||
| Adding new providers | `prowler-provider` |
|
||||
| Adding services to existing providers | `prowler-provider` |
|
||||
| Auditing check-to-requirement mappings as a cloud auditor | `prowler-compliance` |
|
||||
| Create PR that requires changelog entry | `prowler-changelog` |
|
||||
| Creating new checks | `prowler-sdk-check` |
|
||||
| Creating/updating compliance frameworks | `prowler-compliance` |
|
||||
| Fixing compliance JSON bugs (duplicate IDs, empty Section, stale refs) | `prowler-compliance` |
|
||||
| Mapping checks to compliance controls | `prowler-compliance` |
|
||||
| Mocking AWS with moto in tests | `prowler-test-sdk` |
|
||||
| Review changelog format and conventions | `prowler-changelog` |
|
||||
| Reviewing compliance framework PRs | `prowler-compliance-review` |
|
||||
| Syncing compliance framework with upstream catalog | `prowler-compliance` |
|
||||
| Update CHANGELOG.md in any component | `prowler-changelog` |
|
||||
| Updating existing checks and metadata | `prowler-sdk-check` |
|
||||
| Writing Prowler SDK tests | `prowler-test-sdk` |
|
||||
|
||||
@@ -2,16 +2,49 @@
|
||||
|
||||
All notable changes to the **Prowler SDK** are documented in this file.
|
||||
|
||||
## [5.26.0] (Prowler UNRELEASED)
|
||||
## [5.27.0] (Prowler UNRELEASED)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- `entra_service_principal_no_secrets_for_permanent_tier0_roles` check for M365 provider [(#10788)](https://github.com/prowler-cloud/prowler/pull/10788)
|
||||
- `iam_user_access_not_stale_to_sagemaker` check for AWS provider with configurable `max_unused_sagemaker_access_days` (default 90) [(#11000)](https://github.com/prowler-cloud/prowler/pull/11000)
|
||||
- `cloudtrail_bedrock_logging_enabled` check for AWS provider [(#10858)](https://github.com/prowler-cloud/prowler/pull/10858)
|
||||
- Okta provider with OAuth 2.0 authentication and `signon_global_session_idle_timeout_15min` check [(#11079)](https://github.com/prowler-cloud/prowler/pull/11079)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- `entra_emergency_access_exclusion` check for M365 provider now scopes the exclusion requirement to enabled Conditional Access policies with a `Block` grant control instead of every enabled policy, focusing on the lockout-relevant policy set [(#10849)](https://github.com/prowler-cloud/prowler/pull/10849)
|
||||
- `okta` SDK dependency moved to a new `okta` optional extra; install with `pip install prowler[okta]` (or `poetry install --all-extras`) to use the Okta provider. Prevents resolution conflicts with downstream consumers pinning the legacy `okta<1.0.0` package
|
||||
|
||||
---
|
||||
|
||||
## [5.26.2] (Prowler UNRELEASED)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- `entra_users_mfa_capable` and `entra_break_glass_account_fido2_security_key_registered` report a preventive FAIL per affected user (with the missing permission named) when the M365 service principal lacks `AuditLog.Read.All`, instead of mass false positives [(#10907)](https://github.com/prowler-cloud/prowler/pull/10907)
|
||||
|
||||
---
|
||||
|
||||
## [5.26.1] (Prowler v5.26.1)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- `entra_users_mfa_capable` no longer flags disabled guest users by requesting `accountEnabled` and `userType` from Microsoft Graph via `$select` and using Graph as the source of truth for `account_enabled` (EXO `Get-User` does not return guest users) [(#11002)](https://github.com/prowler-cloud/prowler/pull/11002)
|
||||
|
||||
---
|
||||
|
||||
## [5.26.0] (Prowler v5.26.0)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- `bedrock_guardrails_configured` check for AWS provider [(#10844)](https://github.com/prowler-cloud/prowler/pull/10844)
|
||||
- Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
|
||||
- Universal compliance with OCSF support [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
|
||||
- ASD Essential Eight Maturity Model compliance framework for AWS (Maturity Level One, Nov 2023) [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808)
|
||||
- Update Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663)
|
||||
- Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663)
|
||||
- `bedrock_prompt_management_exists` check for AWS provider [(#10878)](https://github.com/prowler-cloud/prowler/pull/10878)
|
||||
- 8 Gmail attachment safety and spoofing protection checks for Google Workspace provider using the Cloud Identity Policy API [(#10980)](https://github.com/prowler-cloud/prowler/pull/10980)
|
||||
- `bedrock_prompt_encrypted_with_cmk` check for AWS provider [(#10905)](https://github.com/prowler-cloud/prowler/pull/10905)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
|
||||
@@ -154,6 +154,7 @@ from prowler.providers.llm.models import LLMOutputOptions
|
||||
from prowler.providers.m365.models import M365OutputOptions
|
||||
from prowler.providers.mongodbatlas.models import MongoDBAtlasOutputOptions
|
||||
from prowler.providers.nhn.models import NHNOutputOptions
|
||||
from prowler.providers.okta.models import OktaOutputOptions
|
||||
from prowler.providers.openstack.models import OpenStackOutputOptions
|
||||
from prowler.providers.oraclecloud.models import OCIOutputOptions
|
||||
from prowler.providers.vercel.models import VercelOutputOptions
|
||||
@@ -426,6 +427,10 @@ def prowler():
|
||||
output_options = VercelOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
elif provider == "okta":
|
||||
output_options = OktaOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
|
||||
# Run the quick inventory for the provider if available
|
||||
if hasattr(args, "quick_inventory") and args.quick_inventory:
|
||||
|
||||
@@ -550,6 +550,7 @@
|
||||
"apigatewayv2_api_access_logging_enabled",
|
||||
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled",
|
||||
"directoryservice_directory_log_forwarding_enabled",
|
||||
|
||||
@@ -3461,6 +3461,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"kinesis_stream_data_retention_period",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events"
|
||||
]
|
||||
},
|
||||
@@ -3669,6 +3670,7 @@
|
||||
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
|
||||
"bedrock_model_invocation_logging_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
@@ -5288,6 +5290,7 @@
|
||||
"cognito_user_pool_blocks_compromised_credentials_sign_in_attempts",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"secretsmanager_secret_unused"
|
||||
@@ -6359,6 +6362,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_administrator_access_policy",
|
||||
"iam_user_console_access_unused",
|
||||
@@ -6473,6 +6477,7 @@
|
||||
"backup_recovery_point_encrypted",
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudfront_distributions_field_level_encryption_enabled",
|
||||
"cloudfront_distributions_origin_traffic_encrypted",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
@@ -6730,6 +6735,7 @@
|
||||
"backup_recovery_point_encrypted",
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudfront_distributions_field_level_encryption_enabled",
|
||||
"cloudfront_distributions_origin_traffic_encrypted",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
|
||||
@@ -1958,6 +1958,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
|
||||
@@ -1311,6 +1311,7 @@
|
||||
"glue_development_endpoints_job_bookmark_encryption_enabled",
|
||||
"glue_ml_transform_encrypted_at_rest",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"codebuild_project_s3_logs_encrypted",
|
||||
"codebuild_report_group_export_encrypted"
|
||||
]
|
||||
@@ -3100,6 +3101,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_user_two_active_access_key"
|
||||
@@ -3442,6 +3444,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_user_no_setup_initial_access_key"
|
||||
@@ -3551,6 +3554,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_rotate_access_key_90_days",
|
||||
@@ -5853,6 +5857,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
|
||||
@@ -544,6 +544,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
|
||||
@@ -109,6 +109,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_user_hardware_mfa_enabled",
|
||||
@@ -325,6 +326,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"organizations_delegated_administrators"
|
||||
|
||||
@@ -39,6 +39,7 @@
|
||||
"iam_user_hardware_mfa_enabled",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"securityhub_enabled"
|
||||
@@ -109,6 +110,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -165,6 +167,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -185,6 +188,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -320,6 +324,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -434,6 +439,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
@@ -589,6 +595,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
|
||||
@@ -119,6 +119,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_restapi_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
|
||||
@@ -87,6 +87,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_restapi_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
@@ -632,6 +633,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_restapi_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
|
||||
@@ -869,6 +869,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
|
||||
@@ -247,6 +247,7 @@
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
@@ -1293,6 +1294,7 @@
|
||||
"bedrock_model_invocation_logging_enabled",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
@@ -1767,6 +1769,7 @@
|
||||
"backup_recovery_point_encrypted",
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudfront_distributions_field_level_encryption_enabled",
|
||||
"cloudfront_distributions_origin_traffic_encrypted",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
|
||||
@@ -2115,6 +2115,7 @@
|
||||
"Checks": [
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
@@ -2539,6 +2540,7 @@
|
||||
"bedrock_model_invocation_logging_enabled",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_bucket_requires_mfa_delete",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_insights_exist",
|
||||
|
||||
@@ -2117,6 +2117,7 @@
|
||||
"Checks": [
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
|
||||
@@ -171,6 +171,7 @@
|
||||
"iam_no_expired_server_certificates_stored",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_no_root_access_key",
|
||||
|
||||
@@ -1913,6 +1913,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
],
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -76,6 +77,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -164,6 +166,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -589,6 +592,7 @@
|
||||
"iam_password_policy_expires_passwords_within_90_days_or_less",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
|
||||
@@ -23,6 +23,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"securityhub_enabled"
|
||||
@@ -43,6 +44,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -116,6 +118,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
@@ -240,6 +243,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_url_public",
|
||||
|
||||
@@ -31,6 +31,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
@@ -53,6 +54,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -74,6 +76,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -95,6 +98,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -116,6 +120,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -136,6 +141,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -247,6 +253,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -285,6 +292,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -861,6 +869,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
@@ -1199,6 +1208,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -1594,6 +1604,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
@@ -2152,6 +2163,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
@@ -2179,6 +2191,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
|
||||
@@ -577,6 +577,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
@@ -638,6 +639,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
|
||||
@@ -707,6 +707,7 @@
|
||||
"iam_user_console_access_unused",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_two_active_access_key",
|
||||
"iam_root_credentials_management_enabled",
|
||||
@@ -903,6 +904,7 @@
|
||||
"Checks": [
|
||||
"backup_vaults_encrypted",
|
||||
"backup_recovery_point_encrypted",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"s3_bucket_kms_encryption",
|
||||
@@ -1310,6 +1312,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled",
|
||||
@@ -1473,6 +1476,7 @@
|
||||
"cloudtrail_threat_detection_enumeration",
|
||||
"cloudtrail_threat_detection_privilege_escalation",
|
||||
"cloudtrail_threat_detection_llm_jacking",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events"
|
||||
]
|
||||
@@ -1569,6 +1573,7 @@
|
||||
"cloudtrail_threat_detection_llm_jacking",
|
||||
"cloudtrail_threat_detection_enumeration",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_log_metric_filter_unauthorized_api_calls",
|
||||
"cloudwatch_log_metric_filter_authentication_failures",
|
||||
|
||||
@@ -1563,6 +1563,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_password_policy_reuse_24",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
|
||||
@@ -295,6 +295,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_no_expired_server_certificates_stored"
|
||||
@@ -340,6 +341,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"accessanalyzer_enabled_without_findings"
|
||||
@@ -816,6 +818,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
|
||||
@@ -346,6 +346,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
|
||||
@@ -251,6 +251,7 @@
|
||||
"entra_break_glass_account_fido2_security_key_registered",
|
||||
"entra_conditional_access_policy_mfa_enforced_for_guest_users",
|
||||
"entra_default_app_management_policy_enabled",
|
||||
"entra_emergency_access_exclusion",
|
||||
"entra_all_apps_conditional_access_coverage",
|
||||
"entra_conditional_access_policy_device_registration_mfa_required",
|
||||
"entra_intune_enrollment_sign_in_frequency_every_time",
|
||||
@@ -260,6 +261,7 @@
|
||||
"entra_legacy_authentication_blocked",
|
||||
"entra_managed_device_required_for_authentication",
|
||||
"entra_seamless_sso_disabled",
|
||||
"entra_service_principal_no_secrets_for_permanent_tier0_roles",
|
||||
"entra_users_mfa_enabled",
|
||||
"exchange_organization_modern_authentication_enabled",
|
||||
"exchange_transport_config_smtp_auth_disabled",
|
||||
@@ -282,6 +284,7 @@
|
||||
"entra_admin_portals_access_restriction",
|
||||
"entra_app_registration_no_unused_privileged_permissions",
|
||||
"entra_policy_guest_users_access_restrictions",
|
||||
"entra_service_principal_no_secrets_for_permanent_tier0_roles",
|
||||
"sharepoint_external_sharing_managed",
|
||||
"sharepoint_external_sharing_restricted",
|
||||
"sharepoint_guest_sharing_restricted"
|
||||
@@ -671,10 +674,12 @@
|
||||
"entra_admin_users_phishing_resistant_mfa_enabled",
|
||||
"entra_admin_users_sign_in_frequency_enabled",
|
||||
"entra_break_glass_account_fido2_security_key_registered",
|
||||
"entra_emergency_access_exclusion",
|
||||
"entra_app_registration_no_unused_privileged_permissions",
|
||||
"entra_policy_ensure_default_user_cannot_create_tenants",
|
||||
"entra_policy_guest_invite_only_for_admin_roles",
|
||||
"entra_seamless_sso_disabled"
|
||||
"entra_seamless_sso_disabled",
|
||||
"entra_service_principal_no_secrets_for_permanent_tier0_roles"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -727,9 +732,11 @@
|
||||
"entra_conditional_access_policy_device_code_flow_blocked",
|
||||
"entra_conditional_access_policy_directory_sync_account_excluded",
|
||||
"entra_conditional_access_policy_corporate_device_sign_in_frequency_enforced",
|
||||
"entra_emergency_access_exclusion",
|
||||
"entra_identity_protection_sign_in_risk_enabled",
|
||||
"entra_managed_device_required_for_authentication",
|
||||
"entra_seamless_sso_disabled",
|
||||
"entra_service_principal_no_secrets_for_permanent_tier0_roles",
|
||||
"entra_users_mfa_enabled"
|
||||
]
|
||||
},
|
||||
|
||||