Compare commits
34 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 5198b0251c | |||
| 9894ac7bc3 | |||
| 8ae076f22c | |||
| 27fb8518cd | |||
| 375de5dc87 | |||
| 3410fc927a | |||
| dcf91ef252 | |||
| bf4fd8fabd | |||
| 3d65208fd3 | |||
| 7d3ed62e90 | |||
| 5f92989492 | |||
| 6befa78978 | |||
| 78af0c24fe | |||
| 1bb547e5e1 | |||
| 1f39b01fb2 | |||
| fb0ef391f2 | |||
| f2e6a3264d | |||
| 9bd4e4b65c | |||
| 68ffb2b219 | |||
| 739be07077 | |||
| 0abbb7fc59 | |||
| 0b4393776c | |||
| 4dd5baadf6 | |||
| 934d995661 | |||
| ccdc01ed7b | |||
| d84099e87a | |||
| cf55f7eb43 | |||
| 9293c7b58d | |||
| a883bb30d4 | |||
| e476bbde2d | |||
| 7f3dcdf02f | |||
| 132e79df89 | |||
| b2ed9ee221 | |||
| def2d3d188 |
@@ -2,20 +2,19 @@
|
||||
# Runs automatically on `wt switch --create`.
|
||||
|
||||
# Block 1: setup + copy gitignored env files (.envrc, ui/.env.local)
|
||||
# from the primary worktree — patterns selected via .worktreeinclude.
|
||||
# from the primary worktree - patterns selected via .worktreeinclude.
|
||||
[[pre-start]]
|
||||
skills = "./skills/setup.sh --claude"
|
||||
python = "poetry env use python3.12"
|
||||
envs = "wt step copy-ignored"
|
||||
|
||||
# Block 2: install Python deps (requires `poetry env use` from block 1).
|
||||
# Block 2: install Python deps (uv manages the venv on `uv sync`).
|
||||
[[pre-start]]
|
||||
deps = "poetry install --with dev"
|
||||
deps = "uv sync"
|
||||
|
||||
# Block 3: reminder — last visible output before `wt switch` returns.
|
||||
# Block 3: reminder - last visible output before `wt switch` returns.
|
||||
# Hooks can't mutate the parent shell, so venv activation is manual.
|
||||
[[pre-start]]
|
||||
reminder = "echo '>> Reminder: activate the venv in this shell with: eval $(poetry env activate)'"
|
||||
reminder = "echo '>> Reminder: activate the venv in this shell with: source .venv/bin/activate'"
|
||||
|
||||
# Background: pnpm install runs while you start working.
|
||||
# Tail logs via `wt config state logs`.
|
||||
|
||||
@@ -0,0 +1,169 @@
|
||||
name: 'OSV-Scanner'
|
||||
description: 'Install osv-scanner and scan a lockfile, failing on HIGH/CRITICAL/UNKNOWN severity findings. Posts/updates a PR comment with findings on pull_request events (requires pull-requests: write).'
|
||||
author: 'Prowler'
|
||||
|
||||
inputs:
|
||||
lockfile:
|
||||
description: 'Path to the lockfile to scan, relative to the repository root (e.g. uv.lock, api/uv.lock, ui/pnpm-lock.yaml).'
|
||||
required: true
|
||||
severity-levels:
|
||||
description: 'Comma-separated severity levels that fail the scan. Default: HIGH,CRITICAL,UNKNOWN.'
|
||||
required: false
|
||||
default: 'HIGH,CRITICAL,UNKNOWN'
|
||||
version:
|
||||
description: 'osv-scanner release tag to install. When overriding, you MUST also override binary-sha256.'
|
||||
required: false
|
||||
default: 'v2.3.8'
|
||||
binary-sha256:
|
||||
description: 'Expected SHA256 of osv-scanner_linux_amd64 for the given version. Default tracks v2.3.8. See https://github.com/google/osv-scanner/releases/download/<version>/osv-scanner_SHA256SUMS.'
|
||||
required: false
|
||||
default: 'bc98e15319ed0d515e3f9235287ba53cdc5535d576d24fd573978ecfe9ab92dc'
|
||||
post-pr-comment:
|
||||
description: 'Post or update a PR comment with the scan report. Only effective on pull_request events. Requires pull-requests: write permission on the caller job.'
|
||||
required: false
|
||||
default: 'true'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Install osv-scanner
|
||||
shell: bash
|
||||
env:
|
||||
OSV_SCANNER_VERSION: ${{ inputs.version }}
|
||||
# Download the binary AND the published SHA256SUMS file, then verify the
|
||||
# binary checksum against the upstream-signed manifest. Aborts on mismatch.
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if command -v osv-scanner >/dev/null 2>&1; then
|
||||
INSTALLED="$(osv-scanner --version 2>&1 | awk '/scanner version/ {print $NF; exit}')"
|
||||
if [ "v${INSTALLED}" = "${OSV_SCANNER_VERSION}" ]; then
|
||||
echo "osv-scanner ${OSV_SCANNER_VERSION} already installed."
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
BASE="https://github.com/google/osv-scanner/releases/download/${OSV_SCANNER_VERSION}"
|
||||
BIN_NAME="osv-scanner_linux_amd64"
|
||||
curl -fSL --retry 3 "${BASE}/${BIN_NAME}" -o "${RUNNER_TEMP}/${BIN_NAME}"
|
||||
curl -fSL --retry 3 "${BASE}/osv-scanner_SHA256SUMS" -o "${RUNNER_TEMP}/osv-scanner_SHA256SUMS"
|
||||
(cd "${RUNNER_TEMP}" && sha256sum --check --ignore-missing osv-scanner_SHA256SUMS)
|
||||
chmod +x "${RUNNER_TEMP}/${BIN_NAME}"
|
||||
sudo mv "${RUNNER_TEMP}/${BIN_NAME}" /usr/local/bin/osv-scanner
|
||||
rm -f "${RUNNER_TEMP}/osv-scanner_SHA256SUMS"
|
||||
osv-scanner --version
|
||||
|
||||
- name: Run osv-scanner
|
||||
id: scan
|
||||
shell: bash
|
||||
working-directory: ${{ github.workspace }}
|
||||
env:
|
||||
OSV_LOCKFILE: ${{ inputs.lockfile }}
|
||||
OSV_SEVERITY_LEVELS: ${{ inputs.severity-levels }}
|
||||
OSV_REPORT_FILE: ${{ runner.temp }}/osv-scanner-findings.json
|
||||
# Per-vulnerability ignores (reason + expiry) live in osv-scanner.toml at the repo root, if present.
|
||||
# Severity filter is enforced in the wrapper via OSV_SEVERITY_LEVELS.
|
||||
# `continue-on-error: true` lets the PR-comment step run even when findings exist;
|
||||
# the gate step below re-fails the job from the wrapper exit code.
|
||||
continue-on-error: true
|
||||
run: ./.github/scripts/osv-scan.sh --lockfile="${OSV_LOCKFILE}"
|
||||
|
||||
- name: Post osv-scanner report on PR
|
||||
if: >-
|
||||
always()
|
||||
&& inputs.post-pr-comment == 'true'
|
||||
&& github.event_name == 'pull_request'
|
||||
&& github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
OSV_REPORT_FILE: ${{ runner.temp }}/osv-scanner-findings.json
|
||||
OSV_LOCKFILE: ${{ inputs.lockfile }}
|
||||
OSV_SEVERITY_LEVELS: ${{ inputs.severity-levels }}
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
const lockfile = process.env.OSV_LOCKFILE;
|
||||
const severityLevels = process.env.OSV_SEVERITY_LEVELS;
|
||||
const reportFile = process.env.OSV_REPORT_FILE;
|
||||
const marker = `<!-- osv-scanner-report:${lockfile} -->`;
|
||||
const runUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
|
||||
|
||||
let findings = [];
|
||||
if (fs.existsSync(reportFile)) {
|
||||
try {
|
||||
findings = JSON.parse(fs.readFileSync(reportFile, 'utf8'));
|
||||
} catch (err) {
|
||||
core.warning(`Could not parse ${reportFile}: ${err.message}`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
});
|
||||
const existing = comments.find(c => c.body?.includes(marker));
|
||||
|
||||
if (findings.length === 0) {
|
||||
if (existing) {
|
||||
await github.rest.issues.deleteComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: existing.id,
|
||||
});
|
||||
core.info(`Deleted stale osv-scanner comment for ${lockfile}.`);
|
||||
} else {
|
||||
core.info(`No findings and no stale comment for ${lockfile}.`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const sevIcon = (s) => ({
|
||||
CRITICAL: '🔴', HIGH: '🟠', MEDIUM: '🟡', LOW: '🟢', UNKNOWN: '⚪',
|
||||
}[s] || '⚪');
|
||||
const escape = (s) => String(s ?? '').replace(/\|/g, '\\|').replace(/\n/g, ' ');
|
||||
const rows = findings.map(f =>
|
||||
`| ${sevIcon(f.severity)} ${f.severity}${f.score ? ` (${f.score})` : ''} | \`${escape(f.id)}\` | \`${escape(f.ecosystem)}/${escape(f.package)}\` | \`${escape(f.version)}\` | ${escape(f.summary || '(no summary)')} |`
|
||||
);
|
||||
|
||||
const body = [
|
||||
marker,
|
||||
`## 🔒 osv-scanner: ${findings.length} finding(s) in \`${lockfile}\``,
|
||||
'',
|
||||
`Severity gate: \`${severityLevels}\``,
|
||||
'',
|
||||
'| Severity | ID | Package | Version | Summary |',
|
||||
'|----------|----|---------|---------|---------|',
|
||||
...rows,
|
||||
'',
|
||||
`To accept a finding, add an \`[[IgnoredVulns]]\` entry to \`osv-scanner.toml\` at the repo root with a reason and \`ignoreUntil\`.`,
|
||||
'',
|
||||
`<sub>[View run](${runUrl})</sub>`,
|
||||
].join('\n');
|
||||
|
||||
if (existing) {
|
||||
await github.rest.issues.updateComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: existing.id,
|
||||
body,
|
||||
});
|
||||
core.info(`Updated osv-scanner comment for ${lockfile}.`);
|
||||
} else {
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body,
|
||||
});
|
||||
core.info(`Posted new osv-scanner comment for ${lockfile}.`);
|
||||
}
|
||||
|
||||
- name: Enforce osv-scanner severity gate
|
||||
shell: bash
|
||||
env:
|
||||
SCAN_OUTCOME: ${{ steps.scan.outcome }}
|
||||
run: |
|
||||
if [ "${SCAN_OUTCOME}" != "success" ]; then
|
||||
echo "osv-scanner gate: scan reported findings (outcome=${SCAN_OUTCOME})" >&2
|
||||
exit 1
|
||||
fi
|
||||
@@ -1,5 +1,5 @@
|
||||
name: 'Setup Python with Poetry'
|
||||
description: 'Setup Python environment with Poetry and install dependencies'
|
||||
name: 'Setup Python with uv'
|
||||
description: 'Setup Python environment with uv and install dependencies'
|
||||
author: 'Prowler'
|
||||
|
||||
inputs:
|
||||
@@ -7,23 +7,15 @@ inputs:
|
||||
description: 'Python version to use'
|
||||
required: true
|
||||
working-directory:
|
||||
description: 'Working directory for Poetry'
|
||||
description: 'Working directory for uv'
|
||||
required: false
|
||||
default: '.'
|
||||
poetry-version:
|
||||
description: 'Poetry version to install'
|
||||
uv-version:
|
||||
description: 'uv version to install'
|
||||
required: false
|
||||
default: '2.3.4'
|
||||
default: '0.11.14'
|
||||
install-dependencies:
|
||||
description: 'Install Python dependencies with Poetry'
|
||||
required: false
|
||||
default: 'true'
|
||||
update-lock:
|
||||
description: 'Run `poetry lock` during setup. Only enable when a prior step mutates pyproject.toml (e.g. API `@master` VCS rewrite). Default: false.'
|
||||
required: false
|
||||
default: 'false'
|
||||
enable-cache:
|
||||
description: 'Whether to enable Poetry dependency caching via actions/setup-python'
|
||||
description: 'Install Python dependencies with uv'
|
||||
required: false
|
||||
default: 'true'
|
||||
|
||||
@@ -47,54 +39,52 @@ runs:
|
||||
sed -i "s|\(git+https://github.com/prowler-cloud/prowler[^@]*\)@master|\1@$BRANCH_NAME|g" pyproject.toml
|
||||
fi
|
||||
|
||||
- name: Install poetry
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==${INPUTS_POETRY_VERSION}
|
||||
env:
|
||||
INPUTS_POETRY_VERSION: ${{ inputs.poetry-version }}
|
||||
|
||||
- name: Update poetry.lock with latest Prowler commit
|
||||
- name: Update uv.lock with latest Prowler commit
|
||||
if: github.repository_owner == 'prowler-cloud' && github.repository != 'prowler-cloud/prowler'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: |
|
||||
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
|
||||
echo "Latest commit hash: $LATEST_COMMIT"
|
||||
sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
|
||||
s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
|
||||
}' poetry.lock
|
||||
echo "Updated resolved_reference:"
|
||||
grep -A2 -B2 "resolved_reference" poetry.lock
|
||||
sed -i "s|\(git = \"https://github\.com/prowler-cloud/prowler\.git?rev=master\)#[a-f0-9]\{40\}\"|\1#${LATEST_COMMIT}\"|g" uv.lock
|
||||
echo "Updated uv.lock entry:"
|
||||
grep "prowler-cloud/prowler" uv.lock
|
||||
|
||||
- name: Update poetry.lock (prowler repo only)
|
||||
if: github.repository == 'prowler-cloud/prowler' && inputs.update-lock == 'true'
|
||||
- name: Update uv.lock SDK commit (prowler repo on push)
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository == 'prowler-cloud/prowler'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: poetry lock
|
||||
run: |
|
||||
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
|
||||
echo "Latest commit hash: $LATEST_COMMIT"
|
||||
sed -i "s|\(git = \"https://github\.com/prowler-cloud/prowler\.git?rev=master\)#[a-f0-9]\{40\}\"|\1#${LATEST_COMMIT}\"|g" uv.lock
|
||||
echo "Updated uv.lock entry:"
|
||||
grep "prowler-cloud/prowler" uv.lock
|
||||
|
||||
- name: Install uv
|
||||
shell: bash
|
||||
env:
|
||||
UV_VERSION: ${{ inputs.uv-version }}
|
||||
run: pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir "uv==${UV_VERSION}"
|
||||
|
||||
- name: Set up Python ${{ inputs.python-version }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
# Disable cache when callers skip dependency install: Poetry 2.3.4 creates
|
||||
# the venv in a path setup-python can't hash, breaking the post-step save-cache.
|
||||
cache: ${{ inputs.enable-cache == 'true' && 'poetry' || '' }}
|
||||
cache-dependency-path: ${{ inputs.enable-cache == 'true' && format('{0}/poetry.lock', inputs.working-directory) || '' }}
|
||||
cache: 'pip'
|
||||
|
||||
- name: Install Python dependencies
|
||||
if: inputs.install-dependencies == 'true'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry run pip list
|
||||
uv sync --no-install-project
|
||||
uv run pip list
|
||||
|
||||
- name: Update Prowler Cloud API Client
|
||||
if: github.repository_owner == 'prowler-cloud' && github.repository != 'prowler-cloud/prowler'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: |
|
||||
poetry remove prowler-cloud-api-client
|
||||
poetry add ./prowler-cloud-api-client
|
||||
uv remove prowler-cloud-api-client
|
||||
uv add ./prowler-cloud-api-client
|
||||
@@ -72,6 +72,11 @@ provider/vercel:
|
||||
- any-glob-to-any-file: "prowler/providers/vercel/**"
|
||||
- any-glob-to-any-file: "tests/providers/vercel/**"
|
||||
|
||||
provider/okta:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/okta/**"
|
||||
- any-glob-to-any-file: "tests/providers/okta/**"
|
||||
|
||||
github_actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: ".github/workflows/*"
|
||||
@@ -109,6 +114,8 @@ mutelist:
|
||||
- any-glob-to-any-file: "tests/providers/googleworkspace/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/vercel/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/vercel/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/okta/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/okta/lib/mutelist/**"
|
||||
|
||||
integration/s3:
|
||||
- changed-files:
|
||||
|
||||
@@ -36,6 +36,7 @@ Please add a detailed description of how to review this PR.
|
||||
|
||||
#### UI
|
||||
- [ ] All issue/task requirements work as expected on the UI
|
||||
- [ ] If this PR adds or updates npm dependencies, include package-health evidence (maintenance, popularity, known vulnerabilities, license, release age) and explain why existing/native alternatives are insufficient.
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Mobile (X < 640px)
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Table (640px > X < 1024px)
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Desktop (X > 1024px)
|
||||
@@ -48,7 +49,7 @@ Please add a detailed description of how to review this PR.
|
||||
- [ ] Performance test results (if applicable)
|
||||
- [ ] Any other relevant evidence of the implementation (if applicable)
|
||||
- [ ] Verify if API specs need to be regenerated.
|
||||
- [ ] Check if version updates are required (e.g., specs, Poetry, etc.).
|
||||
- [ ] Check if version updates are required (e.g., specs, uv, etc.).
|
||||
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/api/CHANGELOG.md), if applicable.
|
||||
|
||||
### License
|
||||
|
||||
@@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env bash
|
||||
# Run osv-scanner and fail when findings match the configured severity levels.
|
||||
#
|
||||
# Replaces `safety check --policy-file .safety-policy.yml`. Used by:
|
||||
# - .github/actions/osv-scanner/action.yml (composite action)
|
||||
# - .github/workflows/api-security.yml, sdk-security.yml, ui-security.yml
|
||||
#
|
||||
# Severity levels (comma-separated) are read from OSV_SEVERITY_LEVELS.
|
||||
# Default: HIGH,CRITICAL,UNKNOWN — preserves prior .safety-policy.yml policy
|
||||
# (ignore-cvss-severity-below: 7 + ignore-cvss-unknown-severity: False).
|
||||
# osv-scanner has no native CVSS threshold (google/osv-scanner#1400, closed
|
||||
# not-planned). Severity is derived from $group.max_severity (numeric CVSS
|
||||
# score string) which osv-scanner emits per group.
|
||||
#
|
||||
# CVSS v3 score → categorical label:
|
||||
# CRITICAL >= 9.0
|
||||
# HIGH >= 7.0
|
||||
# MEDIUM >= 4.0
|
||||
# LOW > 0.0
|
||||
# UNKNOWN no score available
|
||||
#
|
||||
# Per-vulnerability ignores (with reason + expiry) live in osv-scanner.toml at
|
||||
# the repo root, if it exists. Without that file, osv-scanner uses defaults.
|
||||
#
|
||||
# Usage:
|
||||
# osv-scan.sh [osv-scanner pass-through args...]
|
||||
# Examples:
|
||||
# osv-scan.sh --lockfile=uv.lock
|
||||
# osv-scan.sh --recursive .
|
||||
# OSV_SEVERITY_LEVELS=CRITICAL osv-scan.sh --lockfile=uv.lock
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
ROOT="$(git rev-parse --show-toplevel)"
|
||||
CONFIG="${ROOT}/osv-scanner.toml"
|
||||
SEVERITY_LEVELS="${OSV_SEVERITY_LEVELS:-HIGH,CRITICAL,UNKNOWN}"
|
||||
|
||||
for bin in osv-scanner jq; do
|
||||
if ! command -v "${bin}" >/dev/null 2>&1; then
|
||||
echo "error: ${bin} not found in PATH" >&2
|
||||
exit 2
|
||||
fi
|
||||
done
|
||||
|
||||
SCAN_ARGS=()
|
||||
if [ -f "${CONFIG}" ]; then
|
||||
SCAN_ARGS+=(--config="${CONFIG}")
|
||||
fi
|
||||
|
||||
# Exit codes: 0=clean, 1=findings, 127=no supported files, 128=internal error.
|
||||
STDERR="$(mktemp)"
|
||||
trap 'rm -f "${STDERR}"' EXIT
|
||||
|
||||
set +e
|
||||
OUTPUT="$(osv-scanner scan source "${SCAN_ARGS[@]}" --format=json "$@" 2>"${STDERR}")"
|
||||
RC=$?
|
||||
set -e
|
||||
|
||||
case "${RC}" in
|
||||
0|1) ;;
|
||||
127) echo "osv-scanner: no supported lockfiles in scan target"; exit 0 ;;
|
||||
*)
|
||||
echo "osv-scanner: exited with code ${RC}" >&2
|
||||
[ -s "${STDERR}" ] && cat "${STDERR}" >&2
|
||||
exit "${RC}"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Build a JSON array of normalized severity levels for jq.
|
||||
SEVERITY_JSON="$(printf '%s' "${SEVERITY_LEVELS}" | jq -Rc '
|
||||
split(",") | map(ascii_upcase | sub("^\\s+"; "") | sub("\\s+$"; ""))
|
||||
')"
|
||||
|
||||
# Walk each vulnerability, look up its group's max_severity (numeric CVSS),
|
||||
# map to a categorical label, then filter by OSV_SEVERITY_LEVELS.
|
||||
FINDINGS="$(printf '%s' "${OUTPUT}" | jq --argjson sevs "${SEVERITY_JSON}" '
|
||||
[ .results[]?.packages[]?
|
||||
| . as $pkg
|
||||
| ($pkg.groups // []) as $groups
|
||||
| ($pkg.vulnerabilities // [])[]
|
||||
| . as $vuln
|
||||
| ([ $groups[] | select((.ids // []) | index($vuln.id)) ][0] // {}) as $group
|
||||
| (($group.max_severity // "") | tonumber? // null) as $score
|
||||
| (if $score == null then "UNKNOWN"
|
||||
elif $score >= 9.0 then "CRITICAL"
|
||||
elif $score >= 7.0 then "HIGH"
|
||||
elif $score >= 4.0 then "MEDIUM"
|
||||
elif $score > 0 then "LOW"
|
||||
else "UNKNOWN"
|
||||
end) as $label
|
||||
| {
|
||||
id: $vuln.id,
|
||||
severity: $label,
|
||||
score: $score,
|
||||
summary: ($vuln.summary // null),
|
||||
package: $pkg.package.name,
|
||||
version: $pkg.package.version,
|
||||
ecosystem: $pkg.package.ecosystem
|
||||
}
|
||||
| select(.severity as $s | $sevs | any(. == $s))
|
||||
]
|
||||
')"
|
||||
|
||||
COUNT="$(printf '%s' "${FINDINGS}" | jq 'length')"
|
||||
|
||||
# Write the findings JSON to OSV_REPORT_FILE so callers (e.g. the composite
|
||||
# action's PR-comment step) can consume the same data the gate decision uses.
|
||||
if [ -n "${OSV_REPORT_FILE:-}" ]; then
|
||||
printf '%s' "${FINDINGS}" > "${OSV_REPORT_FILE}"
|
||||
fi
|
||||
|
||||
if [ "${COUNT}" -gt 0 ]; then
|
||||
echo "osv-scanner: ${COUNT} finding(s) at severity ${SEVERITY_LEVELS}"
|
||||
printf '%s' "${FINDINGS}" | jq -r '
|
||||
.[] | " [\(.severity)\(if .score then " \(.score)" else "" end)] \(.id) \(.ecosystem)/\(.package)@\(.version) — \(.summary // "(no summary)")"
|
||||
'
|
||||
echo
|
||||
echo "To accept a finding, create osv-scanner.toml at the repo root with a reason and ignoreUntil."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "osv-scanner: no findings at severity levels: ${SEVERITY_LEVELS}"
|
||||
@@ -43,6 +43,7 @@ jobs:
|
||||
pypi.org:443
|
||||
files.pythonhosted.org:443
|
||||
api.github.com:443
|
||||
raw.githubusercontent.com:443
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
@@ -63,26 +64,25 @@ jobs:
|
||||
api/CHANGELOG.md
|
||||
api/AGENTS.md
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
- name: Setup Python with uv
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
working-directory: ./api
|
||||
update-lock: 'true'
|
||||
|
||||
- name: Poetry check
|
||||
- name: uv lock check
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry check --lock
|
||||
run: uv lock --check
|
||||
|
||||
- name: Ruff lint
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run ruff check . --exclude contrib
|
||||
run: uv run ruff check . --exclude contrib
|
||||
|
||||
- name: Ruff format
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run ruff format --check . --exclude contrib
|
||||
run: uv run ruff format --check . --exclude contrib
|
||||
|
||||
- name: Pylint
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
|
||||
run: uv run pylint --disable=W,C,R,E -j 0 -rn -sn src/
|
||||
|
||||
@@ -9,7 +9,9 @@ on:
|
||||
- 'api/**'
|
||||
- '.github/workflows/api-tests.yml'
|
||||
- '.github/workflows/api-security.yml'
|
||||
- '.github/actions/setup-python-poetry/**'
|
||||
- '.github/actions/setup-python-uv/**'
|
||||
- '.github/actions/osv-scanner/**'
|
||||
- '.github/scripts/osv-scan.sh'
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
@@ -18,7 +20,9 @@ on:
|
||||
- 'api/**'
|
||||
- '.github/workflows/api-tests.yml'
|
||||
- '.github/workflows/api-security.yml'
|
||||
- '.github/actions/setup-python-poetry/**'
|
||||
- '.github/actions/setup-python-uv/**'
|
||||
- '.github/actions/osv-scanner/**'
|
||||
- '.github/scripts/osv-scan.sh'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
@@ -35,6 +39,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write # osv-scanner action posts/updates a PR comment with findings
|
||||
strategy:
|
||||
matrix:
|
||||
python-version:
|
||||
@@ -52,10 +57,12 @@ jobs:
|
||||
pypi.org:443
|
||||
files.pythonhosted.org:443
|
||||
github.com:443
|
||||
auth.safetycli.com:443
|
||||
pyup.io:443
|
||||
data.safetycli.com:443
|
||||
api.github.com:443
|
||||
objects.githubusercontent.com:443
|
||||
release-assets.githubusercontent.com:443
|
||||
api.osv.dev:443
|
||||
api.deps.dev:443
|
||||
osv-vulnerabilities.storage.googleapis.com:443
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
@@ -70,30 +77,34 @@ jobs:
|
||||
files: |
|
||||
api/**
|
||||
.github/workflows/api-security.yml
|
||||
.safety-policy.yml
|
||||
.github/actions/osv-scanner/**
|
||||
.github/scripts/osv-scan.sh
|
||||
files_ignore: |
|
||||
api/docs/**
|
||||
api/README.md
|
||||
api/CHANGELOG.md
|
||||
api/AGENTS.md
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
- name: Setup Python with uv
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
working-directory: ./api
|
||||
update-lock: 'true'
|
||||
|
||||
- name: Bandit
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
|
||||
# Exclude .venv because uv places the project venv inside ./api; otherwise
|
||||
# bandit would recurse into installed third-party packages.
|
||||
run: uv run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .
|
||||
|
||||
- name: Safety
|
||||
- name: Dependency vulnerability scan with osv-scanner
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
# Accepted CVEs, severity threshold, and ignore expirations live in ../.safety-policy.yml
|
||||
run: poetry run safety check --policy-file ../.safety-policy.yml
|
||||
uses: ./.github/actions/osv-scanner
|
||||
with:
|
||||
lockfile: api/uv.lock
|
||||
|
||||
- name: Vulture
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
|
||||
# Run even when osv-scanner reports findings so dead-code signal isn't masked by SCA failures.
|
||||
if: ${{ !cancelled() && steps.check-changes.outputs.any_changed == 'true' }}
|
||||
run: uv run vulture --exclude "contrib,tests,conftest.py,.venv" --min-confidence 100 .
|
||||
|
||||
@@ -87,6 +87,7 @@ jobs:
|
||||
files.pythonhosted.org:443
|
||||
cli.codecov.io:443
|
||||
keybase.io:443
|
||||
raw.githubusercontent.com:443
|
||||
ingest.codecov.io:443
|
||||
storage.googleapis.com:443
|
||||
o26192.ingest.us.sentry.io:443
|
||||
@@ -112,17 +113,16 @@ jobs:
|
||||
api/CHANGELOG.md
|
||||
api/AGENTS.md
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
- name: Setup Python with uv
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
working-directory: ./api
|
||||
update-lock: 'true'
|
||||
|
||||
- name: Run tests with pytest
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run pytest --cov=./src/backend --cov-report=xml src/backend
|
||||
run: uv run pytest --cov=./src/backend --cov-report=xml src/backend
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
|
||||
@@ -59,7 +59,7 @@ jobs:
|
||||
ui/**
|
||||
prowler/**
|
||||
mcp_server/**
|
||||
poetry.lock
|
||||
uv.lock
|
||||
pyproject.toml
|
||||
|
||||
- name: Check for folder changes and changelog presence
|
||||
@@ -84,9 +84,9 @@ jobs:
|
||||
fi
|
||||
done
|
||||
|
||||
# Check root-level dependency files (poetry.lock, pyproject.toml)
|
||||
# Check root-level dependency files (uv.lock, pyproject.toml)
|
||||
# These are associated with the prowler folder changelog
|
||||
root_deps_changed=$(echo "${STEPS_CHANGED_FILES_OUTPUTS_ALL_CHANGED_FILES}" | tr ' ' '\n' | grep -E "^(poetry\.lock|pyproject\.toml)$" || true)
|
||||
root_deps_changed=$(echo "${STEPS_CHANGED_FILES_OUTPUTS_ALL_CHANGED_FILES}" | tr ' ' '\n' | grep -E "^(uv\.lock|pyproject\.toml)$" || true)
|
||||
if [ -n "$root_deps_changed" ]; then
|
||||
echo "Detected changes in root dependency files: $root_deps_changed"
|
||||
# Check if prowler/CHANGELOG.md was already updated (might have been caught above)
|
||||
|
||||
@@ -40,12 +40,11 @@ jobs:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
- name: Setup Python with uv
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: '3.12'
|
||||
install-dependencies: 'false'
|
||||
enable-cache: 'false'
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
@@ -339,10 +338,11 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Update poetry lock file
|
||||
echo "Updating poetry.lock file..."
|
||||
# Update uv lock file
|
||||
echo "Updating uv.lock file..."
|
||||
pip install --no-cache-dir uv==0.11.14
|
||||
cd api
|
||||
poetry lock
|
||||
uv lock
|
||||
cd ..
|
||||
|
||||
echo "✓ Prepared prowler dependency update to: $UPDATED_PROWLER_REF"
|
||||
@@ -357,7 +357,7 @@ jobs:
|
||||
base: ${{ env.BRANCH_NAME }}
|
||||
add-paths: |
|
||||
api/pyproject.toml
|
||||
api/poetry.lock
|
||||
api/uv.lock
|
||||
title: "chore(api): Update prowler dependency to ${{ env.BRANCH_NAME }} for release ${{ env.PROWLER_VERSION }}"
|
||||
body: |
|
||||
### Description
|
||||
@@ -366,7 +366,7 @@ jobs:
|
||||
|
||||
**Changes:**
|
||||
- Updates `api/pyproject.toml` prowler dependency from `@master` to `@${{ env.BRANCH_NAME }}`
|
||||
- Updates `api/poetry.lock` file with resolved dependencies
|
||||
- Updates `api/uv.lock` file with resolved dependencies
|
||||
|
||||
This PR should be merged into the `${{ env.BRANCH_NAME }}` release branch.
|
||||
|
||||
|
||||
@@ -71,24 +71,26 @@ jobs:
|
||||
contrib/**
|
||||
**/AGENTS.md
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
- name: Setup Python with uv
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Check Poetry lock file
|
||||
- name: Check uv lock file
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry check --lock
|
||||
run: uv lock --check
|
||||
|
||||
- name: Lint with flake8
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api,skills
|
||||
run: uv run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude .venv,contrib,ui,api,skills,mcp_server
|
||||
|
||||
- name: Check format with black
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run black --exclude "api|ui|skills" --check .
|
||||
# mcp_server has its own pyproject and uses ruff format, exclude it so SDK black
|
||||
# does not fight ruff over rules it never formatted.
|
||||
run: uv run black --exclude "\.venv|api|ui|skills|mcp_server" --check .
|
||||
|
||||
- name: Lint with pylint
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
|
||||
run: uv run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
|
||||
|
||||
@@ -73,20 +73,10 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
install-dependencies: 'false'
|
||||
enable-cache: 'false'
|
||||
|
||||
- name: Inject poetry-bumpversion plugin
|
||||
run: pipx inject poetry poetry-bumpversion
|
||||
|
||||
- name: Get Prowler version and set tags
|
||||
id: get-prowler-version
|
||||
run: |
|
||||
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
|
||||
PROWLER_VERSION="$(grep -E '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')"
|
||||
echo "prowler_version=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
|
||||
|
||||
@@ -9,7 +9,7 @@ on:
|
||||
- 'prowler/**'
|
||||
- 'Dockerfile*'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- 'uv.lock'
|
||||
- '.github/workflows/sdk-container-checks.yml'
|
||||
pull_request:
|
||||
branches:
|
||||
@@ -19,7 +19,7 @@ on:
|
||||
- 'prowler/**'
|
||||
- 'Dockerfile*'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- 'uv.lock'
|
||||
- '.github/workflows/sdk-container-checks.yml'
|
||||
|
||||
concurrency:
|
||||
|
||||
@@ -75,15 +75,14 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
- name: Setup Python with uv
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
install-dependencies: 'false'
|
||||
enable-cache: 'false'
|
||||
|
||||
- name: Build Prowler package
|
||||
run: poetry build
|
||||
run: uv build
|
||||
|
||||
- name: Publish Prowler package to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
|
||||
@@ -112,12 +111,11 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
- name: Setup Python with uv
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
install-dependencies: 'false'
|
||||
enable-cache: 'false'
|
||||
|
||||
- name: Install toml package
|
||||
run: pip install toml
|
||||
@@ -128,7 +126,7 @@ jobs:
|
||||
python util/replicate_pypi_package.py
|
||||
|
||||
- name: Build prowler-cloud package
|
||||
run: poetry build
|
||||
run: uv build
|
||||
|
||||
- name: Publish prowler-cloud package to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
|
||||
|
||||
@@ -9,10 +9,12 @@ on:
|
||||
- 'prowler/**'
|
||||
- 'tests/**'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- 'uv.lock'
|
||||
- '.github/workflows/sdk-tests.yml'
|
||||
- '.github/workflows/sdk-security.yml'
|
||||
- '.github/actions/setup-python-poetry/**'
|
||||
- '.github/actions/setup-python-uv/**'
|
||||
- '.github/actions/osv-scanner/**'
|
||||
- '.github/scripts/osv-scan.sh'
|
||||
pull_request:
|
||||
branches:
|
||||
- 'master'
|
||||
@@ -21,10 +23,12 @@ on:
|
||||
- 'prowler/**'
|
||||
- 'tests/**'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- 'uv.lock'
|
||||
- '.github/workflows/sdk-tests.yml'
|
||||
- '.github/workflows/sdk-security.yml'
|
||||
- '.github/actions/setup-python-poetry/**'
|
||||
- '.github/actions/setup-python-uv/**'
|
||||
- '.github/actions/osv-scanner/**'
|
||||
- '.github/scripts/osv-scan.sh'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
@@ -39,6 +43,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write # osv-scanner action posts/updates a PR comment with findings
|
||||
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
@@ -49,10 +54,12 @@ jobs:
|
||||
pypi.org:443
|
||||
files.pythonhosted.org:443
|
||||
github.com:443
|
||||
auth.safetycli.com:443
|
||||
pyup.io:443
|
||||
data.safetycli.com:443
|
||||
api.github.com:443
|
||||
objects.githubusercontent.com:443
|
||||
release-assets.githubusercontent.com:443
|
||||
api.osv.dev:443
|
||||
api.deps.dev:443
|
||||
osv-vulnerabilities.storage.googleapis.com:443
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
@@ -87,21 +94,23 @@ jobs:
|
||||
contrib/**
|
||||
**/AGENTS.md
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
- name: Setup Python with uv
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Security scan with Bandit
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .
|
||||
run: uv run bandit -q -lll -x '*_test.py,./.venv/,./contrib/,./api/,./ui' -r .
|
||||
|
||||
- name: Security scan with Safety
|
||||
- name: Dependency vulnerability scan with osv-scanner
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
# Accepted CVEs, severity threshold, and ignore expirations live in .safety-policy.yml
|
||||
run: poetry run safety check -r pyproject.toml --policy-file .safety-policy.yml
|
||||
uses: ./.github/actions/osv-scanner
|
||||
with:
|
||||
lockfile: uv.lock
|
||||
|
||||
- name: Dead code detection with Vulture
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
|
||||
# Run even when osv-scanner reports findings so dead-code signal isn't masked by SCA failures.
|
||||
if: ${{ !cancelled() && steps.check-changes.outputs.any_changed == 'true' }}
|
||||
run: uv run vulture --exclude ".venv,contrib,api,ui" --min-confidence 100 .
|
||||
|
||||
@@ -92,9 +92,9 @@ jobs:
|
||||
contrib/**
|
||||
**/AGENTS.md
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
- name: Setup Python with uv
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
uses: ./.github/actions/setup-python-uv
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
@@ -107,7 +107,7 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/aws/**
|
||||
./tests/**/aws/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Resolve AWS services under test
|
||||
if: steps.changed-aws.outputs.any_changed == 'true'
|
||||
@@ -209,11 +209,11 @@ jobs:
|
||||
echo "AWS service_paths='${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}'"
|
||||
|
||||
if [ "${STEPS_AWS_SERVICES_OUTPUTS_RUN_ALL}" = "true" ]; then
|
||||
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
|
||||
uv run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
|
||||
elif [ -z "${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}" ]; then
|
||||
echo "No AWS service paths detected; skipping AWS tests."
|
||||
else
|
||||
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml ${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}
|
||||
uv run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml ${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}
|
||||
fi
|
||||
env:
|
||||
STEPS_AWS_SERVICES_OUTPUTS_RUN_ALL: ${{ steps.aws-services.outputs.run_all }}
|
||||
@@ -237,11 +237,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/azure/**
|
||||
./tests/**/azure/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run Azure tests
|
||||
if: steps.changed-azure.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
|
||||
|
||||
- name: Upload Azure coverage to Codecov
|
||||
if: steps.changed-azure.outputs.any_changed == 'true'
|
||||
@@ -261,11 +261,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/gcp/**
|
||||
./tests/**/gcp/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run GCP tests
|
||||
if: steps.changed-gcp.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
|
||||
|
||||
- name: Upload GCP coverage to Codecov
|
||||
if: steps.changed-gcp.outputs.any_changed == 'true'
|
||||
@@ -285,11 +285,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/kubernetes/**
|
||||
./tests/**/kubernetes/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run Kubernetes tests
|
||||
if: steps.changed-kubernetes.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
|
||||
|
||||
- name: Upload Kubernetes coverage to Codecov
|
||||
if: steps.changed-kubernetes.outputs.any_changed == 'true'
|
||||
@@ -309,11 +309,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/github/**
|
||||
./tests/**/github/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run GitHub tests
|
||||
if: steps.changed-github.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
|
||||
|
||||
- name: Upload GitHub coverage to Codecov
|
||||
if: steps.changed-github.outputs.any_changed == 'true'
|
||||
@@ -324,6 +324,30 @@ jobs:
|
||||
flags: prowler-py${{ matrix.python-version }}-github
|
||||
files: ./github_coverage.xml
|
||||
|
||||
# Okta Provider
|
||||
- name: Check if Okta files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-okta
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/okta/**
|
||||
./tests/**/okta/**
|
||||
./uv.lock
|
||||
|
||||
- name: Run Okta tests
|
||||
if: steps.changed-okta.outputs.any_changed == 'true'
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/okta --cov-report=xml:okta_coverage.xml tests/providers/okta
|
||||
|
||||
- name: Upload Okta coverage to Codecov
|
||||
if: steps.changed-okta.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: prowler-py${{ matrix.python-version }}-okta
|
||||
files: ./okta_coverage.xml
|
||||
|
||||
# NHN Provider
|
||||
- name: Check if NHN files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
@@ -333,11 +357,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/nhn/**
|
||||
./tests/**/nhn/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run NHN tests
|
||||
if: steps.changed-nhn.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
|
||||
|
||||
- name: Upload NHN coverage to Codecov
|
||||
if: steps.changed-nhn.outputs.any_changed == 'true'
|
||||
@@ -357,11 +381,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/m365/**
|
||||
./tests/**/m365/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run M365 tests
|
||||
if: steps.changed-m365.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
|
||||
|
||||
- name: Upload M365 coverage to Codecov
|
||||
if: steps.changed-m365.outputs.any_changed == 'true'
|
||||
@@ -381,11 +405,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/iac/**
|
||||
./tests/**/iac/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run IaC tests
|
||||
if: steps.changed-iac.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
|
||||
|
||||
- name: Upload IaC coverage to Codecov
|
||||
if: steps.changed-iac.outputs.any_changed == 'true'
|
||||
@@ -405,11 +429,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/mongodbatlas/**
|
||||
./tests/**/mongodbatlas/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run MongoDB Atlas tests
|
||||
if: steps.changed-mongodbatlas.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/mongodbatlas --cov-report=xml:mongodbatlas_coverage.xml tests/providers/mongodbatlas
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/mongodbatlas --cov-report=xml:mongodbatlas_coverage.xml tests/providers/mongodbatlas
|
||||
|
||||
- name: Upload MongoDB Atlas coverage to Codecov
|
||||
if: steps.changed-mongodbatlas.outputs.any_changed == 'true'
|
||||
@@ -429,11 +453,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/oraclecloud/**
|
||||
./tests/**/oraclecloud/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run OCI tests
|
||||
if: steps.changed-oraclecloud.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/oraclecloud --cov-report=xml:oraclecloud_coverage.xml tests/providers/oraclecloud
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/oraclecloud --cov-report=xml:oraclecloud_coverage.xml tests/providers/oraclecloud
|
||||
|
||||
- name: Upload OCI coverage to Codecov
|
||||
if: steps.changed-oraclecloud.outputs.any_changed == 'true'
|
||||
@@ -453,11 +477,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/openstack/**
|
||||
./tests/**/openstack/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run OpenStack tests
|
||||
if: steps.changed-openstack.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/openstack --cov-report=xml:openstack_coverage.xml tests/providers/openstack
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/openstack --cov-report=xml:openstack_coverage.xml tests/providers/openstack
|
||||
|
||||
- name: Upload OpenStack coverage to Codecov
|
||||
if: steps.changed-openstack.outputs.any_changed == 'true'
|
||||
@@ -477,11 +501,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/googleworkspace/**
|
||||
./tests/**/googleworkspace/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run Google Workspace tests
|
||||
if: steps.changed-googleworkspace.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/googleworkspace --cov-report=xml:googleworkspace_coverage.xml tests/providers/googleworkspace
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/googleworkspace --cov-report=xml:googleworkspace_coverage.xml tests/providers/googleworkspace
|
||||
|
||||
- name: Upload Google Workspace coverage to Codecov
|
||||
if: steps.changed-googleworkspace.outputs.any_changed == 'true'
|
||||
@@ -501,11 +525,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/**/vercel/**
|
||||
./tests/**/vercel/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run Vercel tests
|
||||
if: steps.changed-vercel.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/vercel --cov-report=xml:vercel_coverage.xml tests/providers/vercel
|
||||
run: uv run pytest -n auto --cov=./prowler/providers/vercel --cov-report=xml:vercel_coverage.xml tests/providers/vercel
|
||||
|
||||
- name: Upload Vercel coverage to Codecov
|
||||
if: steps.changed-vercel.outputs.any_changed == 'true'
|
||||
@@ -525,11 +549,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/lib/**
|
||||
./tests/lib/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run Lib tests
|
||||
if: steps.changed-lib.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
|
||||
run: uv run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
|
||||
|
||||
- name: Upload Lib coverage to Codecov
|
||||
if: steps.changed-lib.outputs.any_changed == 'true'
|
||||
@@ -549,11 +573,11 @@ jobs:
|
||||
files: |
|
||||
./prowler/config/**
|
||||
./tests/config/**
|
||||
./poetry.lock
|
||||
./uv.lock
|
||||
|
||||
- name: Run Config tests
|
||||
if: steps.changed-config.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
|
||||
run: uv run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
|
||||
|
||||
- name: Upload Config coverage to Codecov
|
||||
if: steps.changed-config.outputs.any_changed == 'true'
|
||||
|
||||
@@ -130,6 +130,12 @@ jobs:
|
||||
echo "AWS_ACCESS_KEY_ID=${{ secrets.E2E_AWS_PROVIDER_ACCESS_KEY }}" >> .env
|
||||
echo "AWS_SECRET_ACCESS_KEY=${{ secrets.E2E_AWS_PROVIDER_SECRET_KEY }}" >> .env
|
||||
|
||||
- name: Build API image from current code
|
||||
# docker-compose.yml references prowlercloud/prowler-api:latest from the registry,
|
||||
# which lags behind PR changes; build locally so E2E exercises the API image
|
||||
# produced by this PR.
|
||||
run: docker build -t prowlercloud/prowler-api:latest ./api
|
||||
|
||||
- name: Start API services
|
||||
run: |
|
||||
export PROWLER_API_VERSION=latest
|
||||
@@ -158,7 +164,7 @@ jobs:
|
||||
for fixture in api/fixtures/dev/*.json; do
|
||||
if [ -f "$fixture" ]; then
|
||||
echo "Loading $fixture"
|
||||
poetry run python manage.py loaddata "$fixture" --database admin
|
||||
uv run python manage.py loaddata "$fixture" --database admin
|
||||
fi
|
||||
done
|
||||
'
|
||||
|
||||
@@ -0,0 +1,75 @@
|
||||
name: 'UI: Security'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
paths:
|
||||
- 'ui/package.json'
|
||||
- 'ui/pnpm-lock.yaml'
|
||||
- '.github/workflows/ui-security.yml'
|
||||
- '.github/actions/osv-scanner/**'
|
||||
- '.github/scripts/osv-scan.sh'
|
||||
pull_request:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
paths:
|
||||
- 'ui/package.json'
|
||||
- 'ui/pnpm-lock.yaml'
|
||||
- '.github/workflows/ui-security.yml'
|
||||
- '.github/actions/osv-scanner/**'
|
||||
- '.github/scripts/osv-scan.sh'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
ui-security-scans:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write # osv-scanner action posts/updates a PR comment with findings
|
||||
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
|
||||
with:
|
||||
egress-policy: block
|
||||
allowed-endpoints: >
|
||||
github.com:443
|
||||
api.github.com:443
|
||||
objects.githubusercontent.com:443
|
||||
release-assets.githubusercontent.com:443
|
||||
api.osv.dev:443
|
||||
api.deps.dev:443
|
||||
osv-vulnerabilities.storage.googleapis.com:443
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
# zizmor: ignore[artipacked]
|
||||
persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch
|
||||
|
||||
- name: Check for UI dependency changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
ui/package.json
|
||||
ui/pnpm-lock.yaml
|
||||
.github/workflows/ui-security.yml
|
||||
.github/actions/osv-scanner/**
|
||||
.github/scripts/osv-scan.sh
|
||||
|
||||
- name: Dependency vulnerability scan with osv-scanner
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/osv-scanner
|
||||
with:
|
||||
lockfile: ui/pnpm-lock.yaml
|
||||
@@ -132,6 +132,10 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: pnpm run healthcheck
|
||||
|
||||
- name: Run pnpm audit
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: pnpm run audit
|
||||
|
||||
- name: Run unit tests (all - critical paths changed)
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed == 'true'
|
||||
run: |
|
||||
|
||||
@@ -107,35 +107,21 @@ repos:
|
||||
files: { glob: ["{api,mcp_server}/**/*.py"] }
|
||||
priority: 20
|
||||
|
||||
## PYTHON — Poetry
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 2.3.4
|
||||
## PYTHON — uv (API + SDK)
|
||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||
rev: 0.11.14
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
name: API - poetry-check
|
||||
args: ["--directory=./api"]
|
||||
files: { glob: ["api/{pyproject.toml,poetry.lock}"] }
|
||||
- id: uv-lock
|
||||
name: API - uv-lock
|
||||
args: ["--check", "--project=./api"]
|
||||
files: { glob: ["api/{pyproject.toml,uv.lock}"] }
|
||||
pass_filenames: false
|
||||
priority: 50
|
||||
|
||||
- id: poetry-lock
|
||||
name: API - poetry-lock
|
||||
args: ["--directory=./api"]
|
||||
files: { glob: ["api/{pyproject.toml,poetry.lock}"] }
|
||||
pass_filenames: false
|
||||
priority: 50
|
||||
|
||||
- id: poetry-check
|
||||
name: SDK - poetry-check
|
||||
args: ["--directory=./"]
|
||||
files: { glob: ["{pyproject.toml,poetry.lock}"] }
|
||||
pass_filenames: false
|
||||
priority: 50
|
||||
|
||||
- id: poetry-lock
|
||||
name: SDK - poetry-lock
|
||||
args: ["--directory=./"]
|
||||
files: { glob: ["{pyproject.toml,poetry.lock}"] }
|
||||
- id: uv-lock
|
||||
name: SDK - uv-lock
|
||||
args: ["--check", "--project=./"]
|
||||
files: { glob: ["{pyproject.toml,uv.lock}"] }
|
||||
pass_filenames: false
|
||||
priority: 50
|
||||
|
||||
@@ -179,16 +165,6 @@ repos:
|
||||
exclude: { glob: ["{contrib,skills}/**", "**/.venv/**", "**/*_test.py"] }
|
||||
priority: 40
|
||||
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
# Accepted CVEs, severity threshold, and ignore expirations live in .safety-policy.yml
|
||||
entry: safety check --policy-file .safety-policy.yml
|
||||
language: system
|
||||
pass_filenames: false
|
||||
files: { glob: ["**/pyproject.toml", "**/poetry.lock", "**/requirements*.txt", ".safety-policy.yml"] }
|
||||
priority: 40
|
||||
|
||||
- id: vulture
|
||||
name: vulture
|
||||
description: "Vulture finds unused code in Python programs."
|
||||
|
||||
@@ -11,15 +11,11 @@ build:
|
||||
python: "3.11"
|
||||
jobs:
|
||||
post_create_environment:
|
||||
# Install poetry
|
||||
# https://python-poetry.org/docs/#installing-manually
|
||||
- python -m pip install poetry==2.3.4
|
||||
- python -m pip install uv==0.11.14
|
||||
post_install:
|
||||
# Install dependencies with 'docs' dependency group
|
||||
# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
|
||||
# VIRTUAL_ENV needs to be set manually for now.
|
||||
# See https://github.com/readthedocs/readthedocs.org/pull/11152/
|
||||
- VIRTUAL_ENV=${READTHEDOCS_VIRTUALENV_PATH} python -m poetry install --only=docs
|
||||
- VIRTUAL_ENV=${READTHEDOCS_VIRTUALENV_PATH} uv sync --group docs --no-install-project
|
||||
|
||||
mkdocs:
|
||||
configuration: mkdocs.yml
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
# Safety policy for `safety check` (Safety CLI 3.x, v2 schema).
|
||||
# Applied in: .pre-commit-config.yaml, .github/workflows/api-security.yml,
|
||||
# .github/workflows/sdk-security.yml via `--policy-file`.
|
||||
#
|
||||
# Validate: poetry run safety validate policy_file --path .safety-policy.yml
|
||||
|
||||
security:
|
||||
# Scan unpinned requirements too. Prowler pins via poetry.lock, so this is
|
||||
# defensive against accidental unpinned entries.
|
||||
ignore-unpinned-requirements: False
|
||||
|
||||
# CVSS severity filter. 7 = report only HIGH (7.0–8.9) and CRITICAL (9.0–10.0).
|
||||
# Reference: 9=CRITICAL only, 7=CRITICAL+HIGH, 4=CRITICAL+HIGH+MEDIUM.
|
||||
ignore-cvss-severity-below: 7
|
||||
|
||||
# Unknown severity is unrated, not safe. Keep False so unrated CVEs still fail
|
||||
# the build and get a human eye. Flip to True only if noise is unmanageable.
|
||||
ignore-cvss-unknown-severity: False
|
||||
|
||||
# Fail the build when a non-ignored vulnerability is found.
|
||||
continue-on-vulnerability-error: False
|
||||
|
||||
# Explicit accepted vulnerabilities. Each entry MUST have a reason and an
|
||||
# expiry. Expired entries fail the scan, forcing re-audit.
|
||||
ignore-vulnerabilities:
|
||||
77744:
|
||||
reason: "Botocore requires urllib3 1.X. Remove once upgraded to urllib3 2.X."
|
||||
expires: '2026-10-22'
|
||||
77745:
|
||||
reason: "Botocore requires urllib3 1.X. Remove once upgraded to urllib3 2.X."
|
||||
expires: '2026-10-22'
|
||||
79023:
|
||||
reason: "knack ReDoS; blocked until azure-cli-core (via cartography) allows knack >=0.13.0."
|
||||
expires: '2026-10-22'
|
||||
79027:
|
||||
reason: "knack ReDoS; blocked until azure-cli-core (via cartography) allows knack >=0.13.0."
|
||||
expires: '2026-10-22'
|
||||
86217:
|
||||
reason: "alibabacloud-tea-openapi==0.4.3 blocks upgrade to cryptography >=46.0.0."
|
||||
expires: '2026-10-22'
|
||||
71600:
|
||||
reason: "CVE-2024-1135 false positive. Fixed in gunicorn 22.0.0; project uses 23.0.0."
|
||||
expires: '2026-10-22'
|
||||
70612:
|
||||
reason: "TBD - audit required. Reason not documented in prior --ignore list."
|
||||
expires: '2026-07-22'
|
||||
66963:
|
||||
reason: "TBD - audit required. Reason not documented in prior --ignore list."
|
||||
expires: '2026-07-22'
|
||||
74429:
|
||||
reason: "TBD - audit required. Reason not documented in prior --ignore list."
|
||||
expires: '2026-07-22'
|
||||
76352:
|
||||
reason: "TBD - audit required. Reason not documented in prior --ignore list."
|
||||
expires: '2026-07-22'
|
||||
76353:
|
||||
reason: "TBD - audit required. Reason not documented in prior --ignore list."
|
||||
expires: '2026-07-22'
|
||||
@@ -15,7 +15,7 @@ Use these skills for detailed patterns on-demand:
|
||||
|-------|-------------|-----|
|
||||
| `typescript` | Const types, flat interfaces, utility types | [SKILL.md](skills/typescript/SKILL.md) |
|
||||
| `react-19` | No useMemo/useCallback, React Compiler | [SKILL.md](skills/react-19/SKILL.md) |
|
||||
| `nextjs-15` | App Router, Server Actions, streaming | [SKILL.md](skills/nextjs-15/SKILL.md) |
|
||||
| `nextjs-16` | App Router, Server Actions, proxy.ts, streaming | [SKILL.md](skills/nextjs-16/SKILL.md) |
|
||||
| `tailwind-4` | cn() utility, no var() in className | [SKILL.md](skills/tailwind-4/SKILL.md) |
|
||||
| `playwright` | Page Object Model, MCP workflow, selectors | [SKILL.md](skills/playwright/SKILL.md) |
|
||||
| `pytest` | Fixtures, mocking, markers, parametrize | [SKILL.md](skills/pytest/SKILL.md) |
|
||||
@@ -60,11 +60,14 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
|--------|-------|
|
||||
| Add changelog entry for a PR or feature | `prowler-changelog` |
|
||||
| Adding DRF pagination or permissions | `django-drf` |
|
||||
| Adding a compliance output formatter (per-provider class + table dispatcher) | `prowler-compliance` |
|
||||
| Adding indexes or constraints to database tables | `django-migration-psql` |
|
||||
| Adding new providers | `prowler-provider` |
|
||||
| Adding privilege escalation detection queries | `prowler-attack-paths-query` |
|
||||
| Adding services to existing providers | `prowler-provider` |
|
||||
| After creating/modifying a skill | `skill-sync` |
|
||||
| App Router / Server Actions | `nextjs-15` |
|
||||
| App Router / Server Actions | `nextjs-16` |
|
||||
| Auditing check-to-requirement mappings as a cloud auditor | `prowler-compliance` |
|
||||
| Building AI chat features | `ai-sdk-5` |
|
||||
| Committing changes | `prowler-commit` |
|
||||
| Configuring MCP servers in agentic workflows | `gh-aw` |
|
||||
@@ -78,6 +81,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Creating a git commit | `prowler-commit` |
|
||||
| Creating new checks | `prowler-sdk-check` |
|
||||
| Creating new skills | `skill-creator` |
|
||||
| Creating or reviewing Django migrations | `django-migration-psql` |
|
||||
| Creating/modifying Prowler UI components | `prowler-ui` |
|
||||
| Creating/modifying models, views, serializers | `prowler-api` |
|
||||
| Creating/updating compliance frameworks | `prowler-compliance` |
|
||||
@@ -85,6 +89,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Debugging gh-aw compilation errors | `gh-aw` |
|
||||
| Fill .github/pull_request_template.md (Context/Description/Steps to review/Checklist) | `prowler-pr` |
|
||||
| Fixing bug | `tdd` |
|
||||
| Fixing compliance JSON bugs (duplicate IDs, empty Section, stale refs) | `prowler-compliance` |
|
||||
| General Prowler development questions | `prowler` |
|
||||
| Implementing JSON:API endpoints | `django-drf` |
|
||||
| Implementing feature | `tdd` |
|
||||
@@ -102,6 +107,8 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Review changelog format and conventions | `prowler-changelog` |
|
||||
| Reviewing JSON:API compliance | `jsonapi` |
|
||||
| Reviewing compliance framework PRs | `prowler-compliance-review` |
|
||||
| Running makemigrations or pgmakemigrations | `django-migration-psql` |
|
||||
| Syncing compliance framework with upstream catalog | `prowler-compliance` |
|
||||
| Testing RLS tenant isolation | `prowler-test-api` |
|
||||
| Testing hooks or utilities | `vitest` |
|
||||
| Troubleshoot why a skill is missing from AGENTS.md auto-invoke | `skill-sync` |
|
||||
@@ -129,6 +136,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Writing React components | `react-19` |
|
||||
| Writing TypeScript types/interfaces | `typescript` |
|
||||
| Writing Vitest tests | `vitest` |
|
||||
| Writing data backfill or data migration | `django-migration-psql` |
|
||||
| Writing documentation | `prowler-docs` |
|
||||
| Writing unit tests for UI | `vitest` |
|
||||
|
||||
@@ -140,9 +148,9 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,
|
||||
|
||||
| Component | Location | Tech Stack |
|
||||
|-----------|----------|------------|
|
||||
| SDK | `prowler/` | Python 3.10+, Poetry 2.3+ |
|
||||
| SDK | `prowler/` | Python 3.10+, uv |
|
||||
| API | `api/` | Django 5.1, DRF, Celery |
|
||||
| UI | `ui/` | Next.js 15, React 19, Tailwind 4 |
|
||||
| UI | `ui/` | Next.js 16, React 19, Tailwind 4 |
|
||||
| MCP Server | `mcp_server/` | FastMCP, Python 3.12+ |
|
||||
| Dashboard | `dashboard/` | Dash, Plotly |
|
||||
|
||||
@@ -152,13 +160,13 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,
|
||||
|
||||
```bash
|
||||
# Setup
|
||||
poetry install --with dev
|
||||
poetry run prek install
|
||||
uv sync
|
||||
uv run prek install
|
||||
|
||||
# Code quality
|
||||
poetry run make lint
|
||||
poetry run make format
|
||||
poetry run prek run --all-files
|
||||
uv run make lint
|
||||
uv run make format
|
||||
uv run prek run --all-files
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -78,7 +78,7 @@ WORKDIR /home/prowler
|
||||
# Copy necessary files
|
||||
COPY prowler/ /home/prowler/prowler/
|
||||
COPY dashboard/ /home/prowler/dashboard/
|
||||
COPY pyproject.toml /home/prowler
|
||||
COPY pyproject.toml uv.lock /home/prowler/
|
||||
COPY README.md /home/prowler/
|
||||
COPY prowler/providers/m365/lib/powershell/m365_powershell.py /home/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
|
||||
@@ -87,17 +87,17 @@ ENV HOME='/home/prowler'
|
||||
ENV PATH="${HOME}/.local/bin:${PATH}"
|
||||
#hadolint ignore=DL3013
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry==2.3.4
|
||||
pip install --no-cache-dir uv==0.11.14
|
||||
|
||||
RUN poetry install --compile && \
|
||||
rm -rf ~/.cache/pip
|
||||
RUN uv sync --compile-bytecode && \
|
||||
rm -rf ~/.cache/uv
|
||||
|
||||
# Install PowerShell modules
|
||||
RUN poetry run python prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
RUN .venv/bin/python prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
|
||||
# Remove deprecated dash dependencies
|
||||
RUN pip uninstall dash-html-components -y && \
|
||||
pip uninstall dash-core-components -y
|
||||
|
||||
USER prowler
|
||||
ENTRYPOINT ["poetry", "run", "prowler"]
|
||||
ENTRYPOINT [".venv/bin/prowler"]
|
||||
|
||||
@@ -23,7 +23,7 @@ format: ## Format Code
|
||||
|
||||
lint: ## Lint Code
|
||||
@echo "Running flake8..."
|
||||
flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
|
||||
flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude .venv,contrib
|
||||
@echo "Running black... "
|
||||
black --check .
|
||||
@echo "Running pylint..."
|
||||
@@ -35,7 +35,7 @@ pypi-clean: ## Delete the distribution files
|
||||
|
||||
pypi-build: ## Build package
|
||||
$(MAKE) pypi-clean && \
|
||||
poetry build
|
||||
uv build
|
||||
|
||||
pypi-upload: ## Upload package
|
||||
python3 -m twine upload --repository pypi dist/*
|
||||
@@ -56,4 +56,3 @@ run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, MCP,
|
||||
|
||||
##@ Development Environment
|
||||
build-and-run-api-dev: build-no-cache-dev run-api-dev
|
||||
|
||||
|
||||
@@ -117,9 +117,10 @@ Every AWS provider scan will enqueue an Attack Paths ingestion job automatically
|
||||
| MongoDB Atlas | 10 | 3 | 0 | 8 | Official | UI, API, CLI |
|
||||
| LLM | [See `promptfoo` docs.](https://www.promptfoo.dev/docs/red-team/plugins/) | N/A | N/A | N/A | Official | CLI |
|
||||
| Image | N/A | N/A | N/A | N/A | Official | CLI, API |
|
||||
| Google Workspace | 25 | 4 | 2 | 4 | Official | CLI |
|
||||
| Google Workspace | 25 | 4 | 2 | 4 | Official | UI, API, CLI |
|
||||
| OpenStack | 34 | 5 | 0 | 9 | Official | UI, API, CLI |
|
||||
| Vercel | 26 | 6 | 0 | 5 | Official | CLI |
|
||||
| Vercel | 26 | 6 | 0 | 5 | Official | UI, API, CLI |
|
||||
| Okta | 1 | 1 | 0 | 1 | Official | CLI |
|
||||
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |
|
||||
|
||||
> [!Note]
|
||||
@@ -176,7 +177,7 @@ You can find more information in the [Troubleshooting](./docs/troubleshooting.md
|
||||
**Requirements**
|
||||
|
||||
* `git` installed.
|
||||
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* `uv` installed: [uv installation](https://docs.astral.sh/uv/getting-started/installation/).
|
||||
* `pnpm` installed: [pnpm installation](https://pnpm.io/installation).
|
||||
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
|
||||
|
||||
@@ -185,8 +186,8 @@ You can find more information in the [Troubleshooting](./docs/troubleshooting.md
|
||||
``` console
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler/api
|
||||
poetry install
|
||||
eval $(poetry env activate)
|
||||
uv sync
|
||||
source .venv/bin/activate
|
||||
set -a
|
||||
source .env
|
||||
docker compose up postgres valkey -d
|
||||
@@ -194,11 +195,6 @@ cd src/backend
|
||||
python manage.py migrate --database admin
|
||||
gunicorn -c config/guniconf.py config.wsgi:application
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
|
||||
>
|
||||
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
|
||||
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
|
||||
|
||||
> After completing the setup, access the API documentation at http://localhost:8080/api/v1/docs.
|
||||
|
||||
@@ -207,8 +203,8 @@ gunicorn -c config/guniconf.py config.wsgi:application
|
||||
``` console
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler/api
|
||||
poetry install
|
||||
eval $(poetry env activate)
|
||||
uv sync
|
||||
source .venv/bin/activate
|
||||
set -a
|
||||
source .env
|
||||
cd src/backend
|
||||
@@ -220,8 +216,8 @@ python -m celery -A config.celery worker -l info -E
|
||||
``` console
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler/api
|
||||
poetry install
|
||||
eval $(poetry env activate)
|
||||
uv sync
|
||||
source .venv/bin/activate
|
||||
set -a
|
||||
source .env
|
||||
cd src/backend
|
||||
@@ -282,24 +278,18 @@ The container images are available here:
|
||||
|
||||
### From GitHub
|
||||
|
||||
Python >=3.10, <3.13 is required with pip and Poetry:
|
||||
Python >=3.10, <3.13 is required with [uv](https://docs.astral.sh/uv/):
|
||||
|
||||
``` console
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
eval $(poetry env activate)
|
||||
poetry install
|
||||
uv sync
|
||||
source .venv/bin/activate
|
||||
python prowler-cli.py -v
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> To clone Prowler on Windows, configure Git to support long file paths by running the following command: `git config core.longpaths true`.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
|
||||
>
|
||||
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
|
||||
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
|
||||
|
||||
# 🛡️ GitHub Action
|
||||
|
||||
The official **Prowler GitHub Action** runs Prowler scans in your GitHub workflows using the official [`prowlercloud/prowler`](https://hub.docker.com/r/prowlercloud/prowler) Docker image. Scans run on any [supported provider](https://docs.prowler.com/user-guide/providers/), with optional [`--push-to-cloud`](https://docs.prowler.com/user-guide/tutorials/prowler-app-import-findings) to send findings to Prowler Cloud and optional SARIF upload so findings show up in the repo's **Security → Code scanning** tab and as inline PR annotations.
|
||||
|
||||
@@ -167,7 +167,7 @@ runs:
|
||||
|
||||
- name: Upload SARIF to GitHub Code Scanning
|
||||
if: always() && inputs.upload-sarif == 'true' && steps.find-sarif.outputs.sarif_path != ''
|
||||
uses: github/codeql-action/upload-sarif@d4b3ca9fa7f69d38bfcd667bdc45bc373d16277e # v4
|
||||
uses: github/codeql-action/upload-sarif@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4.35.4
|
||||
with:
|
||||
sarif_file: ${{ steps.find-sarif.outputs.sarif_path }}
|
||||
category: ${{ inputs.sarif-category }}
|
||||
|
||||
@@ -124,24 +124,24 @@ api/src/backend/
|
||||
|
||||
```bash
|
||||
# Development
|
||||
poetry run python src/backend/manage.py runserver
|
||||
poetry run celery -A config.celery worker -l INFO
|
||||
uv run python src/backend/manage.py runserver
|
||||
uv run celery -A config.celery worker -l INFO
|
||||
|
||||
# Database
|
||||
poetry run python src/backend/manage.py makemigrations
|
||||
poetry run python src/backend/manage.py migrate
|
||||
uv run python src/backend/manage.py makemigrations
|
||||
uv run python src/backend/manage.py migrate
|
||||
|
||||
# Testing & Linting
|
||||
poetry run pytest -x --tb=short
|
||||
poetry run make lint
|
||||
uv run pytest -x --tb=short
|
||||
uv run make lint
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## QA CHECKLIST
|
||||
|
||||
- [ ] `poetry run pytest` passes
|
||||
- [ ] `poetry run make lint` passes
|
||||
- [ ] `uv run pytest` passes
|
||||
- [ ] `uv run make lint` passes
|
||||
- [ ] Migrations created if models changed
|
||||
- [ ] New endpoints have `@extend_schema` decorators
|
||||
- [ ] RLS properly applied for tenant data
|
||||
|
||||
@@ -10,10 +10,19 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- Replace `poetry` with `uv` (`0.11.14`) as the API package manager; migrate `pyproject.toml` to `[dependency-groups]` and regenerate as `uv.lock` [(#10775)](https://github.com/prowler-cloud/prowler/pull/10775)
|
||||
- Remove orphaned `gin_resources_search_idx` declaration from `Resource.Meta.indexes` (DB index dropped in `0072_drop_unused_indexes`) [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.2] (Prowler UNRELEASED)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- Attack Paths: BEDROCK-001 and BEDROCK-002 now target roles trusting `bedrock-agentcore.amazonaws.com` instead of `bedrock.amazonaws.com`, eliminating false positives against regular Bedrock service roles (Agents, Knowledge Bases, model invocation) [(#11141)](https://github.com/prowler-cloud/prowler/pull/11141)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.1] (Prowler v5.26.1)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
@@ -14,6 +14,7 @@ ENV ZIZMOR_VERSION=${ZIZMOR_VERSION}
|
||||
# hadolint ignore=DL3008
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget \
|
||||
git \
|
||||
libicu72 \
|
||||
gcc \
|
||||
g++ \
|
||||
@@ -88,18 +89,18 @@ WORKDIR /home/prowler
|
||||
# Ensure output directory exists
|
||||
RUN mkdir -p /tmp/prowler_api_output
|
||||
|
||||
COPY pyproject.toml ./
|
||||
COPY pyproject.toml uv.lock ./
|
||||
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry==2.3.4
|
||||
pip install --no-cache-dir uv==0.11.14
|
||||
|
||||
ENV PATH="/home/prowler/.local/bin:$PATH"
|
||||
|
||||
# Add `--no-root` to avoid installing the current project as a package
|
||||
RUN poetry install --no-root && \
|
||||
rm -rf ~/.cache/pip
|
||||
# Add `--no-install-project` to avoid installing the current project as a package
|
||||
RUN uv sync --no-install-project && \
|
||||
rm -rf ~/.cache/uv
|
||||
|
||||
RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
|
||||
RUN .venv/bin/python .venv/lib/python3.12/site-packages/prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
|
||||
COPY src/backend/ ./backend/
|
||||
COPY docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
|
||||
@@ -25,12 +25,11 @@ If you don’t set `DJANGO_TOKEN_SIGNING_KEY` or `DJANGO_TOKEN_VERIFYING_KEY`, t
|
||||
**Important note**: Every Prowler version (or repository branches and tags) could have different variables set in its `.env` file. Please use the `.env` file that corresponds with each version.
|
||||
|
||||
## Local deployment
|
||||
Keep in mind if you export the `.env` file to use it with local deployment that you will have to do it within the context of the Poetry interpreter, not before. Otherwise, variables will not be loaded properly.
|
||||
Keep in mind if you export the `.env` file to use it with local deployment that you will have to do it within the context of the virtual environment, not before. Otherwise, variables will not be loaded properly.
|
||||
|
||||
To do this, you can run:
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
set -a
|
||||
source .env
|
||||
```
|
||||
@@ -78,7 +77,7 @@ docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
|
||||
|
||||
## Local deployment
|
||||
|
||||
To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.
|
||||
To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `uv` and `docker compose` are installed.
|
||||
|
||||
### Clone the repository
|
||||
|
||||
@@ -90,11 +89,10 @@ git clone https://github.com/prowler-cloud/api.git
|
||||
git clone git@github.com:prowler-cloud/api.git
|
||||
|
||||
```
|
||||
### Install all dependencies with Poetry
|
||||
### Install all dependencies with uv
|
||||
|
||||
```console
|
||||
poetry install
|
||||
poetry shell
|
||||
uv sync
|
||||
```
|
||||
|
||||
## Start the PostgreSQL Database and Valkey
|
||||
@@ -139,7 +137,7 @@ gunicorn -c config/guniconf.py config.wsgi:application
|
||||
|
||||
## Local deployment
|
||||
|
||||
To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.
|
||||
To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `uv` and `docker compose` are installed.
|
||||
|
||||
### Clone the repository
|
||||
|
||||
@@ -165,11 +163,10 @@ docker compose up postgres valkey -d
|
||||
|
||||
### Install the Python dependencies
|
||||
|
||||
> You must have Poetry installed
|
||||
> You must have uv installed
|
||||
|
||||
```console
|
||||
poetry install
|
||||
poetry shell
|
||||
uv sync
|
||||
```
|
||||
|
||||
### Apply migrations
|
||||
@@ -246,9 +243,8 @@ docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
|
||||
For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
cd src/backend
|
||||
python manage.py migrate --database admin
|
||||
uv run python manage.py migrate --database admin
|
||||
```
|
||||
|
||||
## Apply fixtures
|
||||
@@ -256,9 +252,8 @@ python manage.py migrate --database admin
|
||||
Fixtures are used to populate the database with initial development data.
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
cd src/backend
|
||||
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
|
||||
uv run python manage.py loaddata api/fixtures/0_dev_users.json --database admin
|
||||
```
|
||||
|
||||
> The default credentials are `dev@prowler.com:Thisisapassword123@` or `dev2@prowler.com:Thisisapassword123@`
|
||||
@@ -270,9 +265,8 @@ Note that the tests will fail if you use the same `.env` file as the development
|
||||
For best results, run in a new shell with no environment variables set.
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
cd src/backend
|
||||
pytest
|
||||
uv run pytest
|
||||
```
|
||||
|
||||
# Custom commands
|
||||
@@ -284,8 +278,7 @@ Django provides a way to create custom commands that can be run from the command
|
||||
To run a custom command, you need to be in the `prowler/api/src/backend` directory and run:
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
python manage.py <command_name>
|
||||
uv run python manage.py <command_name>
|
||||
```
|
||||
|
||||
## Generate dummy data
|
||||
@@ -308,7 +301,7 @@ This command creates, for a given tenant, a provider, scan and a set of findings
|
||||
### Example
|
||||
|
||||
```console
|
||||
~/backend $ poetry run python manage.py findings --tenant
|
||||
~/backend $ uv run python manage.py findings --tenant
|
||||
fffb1893-3fc7-4623-a5d9-fae47da1c528 --findings 25000 --re
|
||||
sources 1000 --batch 5000 --alias test-script
|
||||
|
||||
|
||||
@@ -5,9 +5,9 @@ apply_migrations() {
|
||||
echo "Applying database migrations..."
|
||||
|
||||
# Fix Inconsistent migration history after adding sites app
|
||||
poetry run python manage.py check_and_fix_socialaccount_sites_migration --database admin
|
||||
uv run python manage.py check_and_fix_socialaccount_sites_migration --database admin
|
||||
|
||||
poetry run python manage.py migrate --database admin
|
||||
uv run python manage.py migrate --database admin
|
||||
}
|
||||
|
||||
apply_fixtures() {
|
||||
@@ -15,19 +15,19 @@ apply_fixtures() {
|
||||
for fixture in api/fixtures/dev/*.json; do
|
||||
if [ -f "$fixture" ]; then
|
||||
echo "Loading $fixture"
|
||||
poetry run python manage.py loaddata "$fixture" --database admin
|
||||
uv run python manage.py loaddata "$fixture" --database admin
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
start_dev_server() {
|
||||
echo "Starting the development server..."
|
||||
poetry run python manage.py runserver 0.0.0.0:"${DJANGO_PORT:-8080}"
|
||||
uv run python manage.py runserver 0.0.0.0:"${DJANGO_PORT:-8080}"
|
||||
}
|
||||
|
||||
start_prod_server() {
|
||||
echo "Starting the Gunicorn server..."
|
||||
poetry run gunicorn -c config/guniconf.py config.wsgi:application
|
||||
uv run gunicorn -c config/guniconf.py config.wsgi:application
|
||||
}
|
||||
|
||||
resolve_worker_hostname() {
|
||||
@@ -47,7 +47,7 @@ resolve_worker_hostname() {
|
||||
|
||||
start_worker() {
|
||||
echo "Starting the worker..."
|
||||
poetry run python -m celery -A config.celery worker \
|
||||
uv run python -m celery -A config.celery worker \
|
||||
-n "$(resolve_worker_hostname)" \
|
||||
-l "${DJANGO_LOGGING_LEVEL:-info}" \
|
||||
-Q celery,scans,scan-reports,deletion,backfill,overview,integrations,compliance,attack-paths-scans \
|
||||
@@ -56,7 +56,7 @@ start_worker() {
|
||||
|
||||
start_worker_beat() {
|
||||
echo "Starting the worker-beat..."
|
||||
poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
|
||||
uv run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
|
||||
}
|
||||
|
||||
manage_db_partitions() {
|
||||
@@ -64,7 +64,7 @@ manage_db_partitions() {
|
||||
echo "Managing DB partitions..."
|
||||
# For now we skip the deletion of partitions until we define the data retention policy
|
||||
# --yes auto approves the operation without the need of an interactive terminal
|
||||
poetry run python manage.py pgpartition --using admin --skip-delete --yes
|
||||
uv run python manage.py pgpartition --using admin --skip-delete --yes
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,24 @@
|
||||
[build-system]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
requires = ["poetry-core"]
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"bandit==1.7.9",
|
||||
"coverage==7.5.4",
|
||||
"django-silk==5.3.2",
|
||||
"docker==7.1.0",
|
||||
"filelock==3.20.3",
|
||||
"freezegun==1.5.1",
|
||||
"mypy==1.10.1",
|
||||
"pylint==3.2.5",
|
||||
"pytest==9.0.3",
|
||||
"pytest-cov==5.0.0",
|
||||
"pytest-django==4.8.0",
|
||||
"pytest-env==1.1.3",
|
||||
"pytest-randomly==3.15.0",
|
||||
"pytest-xdist==3.6.1",
|
||||
"ruff==0.5.0",
|
||||
"tqdm==4.67.1",
|
||||
"vulture==2.14",
|
||||
"prek==0.3.9"
|
||||
]
|
||||
|
||||
[project]
|
||||
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
|
||||
@@ -52,26 +70,374 @@ package-mode = false
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.28.0"
|
||||
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
bandit = "1.7.9"
|
||||
coverage = "7.5.4"
|
||||
django-silk = "5.3.2"
|
||||
docker = "7.1.0"
|
||||
filelock = "3.20.3"
|
||||
freezegun = "1.5.1"
|
||||
mypy = "1.10.1"
|
||||
prek = "0.3.9"
|
||||
pylint = "3.2.5"
|
||||
pytest = "9.0.3"
|
||||
pytest-cov = "5.0.0"
|
||||
pytest-django = "4.8.0"
|
||||
pytest-env = "1.1.3"
|
||||
pytest-randomly = "3.15.0"
|
||||
pytest-xdist = "3.6.1"
|
||||
ruff = "0.5.0"
|
||||
safety = "3.7.0"
|
||||
tqdm = "4.67.1"
|
||||
vulture = "2.14"
|
||||
[tool.uv]
|
||||
# Transitive pins matching master to avoid silent drift; bump deliberately.
|
||||
constraint-dependencies = [
|
||||
"about-time==4.2.1",
|
||||
"adal==1.2.7",
|
||||
"aioboto3==15.5.0",
|
||||
"aiobotocore==2.25.1",
|
||||
"aiofiles==24.1.0",
|
||||
"aiohappyeyeballs==2.6.1",
|
||||
"aiohttp==3.13.5",
|
||||
"aioitertools==0.13.0",
|
||||
"aiosignal==1.4.0",
|
||||
"alibabacloud-actiontrail20200706==2.4.1",
|
||||
"alibabacloud-credentials==1.0.3",
|
||||
"alibabacloud-credentials-api==1.0.0",
|
||||
"alibabacloud-cs20151215==6.1.0",
|
||||
"alibabacloud-darabonba-array==0.1.0",
|
||||
"alibabacloud-darabonba-encode-util==0.0.2",
|
||||
"alibabacloud-darabonba-map==0.0.1",
|
||||
"alibabacloud-darabonba-signature-util==0.0.4",
|
||||
"alibabacloud-darabonba-string==0.0.4",
|
||||
"alibabacloud-darabonba-time==0.0.1",
|
||||
"alibabacloud-ecs20140526==7.2.5",
|
||||
"alibabacloud-endpoint-util==0.0.4",
|
||||
"alibabacloud-gateway-oss==0.0.17",
|
||||
"alibabacloud-gateway-oss-util==0.0.3",
|
||||
"alibabacloud-gateway-sls==0.4.0",
|
||||
"alibabacloud-gateway-sls-util==0.4.0",
|
||||
"alibabacloud-gateway-spi==0.0.3",
|
||||
"alibabacloud-openapi-util==0.2.4",
|
||||
"alibabacloud-oss-util==0.0.6",
|
||||
"alibabacloud-oss20190517==1.0.6",
|
||||
"alibabacloud-ram20150501==1.2.0",
|
||||
"alibabacloud-rds20140815==12.0.0",
|
||||
"alibabacloud-sas20181203==6.1.0",
|
||||
"alibabacloud-sls20201230==5.9.0",
|
||||
"alibabacloud-sts20150401==1.1.6",
|
||||
"alibabacloud-tea==0.4.3",
|
||||
"alibabacloud-tea-openapi==0.4.4",
|
||||
"alibabacloud-tea-util==0.3.14",
|
||||
"alibabacloud-tea-xml==0.0.3",
|
||||
"alibabacloud-vpc20160428==6.13.0",
|
||||
"alive-progress==3.3.0",
|
||||
"aliyun-log-fastpb==0.2.0",
|
||||
"amqp==5.3.1",
|
||||
"annotated-types==0.7.0",
|
||||
"anyio==4.12.1",
|
||||
"applicationinsights==0.11.10",
|
||||
"apscheduler==3.11.2",
|
||||
"argcomplete==3.5.3",
|
||||
"asgiref==3.11.0",
|
||||
"astroid==3.2.4",
|
||||
"async-timeout==5.0.1",
|
||||
"attrs==25.4.0",
|
||||
"authlib==1.6.9",
|
||||
"autopep8==2.3.2",
|
||||
"awsipranges==0.3.3",
|
||||
"azure-cli-core==2.83.0",
|
||||
"azure-cli-telemetry==1.1.0",
|
||||
"azure-common==1.1.28",
|
||||
"azure-core==1.38.1",
|
||||
"azure-identity==1.21.0",
|
||||
"azure-keyvault-certificates==4.10.0",
|
||||
"azure-keyvault-keys==4.10.0",
|
||||
"azure-keyvault-secrets==4.10.0",
|
||||
"azure-mgmt-apimanagement==5.0.0",
|
||||
"azure-mgmt-applicationinsights==4.1.0",
|
||||
"azure-mgmt-authorization==4.0.0",
|
||||
"azure-mgmt-compute==34.0.0",
|
||||
"azure-mgmt-containerinstance==10.1.0",
|
||||
"azure-mgmt-containerregistry==12.0.0",
|
||||
"azure-mgmt-containerservice==34.1.0",
|
||||
"azure-mgmt-core==1.6.0",
|
||||
"azure-mgmt-cosmosdb==9.7.0",
|
||||
"azure-mgmt-databricks==2.0.0",
|
||||
"azure-mgmt-datafactory==9.2.0",
|
||||
"azure-mgmt-eventgrid==10.4.0",
|
||||
"azure-mgmt-eventhub==11.2.0",
|
||||
"azure-mgmt-keyvault==10.3.1",
|
||||
"azure-mgmt-loganalytics==12.0.0",
|
||||
"azure-mgmt-logic==10.0.0",
|
||||
"azure-mgmt-monitor==6.0.2",
|
||||
"azure-mgmt-network==28.1.0",
|
||||
"azure-mgmt-postgresqlflexibleservers==1.1.0",
|
||||
"azure-mgmt-rdbms==10.1.0",
|
||||
"azure-mgmt-recoveryservices==3.1.0",
|
||||
"azure-mgmt-recoveryservicesbackup==9.2.0",
|
||||
"azure-mgmt-resource==24.0.0",
|
||||
"azure-mgmt-search==9.1.0",
|
||||
"azure-mgmt-security==7.0.0",
|
||||
"azure-mgmt-sql==3.0.1",
|
||||
"azure-mgmt-storage==22.1.1",
|
||||
"azure-mgmt-subscription==3.1.1",
|
||||
"azure-mgmt-synapse==2.0.0",
|
||||
"azure-mgmt-web==8.0.0",
|
||||
"azure-monitor-query==2.0.0",
|
||||
"azure-storage-blob==12.24.1",
|
||||
"azure-synapse-artifacts==0.21.0",
|
||||
"backoff==2.2.1",
|
||||
"bandit==1.7.9",
|
||||
"billiard==4.2.4",
|
||||
"blinker==1.9.0",
|
||||
"boto3==1.40.61",
|
||||
"botocore==1.40.61",
|
||||
"cartography==0.135.0",
|
||||
"celery==5.6.2",
|
||||
"certifi==2026.1.4",
|
||||
"cffi==2.0.0",
|
||||
"charset-normalizer==3.4.4",
|
||||
"circuitbreaker==2.1.3",
|
||||
"click==8.3.1",
|
||||
"click-didyoumean==0.3.1",
|
||||
"click-plugins==1.1.1.2",
|
||||
"click-repl==0.3.0",
|
||||
"cloudflare==4.3.1",
|
||||
"colorama==0.4.6",
|
||||
"contextlib2==21.6.0",
|
||||
"contourpy==1.3.3",
|
||||
"coverage==7.5.4",
|
||||
"cron-descriptor==1.4.5",
|
||||
"crowdstrike-falconpy==1.6.0",
|
||||
"cryptography==46.0.7",
|
||||
"cycler==0.12.1",
|
||||
"darabonba-core==1.0.5",
|
||||
"dash==3.1.1",
|
||||
"dash-bootstrap-components==2.0.3",
|
||||
"debugpy==1.8.20",
|
||||
"decorator==5.2.1",
|
||||
"defusedxml==0.7.1",
|
||||
"detect-secrets==1.5.0",
|
||||
"dill==0.4.1",
|
||||
"distro==1.9.0",
|
||||
"dj-rest-auth==7.0.1",
|
||||
"django==5.1.15",
|
||||
"django-allauth==65.15.0",
|
||||
"django-celery-beat==2.9.0",
|
||||
"django-celery-results==2.6.0",
|
||||
"django-cors-headers==4.4.0",
|
||||
"django-environ==0.11.2",
|
||||
"django-filter==24.3",
|
||||
"django-guid==3.5.0",
|
||||
"django-postgres-extra==2.0.9",
|
||||
"django-silk==5.3.2",
|
||||
"django-timezone-field==7.2.1",
|
||||
"djangorestframework==3.15.2",
|
||||
"djangorestframework-jsonapi==7.0.2",
|
||||
"djangorestframework-simplejwt==5.5.1",
|
||||
"dnspython==2.8.0",
|
||||
"docker==7.1.0",
|
||||
"dogpile-cache==1.5.0",
|
||||
"dparse==0.6.4",
|
||||
"drf-extensions==0.8.0",
|
||||
"drf-nested-routers==0.95.0",
|
||||
"drf-simple-apikey==2.2.1",
|
||||
"drf-spectacular==0.27.2",
|
||||
"drf-spectacular-jsonapi==0.5.1",
|
||||
"dulwich==0.23.0",
|
||||
"duo-client==5.5.0",
|
||||
"durationpy==0.10",
|
||||
"email-validator==2.2.0",
|
||||
"execnet==2.1.2",
|
||||
"filelock==3.20.3",
|
||||
"flask==3.1.3",
|
||||
"fonttools==4.62.1",
|
||||
"freezegun==1.5.1",
|
||||
"frozenlist==1.8.0",
|
||||
"gevent==25.9.1",
|
||||
"google-api-core==2.29.0",
|
||||
"google-api-python-client==2.163.0",
|
||||
"google-auth==2.48.0",
|
||||
"google-auth-httplib2==0.2.0",
|
||||
"google-cloud-access-context-manager==0.3.0",
|
||||
"google-cloud-asset==4.2.0",
|
||||
"google-cloud-org-policy==1.16.0",
|
||||
"google-cloud-os-config==1.23.0",
|
||||
"google-cloud-resource-manager==1.16.0",
|
||||
"googleapis-common-protos==1.72.0",
|
||||
"gprof2dot==2025.4.14",
|
||||
"graphemeu==0.7.2",
|
||||
"greenlet==3.3.1",
|
||||
"grpc-google-iam-v1==0.14.3",
|
||||
"grpcio==1.76.0",
|
||||
"grpcio-status==1.76.0",
|
||||
"gunicorn==23.0.0",
|
||||
"h11==0.16.0",
|
||||
"h2==4.3.0",
|
||||
"hpack==4.1.0",
|
||||
"httpcore==1.0.9",
|
||||
"httplib2==0.31.2",
|
||||
"httpx==0.28.1",
|
||||
"humanfriendly==10.0",
|
||||
"hyperframe==6.1.0",
|
||||
"iamdata==0.1.202602021",
|
||||
"idna==3.11",
|
||||
"importlib-metadata==8.7.1",
|
||||
"inflection==0.5.1",
|
||||
"iniconfig==2.3.0",
|
||||
"iso8601==2.1.0",
|
||||
"isodate==0.7.2",
|
||||
"isort==5.13.2",
|
||||
"itsdangerous==2.2.0",
|
||||
"jinja2==3.1.6",
|
||||
"jiter==0.13.0",
|
||||
"jmespath==1.1.0",
|
||||
"joblib==1.5.3",
|
||||
"jsonpatch==1.33",
|
||||
"jsonpickle==4.1.1",
|
||||
"jsonpointer==3.0.0",
|
||||
"jsonschema==4.23.0",
|
||||
"jsonschema-specifications==2025.9.1",
|
||||
"keystoneauth1==5.13.0",
|
||||
"kiwisolver==1.4.9",
|
||||
"knack==0.11.0",
|
||||
"kombu==5.6.2",
|
||||
"kubernetes==32.0.1",
|
||||
"lxml==5.3.2",
|
||||
"lz4==4.4.5",
|
||||
"markdown==3.10.2",
|
||||
"markdown-it-py==4.0.0",
|
||||
"markupsafe==3.0.3",
|
||||
"marshmallow==4.3.0",
|
||||
"matplotlib==3.10.8",
|
||||
"mccabe==0.7.0",
|
||||
"mdurl==0.1.2",
|
||||
"microsoft-kiota-abstractions==1.9.2",
|
||||
"microsoft-kiota-authentication-azure==1.9.2",
|
||||
"microsoft-kiota-http==1.9.2",
|
||||
"microsoft-kiota-serialization-form==1.9.2",
|
||||
"microsoft-kiota-serialization-json==1.9.2",
|
||||
"microsoft-kiota-serialization-multipart==1.9.2",
|
||||
"microsoft-kiota-serialization-text==1.9.2",
|
||||
"microsoft-security-utilities-secret-masker==1.0.0b4",
|
||||
"msal==1.35.0b1",
|
||||
"msal-extensions==1.2.0",
|
||||
"msgraph-core==1.3.8",
|
||||
"msgraph-sdk==1.55.0",
|
||||
"msrest==0.7.1",
|
||||
"msrestazure==0.6.4.post1",
|
||||
"multidict==6.7.1",
|
||||
"mypy==1.10.1",
|
||||
"mypy-extensions==1.1.0",
|
||||
"narwhals==2.16.0",
|
||||
"neo4j==6.1.0",
|
||||
"nest-asyncio==1.6.0",
|
||||
"nltk==3.9.4",
|
||||
"numpy==2.0.2",
|
||||
"oauthlib==3.3.1",
|
||||
"oci==2.169.0",
|
||||
"openai==1.109.1",
|
||||
"openstacksdk==4.2.0",
|
||||
"opentelemetry-api==1.39.1",
|
||||
"opentelemetry-sdk==1.39.1",
|
||||
"opentelemetry-semantic-conventions==0.60b1",
|
||||
"os-service-types==1.8.2",
|
||||
"packageurl-python==0.17.6",
|
||||
"packaging==26.0",
|
||||
"pagerduty==6.1.0",
|
||||
"pandas==2.2.3",
|
||||
"pbr==7.0.3",
|
||||
"pillow==12.2.0",
|
||||
"pkginfo==1.12.1.2",
|
||||
"platformdirs==4.5.1",
|
||||
"plotly==6.5.2",
|
||||
"pluggy==1.6.0",
|
||||
"policyuniverse==1.5.1.20231109",
|
||||
"portalocker==2.10.1",
|
||||
"prek==0.3.9",
|
||||
"prompt-toolkit==3.0.52",
|
||||
"propcache==0.4.1",
|
||||
"proto-plus==1.27.0",
|
||||
"protobuf==6.33.5",
|
||||
"psutil==7.2.2",
|
||||
"psycopg2-binary==2.9.9",
|
||||
"py-deviceid==0.1.1",
|
||||
"py-iam-expand==0.1.0",
|
||||
"py-ocsf-models==0.8.1",
|
||||
"pyasn1==0.6.3",
|
||||
"pyasn1-modules==0.4.2",
|
||||
"pycodestyle==2.14.0",
|
||||
"pycparser==3.0",
|
||||
"pydantic==2.12.5",
|
||||
"pydantic-core==2.41.5",
|
||||
"pygithub==2.8.0",
|
||||
"pygments==2.20.0",
|
||||
"pyjwt==2.12.1",
|
||||
"pylint==3.2.5",
|
||||
"pymsalruntime==0.18.1",
|
||||
"pynacl==1.6.2",
|
||||
"pyopenssl==26.0.0",
|
||||
"pyparsing==3.3.2",
|
||||
"pyreadline3==3.5.4",
|
||||
"pysocks==1.7.1",
|
||||
"pytest==9.0.3",
|
||||
"pytest-celery==1.3.0",
|
||||
"pytest-cov==5.0.0",
|
||||
"pytest-django==4.8.0",
|
||||
"pytest-docker-tools==3.1.9",
|
||||
"pytest-env==1.1.3",
|
||||
"pytest-randomly==3.15.0",
|
||||
"pytest-xdist==3.6.1",
|
||||
"python-crontab==3.3.0",
|
||||
"python-dateutil==2.9.0.post0",
|
||||
"python-digitalocean==1.17.0",
|
||||
"python3-saml==1.16.0",
|
||||
"pytz==2025.1",
|
||||
"pywin32==311",
|
||||
"pyyaml==6.0.3",
|
||||
"redis==7.1.0",
|
||||
"referencing==0.37.0",
|
||||
"regex==2026.1.15",
|
||||
"reportlab==4.4.10",
|
||||
"requests==2.33.1",
|
||||
"requests-file==3.0.1",
|
||||
"requests-oauthlib==2.0.0",
|
||||
"requestsexceptions==1.4.0",
|
||||
"retrying==1.4.2",
|
||||
"rich==14.3.2",
|
||||
"rpds-py==0.30.0",
|
||||
"rsa==4.9.1",
|
||||
"ruamel-yaml==0.19.1",
|
||||
"ruff==0.5.0",
|
||||
"s3transfer==0.14.0",
|
||||
"scaleway==2.10.3",
|
||||
"scaleway-core==2.10.3",
|
||||
"schema==0.7.5",
|
||||
"sentry-sdk==2.56.0",
|
||||
"setuptools==80.10.2",
|
||||
"shellingham==1.5.4",
|
||||
"shodan==1.31.0",
|
||||
"six==1.17.0",
|
||||
"slack-sdk==3.39.0",
|
||||
"sniffio==1.3.1",
|
||||
"sqlparse==0.5.5",
|
||||
"statsd==4.0.1",
|
||||
"std-uritemplate==2.0.8",
|
||||
"stevedore==5.6.0",
|
||||
"tabulate==0.9.0",
|
||||
"tenacity==9.1.2",
|
||||
"tldextract==5.3.1",
|
||||
"tomlkit==0.14.0",
|
||||
"tqdm==4.67.1",
|
||||
"typer==0.21.1",
|
||||
"types-aiobotocore-ecr==3.1.1",
|
||||
"typing-extensions==4.15.0",
|
||||
"typing-inspection==0.4.2",
|
||||
"tzdata==2025.3",
|
||||
"tzlocal==5.3.1",
|
||||
"uritemplate==4.2.0",
|
||||
"urllib3==2.6.3",
|
||||
"uuid6==2024.7.10",
|
||||
"vine==5.1.0",
|
||||
"vulture==2.14",
|
||||
"wcwidth==0.5.3",
|
||||
"websocket-client==1.9.0",
|
||||
"werkzeug==3.1.7",
|
||||
"workos==6.0.4",
|
||||
"wrapt==1.17.3",
|
||||
"xlsxwriter==3.2.9",
|
||||
"xmlsec==1.3.14",
|
||||
"xmltodict==1.0.2",
|
||||
"yarl==1.22.0",
|
||||
"zipp==3.23.0",
|
||||
"zope-event==6.1",
|
||||
"zope-interface==8.2",
|
||||
"zstd==1.5.7.3"
|
||||
]
|
||||
# prowler@master needs okta==3.4.2; cartography 0.135.0 declares okta<1.0.0 for an
|
||||
# integration prowler does not import.
|
||||
override-dependencies = [
|
||||
"okta==3.4.2"
|
||||
]
|
||||
|
||||
@@ -484,8 +484,8 @@ AWS_BEDROCK_PRIVESC_PASSROLE_CODE_INTERPRETER = AttackPathsQueryDefinition(
|
||||
OR action = '*'
|
||||
)
|
||||
|
||||
// Find roles that trust Bedrock service (can be passed to Bedrock)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
|
||||
// Find roles that trust the Bedrock AgentCore service (can be passed to a code interpreter)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
|
||||
WHERE any(resource IN stmt_passrole.resource WHERE
|
||||
resource = '*'
|
||||
OR target_role.arn CONTAINS resource
|
||||
@@ -536,8 +536,8 @@ AWS_BEDROCK_PRIVESC_INVOKE_CODE_INTERPRETER = AttackPathsQueryDefinition(
|
||||
OR action = '*'
|
||||
)
|
||||
|
||||
// Find roles that trust Bedrock service (already attached to existing code interpreters)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
|
||||
// Find roles that trust the Bedrock AgentCore service (already attached to existing code interpreters)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
|
||||
|
||||
WITH collect(path_principal) + collect(path_target) AS paths
|
||||
UNWIND paths AS p
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
#!/bin/bash
|
||||
# Run Prowler against All AWS Accounts in an AWS Organization
|
||||
|
||||
# Activate Poetry Environment
|
||||
eval "$(poetry env activate)"
|
||||
# Activate uv-managed virtualenv
|
||||
# shellcheck disable=SC1091
|
||||
source .venv/bin/activate
|
||||
|
||||
# Show Prowler Version
|
||||
prowler -v
|
||||
|
||||
@@ -0,0 +1,335 @@
|
||||
# AWS Inventory Connectivity Graph
|
||||
|
||||
A community-contributed tool that generates interactive connectivity graphs from Prowler AWS scans, visualizing relationships between AWS resources with zero additional API calls.
|
||||
|
||||
## Overview
|
||||
|
||||
This tool extends Prowler by producing two artifacts after a scan completes:
|
||||
|
||||
- **`<output>.inventory.json`** – Machine-readable graph (nodes + edges)
|
||||
- **`<output>.inventory.html`** – Interactive D3.js force-directed visualization
|
||||
|
||||
### Why?
|
||||
|
||||
Prowler's existing outputs (CSV, ASFF, OCSF, HTML) report individual check findings but provide no cross-service topology view. Security engineers need to understand **how** resources are connected—which Lambda functions sit inside which VPC, which IAM roles can be assumed by which services, which event sources trigger which functions—before they can reason about attack paths, blast-radius, or lateral-movement risk.
|
||||
|
||||
This tool fills that gap by building a connectivity graph from the service clients that are already loaded during a Prowler scan.
|
||||
|
||||
## Features
|
||||
|
||||
### Supported AWS Services
|
||||
|
||||
The tool currently extracts connectivity information from:
|
||||
|
||||
- **Lambda** – Functions, VPC/subnet/SG edges, event source mappings, layers, DLQ, KMS
|
||||
- **EC2** – Instances, security groups, subnet/VPC edges
|
||||
- **VPC** – VPCs, subnets, peering connections
|
||||
- **RDS** – DB instances, VPC/SG/cluster/KMS edges
|
||||
- **ELBv2** – ALB/NLB load balancers, SG and VPC edges
|
||||
- **S3** – Buckets, replication targets, logging buckets, KMS keys
|
||||
- **IAM** – Roles, trust-relationship edges (who can assume what)
|
||||
|
||||
### Edge Semantic Types
|
||||
|
||||
Edges are typed for downstream filtering and attack-path analysis:
|
||||
|
||||
- `network` – Resources share a network path (VPC/subnet/SG)
|
||||
- `iam` – IAM trust or permission relationship
|
||||
- `triggers` – One resource can invoke another (event source → Lambda)
|
||||
- `data_flow` – Data is written/read (Lambda → SQS dead-letter queue)
|
||||
- `depends_on` – Soft dependency (Lambda layer, subnet belongs to VPC)
|
||||
- `routes_to` – Traffic routing (LB → target)
|
||||
- `replicates_to` – S3 replication
|
||||
- `encrypts` – KMS key encrypts the resource
|
||||
- `logs_to` – Logging relationship
|
||||
|
||||
### Interactive HTML Graph Features
|
||||
|
||||
- Force-directed layout with drag-and-drop node pinning
|
||||
- Zoom / pan (mouse wheel + click-drag on background)
|
||||
- Per-service color-coded nodes with a legend
|
||||
- Hover tooltips showing ARN + all metadata properties
|
||||
- Service filter dropdown (show only Lambda, EC2, RDS, etc.)
|
||||
- Adjustable link-distance and charge-strength physics sliders
|
||||
- Edge labels on every arrow
|
||||
|
||||
## Installation
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.9.1 or higher
|
||||
- Prowler installed and configured (see [Prowler documentation](https://docs.prowler.com/))
|
||||
|
||||
### Setup
|
||||
|
||||
1. Clone or download this directory to your local machine
|
||||
2. Ensure Prowler is installed and working
|
||||
3. No additional dependencies required beyond Prowler's existing requirements
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Usage
|
||||
|
||||
Run Prowler with your desired checks, then use the inventory graph script:
|
||||
|
||||
```bash
|
||||
# Run Prowler scan (example)
|
||||
prowler aws --output-formats csv
|
||||
|
||||
# Generate inventory graph from the scan
|
||||
python contrib/inventory-graph/inventory_graph.py --output-directory ./output
|
||||
```
|
||||
|
||||
### Command-Line Options
|
||||
|
||||
```bash
|
||||
python contrib/inventory-graph/inventory_graph.py [OPTIONS]
|
||||
|
||||
Options:
|
||||
--output-directory DIR Directory to save output files (default: ./output)
|
||||
--output-filename NAME Base filename without extension (default: prowler-inventory-<timestamp>)
|
||||
--help Show this help message and exit
|
||||
```
|
||||
|
||||
### Example Workflow
|
||||
|
||||
```bash
|
||||
# 1. Run a Prowler scan on your AWS account
|
||||
prowler aws --profile my-aws-profile --output-formats csv html
|
||||
|
||||
# 2. Generate the inventory graph
|
||||
python contrib/inventory-graph/inventory_graph.py \
|
||||
--output-directory ./output \
|
||||
--output-filename my-aws-inventory
|
||||
|
||||
# 3. Open the HTML file in your browser
|
||||
open output/my-aws-inventory.inventory.html
|
||||
```
|
||||
|
||||
### Integration with Prowler Scans
|
||||
|
||||
The tool reads from already-loaded AWS service clients in memory (via `sys.modules`). This means:
|
||||
|
||||
- **Zero extra AWS API calls** – Uses data already collected during the Prowler scan
|
||||
- **Graceful degradation** – Services not scanned are silently skipped
|
||||
- **Flexible** – Works with any subset of Prowler checks
|
||||
|
||||
## Output Files
|
||||
|
||||
### JSON Output (`*.inventory.json`)
|
||||
|
||||
Machine-readable graph structure:
|
||||
|
||||
```json
|
||||
{
|
||||
"generated_at": "2026-03-19T12:34:56Z",
|
||||
"nodes": [
|
||||
{
|
||||
"id": "arn:aws:lambda:us-east-1:123456789012:function:my-function",
|
||||
"type": "lambda_function",
|
||||
"name": "my-function",
|
||||
"service": "lambda",
|
||||
"region": "us-east-1",
|
||||
"account_id": "123456789012",
|
||||
"properties": {
|
||||
"runtime": "python3.9",
|
||||
"vpc_id": "vpc-abc123"
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"source_id": "arn:aws:lambda:...",
|
||||
"target_id": "arn:aws:ec2:...:vpc/vpc-abc123",
|
||||
"edge_type": "network",
|
||||
"label": "in-vpc"
|
||||
}
|
||||
],
|
||||
"stats": {
|
||||
"node_count": 42,
|
||||
"edge_count": 87
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### HTML Output (`*.inventory.html`)
|
||||
|
||||
Self-contained interactive visualization that opens in any modern browser. No server or build step required.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Design Decisions
|
||||
|
||||
| Decision | Rationale |
|
||||
|----------|-----------|
|
||||
| **Read from sys.modules** | Zero extra AWS API calls; services not scanned are silently skipped |
|
||||
| **Self-contained HTML** | D3.js v7 via CDN; no server, no build step; opens in any browser |
|
||||
| **One extractor per service** | Each extractor is independently testable; adding a new service = one new file + one line in the registry |
|
||||
| **Typed edges** | Semantic types allow downstream consumers (attack-path tools, Neo4j import) to filter by relationship class |
|
||||
|
||||
### Project Structure
|
||||
|
||||
```
|
||||
contrib/inventory-graph/
|
||||
├── README.md # This file
|
||||
├── inventory_graph.py # Main entry point script
|
||||
├── lib/
|
||||
│ ├── __init__.py
|
||||
│ ├── models.py # ResourceNode, ResourceEdge, ConnectivityGraph dataclasses
|
||||
│ ├── graph_builder.py # Reads loaded service clients from sys.modules
|
||||
│ ├── inventory_output.py # write_json(), write_html()
|
||||
│ └── extractors/
|
||||
│ ├── __init__.py
|
||||
│ ├── lambda_extractor.py # Lambda functions → VPC/subnet/SG/event-sources/layers/DLQ/KMS
|
||||
│ ├── ec2_extractor.py # EC2 instances + security groups → subnet/VPC
|
||||
│ ├── vpc_extractor.py # VPCs, subnets, peering connections
|
||||
│ ├── rds_extractor.py # RDS instances → VPC/SG/cluster/KMS
|
||||
│ ├── elbv2_extractor.py # ALB/NLB load balancers → SG/VPC
|
||||
│ ├── s3_extractor.py # S3 buckets → replication targets/logging buckets/KMS keys
|
||||
│ └── iam_extractor.py # IAM roles + trust-relationship edges
|
||||
└── examples/
|
||||
└── sample_output.html # Example output (optional)
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
### Smoke Test (No AWS Credentials Needed)
|
||||
|
||||
```python
|
||||
import sys
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
# Wire a fake Lambda client
|
||||
mock_module = MagicMock()
|
||||
mock_fn = MagicMock()
|
||||
mock_fn.arn = "arn:aws:lambda:us-east-1:123:function:test"
|
||||
mock_fn.name = "test"
|
||||
mock_fn.region = "us-east-1"
|
||||
mock_fn.vpc_id = "vpc-abc"
|
||||
mock_fn.security_groups = ["sg-111"]
|
||||
mock_fn.subnet_ids = {"subnet-aaa"}
|
||||
mock_fn.environment = None
|
||||
mock_fn.kms_key_arn = None
|
||||
mock_fn.layers = []
|
||||
mock_fn.dead_letter_config = None
|
||||
mock_fn.event_source_mappings = []
|
||||
mock_module.awslambda_client.functions = {mock_fn.arn: mock_fn}
|
||||
mock_module.awslambda_client.audited_account = "123"
|
||||
sys.modules["prowler.providers.aws.services.awslambda.awslambda_client"] = mock_module
|
||||
|
||||
from contrib.inventory_graph.lib.graph_builder import build_graph
|
||||
from contrib.inventory_graph.lib.inventory_output import write_json, write_html
|
||||
|
||||
graph = build_graph()
|
||||
write_json(graph, "/tmp/test.inventory.json")
|
||||
write_html(graph, "/tmp/test.inventory.html")
|
||||
# Open /tmp/test.inventory.html in a browser
|
||||
```
|
||||
|
||||
## Extending
|
||||
|
||||
### Adding a New Service
|
||||
|
||||
1. Create a new extractor file in `lib/extractors/` (e.g., `dynamodb_extractor.py`)
|
||||
2. Implement the `extract(client)` function that returns `(nodes, edges)`
|
||||
3. Register it in `lib/graph_builder.py` in the `_SERVICE_REGISTRY` tuple
|
||||
|
||||
Example extractor template:
|
||||
|
||||
```python
|
||||
from typing import List, Tuple
|
||||
from prowler.lib.outputs.inventory.models import ResourceNode, ResourceEdge
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
|
||||
"""Extract DynamoDB tables and their relationships."""
|
||||
nodes = []
|
||||
edges = []
|
||||
|
||||
for table in client.tables:
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=table.arn,
|
||||
type="dynamodb_table",
|
||||
name=table.name,
|
||||
service="dynamodb",
|
||||
region=table.region,
|
||||
account_id=client.audited_account,
|
||||
properties={"billing_mode": table.billing_mode}
|
||||
)
|
||||
)
|
||||
|
||||
# Add edges for KMS encryption, streams, etc.
|
||||
if table.kms_key_arn:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=table.kms_key_arn,
|
||||
target_id=table.arn,
|
||||
edge_type="encrypts",
|
||||
label="encrypts"
|
||||
)
|
||||
)
|
||||
|
||||
return nodes, edges
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### No nodes discovered
|
||||
|
||||
**Problem:** The tool reports "no nodes discovered" after running.
|
||||
|
||||
**Solution:** Ensure you've run a Prowler scan first. The tool reads from in-memory service clients loaded during the scan. If no services were scanned, no nodes will be discovered.
|
||||
|
||||
### Missing services in the graph
|
||||
|
||||
**Problem:** Some AWS services are not appearing in the graph.
|
||||
|
||||
**Solution:** The tool only includes services that have been scanned by Prowler. Run Prowler with the services you want to include, or run without service filters to scan all available services.
|
||||
|
||||
### HTML file doesn't display properly
|
||||
|
||||
**Problem:** The HTML visualization doesn't load or shows errors.
|
||||
|
||||
**Solution:**
|
||||
- Ensure you're opening the file in a modern browser (Chrome, Firefox, Safari, Edge)
|
||||
- Check your browser's console for JavaScript errors
|
||||
- Verify the file was generated completely (check file size > 0)
|
||||
- The HTML requires internet access to load D3.js from CDN
|
||||
|
||||
## Roadmap
|
||||
|
||||
Potential future enhancements:
|
||||
|
||||
- [ ] Support for additional AWS services (DynamoDB, SQS, SNS, etc.)
|
||||
- [ ] Export to Neo4j / Cartography format
|
||||
- [ ] Attack path analysis integration
|
||||
- [ ] Multi-account/multi-region aggregation
|
||||
- [ ] Custom edge type filtering in HTML UI
|
||||
- [ ] Graph diff between two scans
|
||||
|
||||
## Contributing
|
||||
|
||||
This is a community contribution. If you'd like to enhance it:
|
||||
|
||||
1. Fork the Prowler repository
|
||||
2. Make your changes in `contrib/inventory-graph/`
|
||||
3. Test thoroughly
|
||||
4. Submit a pull request with a clear description
|
||||
|
||||
## License
|
||||
|
||||
This tool is part of the Prowler project and is licensed under the Apache License 2.0.
|
||||
|
||||
## Credits
|
||||
|
||||
- **Author:** [@sandiyochristan](https://github.com/sandiyochristan)
|
||||
- **Related PR:** [#10382](https://github.com/prowler-cloud/prowler/pull/10382)
|
||||
- **Prowler Project:** [prowler-cloud/prowler](https://github.com/prowler-cloud/prowler)
|
||||
|
||||
## Support
|
||||
|
||||
For issues or questions:
|
||||
|
||||
- Open an issue in the [Prowler repository](https://github.com/prowler-cloud/prowler/issues)
|
||||
- Join the [Prowler Community Slack](https://goto.prowler.com/slack)
|
||||
- Tag your issue with `contrib:inventory-graph`
|
||||
@@ -0,0 +1,181 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Example: Generate AWS Inventory Graph with Mock Data
|
||||
|
||||
This example demonstrates how to use the inventory graph tool with mock AWS data.
|
||||
No AWS credentials required.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
# Add parent directory to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from lib.graph_builder import build_graph
|
||||
from lib.inventory_output import write_json, write_html
|
||||
|
||||
|
||||
def create_mock_lambda_client():
|
||||
"""Create a mock Lambda client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock Lambda function
|
||||
mock_fn = MagicMock()
|
||||
mock_fn.arn = "arn:aws:lambda:us-east-1:123456789012:function:my-test-function"
|
||||
mock_fn.name = "my-test-function"
|
||||
mock_fn.region = "us-east-1"
|
||||
mock_fn.vpc_id = "vpc-abc123"
|
||||
mock_fn.security_groups = ["sg-111222"]
|
||||
mock_fn.subnet_ids = {"subnet-aaa111", "subnet-bbb222"}
|
||||
mock_fn.environment = {"Variables": {"ENV": "production"}}
|
||||
mock_fn.kms_key_arn = (
|
||||
"arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012"
|
||||
)
|
||||
mock_fn.layers = []
|
||||
mock_fn.dead_letter_config = None
|
||||
mock_fn.event_source_mappings = []
|
||||
|
||||
mock_module.awslambda_client.functions = {mock_fn.arn: mock_fn}
|
||||
mock_module.awslambda_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def create_mock_ec2_client():
|
||||
"""Create a mock EC2 client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock EC2 instance
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.arn = (
|
||||
"arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0"
|
||||
)
|
||||
mock_instance.id = "i-1234567890abcdef0"
|
||||
mock_instance.region = "us-east-1"
|
||||
mock_instance.vpc_id = "vpc-abc123"
|
||||
mock_instance.subnet_id = "subnet-aaa111"
|
||||
mock_instance.security_groups = [MagicMock(id="sg-111222")]
|
||||
mock_instance.state = "running"
|
||||
mock_instance.type = "t3.micro"
|
||||
mock_instance.tags = [{"Key": "Name", "Value": "test-instance"}]
|
||||
|
||||
# Create a mock security group
|
||||
mock_sg = MagicMock()
|
||||
mock_sg.arn = "arn:aws:ec2:us-east-1:123456789012:security-group/sg-111222"
|
||||
mock_sg.id = "sg-111222"
|
||||
mock_sg.name = "test-security-group"
|
||||
mock_sg.region = "us-east-1"
|
||||
mock_sg.vpc_id = "vpc-abc123"
|
||||
|
||||
mock_module.ec2_client.instances = [mock_instance]
|
||||
mock_module.ec2_client.security_groups = [mock_sg]
|
||||
mock_module.ec2_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def create_mock_vpc_client():
|
||||
"""Create a mock VPC client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock VPC
|
||||
mock_vpc = MagicMock()
|
||||
mock_vpc.arn = "arn:aws:ec2:us-east-1:123456789012:vpc/vpc-abc123"
|
||||
mock_vpc.id = "vpc-abc123"
|
||||
mock_vpc.region = "us-east-1"
|
||||
mock_vpc.cidr_block = "10.0.0.0/16"
|
||||
mock_vpc.tags = [{"Key": "Name", "Value": "test-vpc"}]
|
||||
|
||||
# Create mock subnets
|
||||
mock_subnet1 = MagicMock()
|
||||
mock_subnet1.arn = "arn:aws:ec2:us-east-1:123456789012:subnet/subnet-aaa111"
|
||||
mock_subnet1.id = "subnet-aaa111"
|
||||
mock_subnet1.region = "us-east-1"
|
||||
mock_subnet1.vpc_id = "vpc-abc123"
|
||||
mock_subnet1.cidr_block = "10.0.1.0/24"
|
||||
mock_subnet1.availability_zone = "us-east-1a"
|
||||
|
||||
mock_subnet2 = MagicMock()
|
||||
mock_subnet2.arn = "arn:aws:ec2:us-east-1:123456789012:subnet/subnet-bbb222"
|
||||
mock_subnet2.id = "subnet-bbb222"
|
||||
mock_subnet2.region = "us-east-1"
|
||||
mock_subnet2.vpc_id = "vpc-abc123"
|
||||
mock_subnet2.cidr_block = "10.0.2.0/24"
|
||||
mock_subnet2.availability_zone = "us-east-1b"
|
||||
|
||||
mock_module.vpc_client.vpcs = [mock_vpc]
|
||||
mock_module.vpc_client.subnets = [mock_subnet1, mock_subnet2]
|
||||
mock_module.vpc_client.vpc_peering_connections = []
|
||||
mock_module.vpc_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def main():
|
||||
"""Main function to demonstrate the inventory graph generation."""
|
||||
print("=" * 70)
|
||||
print("AWS Inventory Graph - Mock Data Example")
|
||||
print("=" * 70)
|
||||
print()
|
||||
|
||||
# Create mock clients and inject them into sys.modules
|
||||
print("Creating mock AWS service clients...")
|
||||
sys.modules["prowler.providers.aws.services.awslambda.awslambda_client"] = (
|
||||
create_mock_lambda_client()
|
||||
)
|
||||
sys.modules["prowler.providers.aws.services.ec2.ec2_client"] = (
|
||||
create_mock_ec2_client()
|
||||
)
|
||||
sys.modules["prowler.providers.aws.services.vpc.vpc_client"] = (
|
||||
create_mock_vpc_client()
|
||||
)
|
||||
print("✓ Mock clients created")
|
||||
print()
|
||||
|
||||
# Build the graph
|
||||
print("Building connectivity graph...")
|
||||
graph = build_graph()
|
||||
print(f"✓ Graph built: {len(graph.nodes)} nodes, {len(graph.edges)} edges")
|
||||
print()
|
||||
|
||||
# Display discovered nodes
|
||||
print("Discovered nodes:")
|
||||
for node in graph.nodes:
|
||||
print(f" - {node.type}: {node.name} ({node.region})")
|
||||
print()
|
||||
|
||||
# Display discovered edges
|
||||
print("Discovered edges:")
|
||||
for edge in graph.edges:
|
||||
source_node = next((n for n in graph.nodes if n.id == edge.source_id), None)
|
||||
target_node = next((n for n in graph.nodes if n.id == edge.target_id), None)
|
||||
source_name = source_node.name if source_node else edge.source_id
|
||||
target_name = target_node.name if target_node else edge.target_id
|
||||
print(f" - {source_name} --[{edge.edge_type}]--> {target_name}")
|
||||
print()
|
||||
|
||||
# Write outputs
|
||||
output_dir = Path(__file__).parent
|
||||
json_path = output_dir / "example_output.inventory.json"
|
||||
html_path = output_dir / "example_output.inventory.html"
|
||||
|
||||
print("Writing output files...")
|
||||
write_json(graph, str(json_path))
|
||||
write_html(graph, str(html_path))
|
||||
print(f"✓ JSON written to: {json_path}")
|
||||
print(f"✓ HTML written to: {html_path}")
|
||||
print()
|
||||
|
||||
print("=" * 70)
|
||||
print("✓ Example complete!")
|
||||
print("=" * 70)
|
||||
print()
|
||||
print(f"Open the HTML file to view the interactive graph:")
|
||||
print(f" open {html_path}")
|
||||
print()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
AWS Inventory Connectivity Graph Generator
|
||||
|
||||
A standalone tool that generates interactive connectivity graphs from Prowler AWS scans.
|
||||
This tool reads from already-loaded AWS service clients in memory and produces:
|
||||
- JSON graph (nodes + edges)
|
||||
- Interactive HTML visualization
|
||||
|
||||
Usage:
|
||||
python inventory_graph.py --output-directory ./output --output-filename my-inventory
|
||||
|
||||
For more information, see README.md
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# Add the contrib directory to the path so we can import the lib modules
|
||||
CONTRIB_DIR = Path(__file__).parent
|
||||
sys.path.insert(0, str(CONTRIB_DIR))
|
||||
|
||||
from lib.graph_builder import build_graph
|
||||
from lib.inventory_output import write_json, write_html
|
||||
|
||||
|
||||
def parse_arguments():
|
||||
"""Parse command-line arguments."""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate AWS inventory connectivity graph from Prowler scan data",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog="""
|
||||
Examples:
|
||||
# Generate graph with default settings
|
||||
python inventory_graph.py
|
||||
|
||||
# Specify custom output directory and filename
|
||||
python inventory_graph.py --output-directory ./my-output --output-filename aws-inventory
|
||||
|
||||
# After running a Prowler scan
|
||||
prowler aws --profile my-profile
|
||||
python inventory_graph.py --output-directory ./output
|
||||
|
||||
For more information, see README.md
|
||||
""",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--output-directory",
|
||||
"-o",
|
||||
default="./output",
|
||||
help="Directory to save output files (default: ./output)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--output-filename",
|
||||
"-f",
|
||||
default=None,
|
||||
help="Base filename without extension (default: prowler-inventory-<timestamp>)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--verbose",
|
||||
"-v",
|
||||
action="store_true",
|
||||
help="Enable verbose output",
|
||||
)
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point for the inventory graph generator."""
|
||||
args = parse_arguments()
|
||||
|
||||
# Set up output paths
|
||||
output_dir = Path(args.output_directory)
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Generate filename with timestamp if not provided
|
||||
if args.output_filename:
|
||||
base_filename = args.output_filename
|
||||
else:
|
||||
timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
|
||||
base_filename = f"prowler-inventory-{timestamp}"
|
||||
|
||||
json_path = output_dir / f"{base_filename}.inventory.json"
|
||||
html_path = output_dir / f"{base_filename}.inventory.html"
|
||||
|
||||
print("=" * 70)
|
||||
print("AWS Inventory Connectivity Graph Generator")
|
||||
print("=" * 70)
|
||||
print()
|
||||
|
||||
# Build the graph from loaded service clients
|
||||
if args.verbose:
|
||||
print("Building connectivity graph from loaded AWS service clients...")
|
||||
|
||||
graph = build_graph()
|
||||
|
||||
# Check if any nodes were discovered
|
||||
if not graph.nodes:
|
||||
print("⚠️ WARNING: No nodes discovered!")
|
||||
print()
|
||||
print("This usually means:")
|
||||
print(" 1. No Prowler scan has been run yet in this Python session")
|
||||
print(" 2. No AWS service clients are loaded in memory")
|
||||
print()
|
||||
print("To fix this:")
|
||||
print(" 1. Run a Prowler scan first: prowler aws --output-formats csv")
|
||||
print(" 2. Then run this script in the same session")
|
||||
print()
|
||||
print(
|
||||
"Alternatively, integrate this tool directly into Prowler's output pipeline."
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
print(f"✓ Discovered {len(graph.nodes)} nodes and {len(graph.edges)} edges")
|
||||
print()
|
||||
|
||||
# Write outputs
|
||||
if args.verbose:
|
||||
print(f"Writing JSON output to: {json_path}")
|
||||
write_json(graph, str(json_path))
|
||||
|
||||
if args.verbose:
|
||||
print(f"Writing HTML output to: {html_path}")
|
||||
write_html(graph, str(html_path))
|
||||
|
||||
print()
|
||||
print("=" * 70)
|
||||
print("✓ Graph generation complete!")
|
||||
print("=" * 70)
|
||||
print()
|
||||
print(f"📄 JSON: {json_path}")
|
||||
print(f"🌐 HTML: {html_path}")
|
||||
print()
|
||||
print(f"Open the HTML file in your browser to explore the interactive graph:")
|
||||
print(f" open {html_path}")
|
||||
print()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
print("\n\nInterrupted by user. Exiting...")
|
||||
sys.exit(130)
|
||||
except Exception as e:
|
||||
print(f"\n❌ Error: {e}", file=sys.stderr)
|
||||
if "--verbose" in sys.argv or "-v" in sys.argv:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
@@ -0,0 +1,94 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
|
||||
"""
|
||||
Extract EC2 instance and security-group nodes with their edges.
|
||||
|
||||
Edges produced:
|
||||
- instance → security-group [network]
|
||||
- instance → subnet [network]
|
||||
- security-group → VPC [network]
|
||||
"""
|
||||
nodes: List[ResourceNode] = []
|
||||
edges: List[ResourceEdge] = []
|
||||
|
||||
# EC2 Instances
|
||||
for instance in client.instances:
|
||||
name = instance.id
|
||||
for tag in instance.tags or []:
|
||||
if tag.get("Key") == "Name":
|
||||
name = tag["Value"]
|
||||
break
|
||||
|
||||
props = {
|
||||
"instance_type": getattr(instance, "type", None),
|
||||
"state": getattr(instance, "state", None),
|
||||
"vpc_id": getattr(instance, "vpc_id", None),
|
||||
"subnet_id": getattr(instance, "subnet_id", None),
|
||||
"public_ip": getattr(instance, "public_ip_address", None),
|
||||
"private_ip": getattr(instance, "private_ip_address", None),
|
||||
}
|
||||
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=instance.arn,
|
||||
type="ec2_instance",
|
||||
name=name,
|
||||
service="ec2",
|
||||
region=instance.region,
|
||||
account_id=client.audited_account,
|
||||
properties={k: v for k, v in props.items() if v is not None},
|
||||
)
|
||||
)
|
||||
|
||||
for sg_id in instance.security_groups or []:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=instance.arn,
|
||||
target_id=sg_id,
|
||||
edge_type="network",
|
||||
label="sg",
|
||||
)
|
||||
)
|
||||
|
||||
if instance.subnet_id:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=instance.arn,
|
||||
target_id=instance.subnet_id,
|
||||
edge_type="network",
|
||||
label="subnet",
|
||||
)
|
||||
)
|
||||
|
||||
# Security Groups
|
||||
for sg in client.security_groups.values():
|
||||
name = (
|
||||
sg.name if hasattr(sg, "name") else sg.id if hasattr(sg, "id") else sg.arn
|
||||
)
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=sg.arn,
|
||||
type="security_group",
|
||||
name=name,
|
||||
service="ec2",
|
||||
region=sg.region,
|
||||
account_id=client.audited_account,
|
||||
properties={"vpc_id": sg.vpc_id},
|
||||
)
|
||||
)
|
||||
|
||||
if sg.vpc_id:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=sg.arn,
|
||||
target_id=sg.vpc_id,
|
||||
edge_type="network",
|
||||
label="in-vpc",
|
||||
)
|
||||
)
|
||||
|
||||
return nodes, edges
|
||||
@@ -0,0 +1,60 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
|
||||
"""
|
||||
Extract ELBv2 (ALB/NLB) load balancer nodes and their edges.
|
||||
|
||||
Edges produced:
|
||||
- load_balancer → security-group [network]
|
||||
- load_balancer → VPC [network]
|
||||
"""
|
||||
nodes: List[ResourceNode] = []
|
||||
edges: List[ResourceEdge] = []
|
||||
|
||||
for lb in client.loadbalancersv2.values():
|
||||
props = {
|
||||
"type": getattr(lb, "type", None),
|
||||
"scheme": getattr(lb, "scheme", None),
|
||||
"dns_name": getattr(lb, "dns", None),
|
||||
"vpc_id": getattr(lb, "vpc_id", None),
|
||||
}
|
||||
|
||||
name = getattr(lb, "name", lb.arn.split("/")[-2] if "/" in lb.arn else lb.arn)
|
||||
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=lb.arn,
|
||||
type="load_balancer",
|
||||
name=name,
|
||||
service="elbv2",
|
||||
region=lb.region,
|
||||
account_id=client.audited_account,
|
||||
properties={k: v for k, v in props.items() if v is not None},
|
||||
)
|
||||
)
|
||||
|
||||
for sg_id in lb.security_groups or []:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=lb.arn,
|
||||
target_id=sg_id,
|
||||
edge_type="network",
|
||||
label="sg",
|
||||
)
|
||||
)
|
||||
|
||||
vpc_id = getattr(lb, "vpc_id", None)
|
||||
if vpc_id:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=lb.arn,
|
||||
target_id=vpc_id,
|
||||
edge_type="network",
|
||||
label="in-vpc",
|
||||
)
|
||||
)
|
||||
|
||||
return nodes, edges
|
||||
@@ -0,0 +1,84 @@
|
||||
import json
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def _parse_trust_principals(assume_role_policy: Any) -> List[str]:
|
||||
"""
|
||||
Return a flat list of principal strings from an IAM assume-role policy document.
|
||||
The policy may be a dict already or a JSON string.
|
||||
"""
|
||||
if not assume_role_policy:
|
||||
return []
|
||||
|
||||
if isinstance(assume_role_policy, str):
|
||||
try:
|
||||
assume_role_policy = json.loads(assume_role_policy)
|
||||
except (json.JSONDecodeError, ValueError):
|
||||
return []
|
||||
|
||||
principals = []
|
||||
for statement in assume_role_policy.get("Statement", []):
|
||||
principal = statement.get("Principal", {})
|
||||
if isinstance(principal, str):
|
||||
principals.append(principal)
|
||||
elif isinstance(principal, dict):
|
||||
for v in principal.values():
|
||||
if isinstance(v, list):
|
||||
principals.extend(v)
|
||||
else:
|
||||
principals.append(v)
|
||||
elif isinstance(principal, list):
|
||||
principals.extend(principal)
|
||||
|
||||
return principals
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract IAM role nodes and their trust-relationship edges.

    Args:
        client: IAM service client exposing `roles` and `audited_account`.

    Returns:
        (nodes, edges) — one ResourceNode per role, plus one "can-assume"
        edge per non-wildcard trusted principal.

    Edges produced:
        - trusted-principal → role [iam] (who can assume this role)
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    for role in client.roles:
        raw_props: Dict[str, Any] = {
            "path": getattr(role, "path", None),
            "create_date": str(getattr(role, "create_date", "") or ""),
        }
        node = ResourceNode(
            id=role.arn,
            type="iam_role",
            name=role.name,
            service="iam",
            region="global",  # IAM is a global service
            account_id=client.audited_account,
            properties={key: value for key, value in raw_props.items() if value},
        )
        nodes.append(node)

        # Trust-relationship edges: principal → role (principal CAN assume role)
        try:
            for trusted in _parse_trust_principals(role.assume_role_policy):
                # Skip empty principals and the "*" wildcard — an edge to
                # "everything" carries no connectivity information.
                if not trusted or trusted == "*":
                    continue
                edges.append(
                    ResourceEdge(
                        source_id=trusted,
                        target_id=role.arn,
                        edge_type="iam",
                        label="can-assume",
                    )
                )
        except Exception as e:
            logger.debug(
                f"inventory iam_extractor: could not parse trust policy for {role.arn}: {e}"
            )

    return nodes, edges
|
||||
@@ -0,0 +1,118 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract Lambda function nodes and their edges from an awslambda_client.

    Args:
        client: Lambda service client exposing `functions` and `audited_account`.

    Edges produced:
        - lambda → VPC [network]
        - lambda → security group [network]
        - lambda → subnet [network]
        - event-source → lambda [triggers] (from EventSourceMapping)
        - lambda → layer ARN [depends_on]
        - lambda → DLQ target [data_flow]
        - KMS key → lambda [encrypts]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    def _edge(source_id, target_id, edge_type, label):
        # Local helper: every edge below differs only in these four fields.
        edges.append(
            ResourceEdge(
                source_id=source_id,
                target_id=target_id,
                edge_type=edge_type,
                label=label,
            )
        )

    for function in client.functions.values():
        properties = {
            "runtime": function.runtime,
            "vpc_id": function.vpc_id,
        }
        if function.environment:
            properties["has_env_vars"] = True
        if function.kms_key_arn:
            properties["kms_key_arn"] = function.kms_key_arn

        nodes.append(
            ResourceNode(
                id=function.arn,
                type="lambda_function",
                name=function.name,
                service="lambda",
                region=function.region,
                account_id=client.audited_account,
                properties=properties,
            )
        )

        # Network edges → VPC, security groups, subnets
        if function.vpc_id:
            _edge(function.arn, function.vpc_id, "network", "in-vpc")
        for group_id in function.security_groups or []:
            _edge(function.arn, group_id, "network", "sg")
        for subnet_id in function.subnet_ids or set():
            _edge(function.arn, subnet_id, "network", "subnet")

        # Trigger edges from event source mappings (source → function)
        for mapping in getattr(function, "event_source_mappings", []):
            _edge(mapping.event_source_arn, function.arn, "triggers", f"esm:{mapping.state}")

        # Layer dependency edges
        for layer in getattr(function, "layers", []):
            _edge(function.arn, layer.arn, "depends_on", "layer")

        # Dead-letter queue data-flow edge
        dlq = getattr(function, "dead_letter_config", None)
        if dlq and dlq.target_arn:
            _edge(function.arn, dlq.target_arn, "data_flow", "dlq")

        # KMS encryption edge (key → function)
        if function.kms_key_arn:
            _edge(function.kms_key_arn, function.arn, "encrypts", "kms")

    return nodes, edges
|
||||
@@ -0,0 +1,86 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract RDS DB instance nodes and their edges.

    Args:
        client: RDS service client exposing `db_instances` and `audited_account`.

    Edges produced:
        - db_instance → security-group [network]
        - db_instance → VPC [network]
        - db_instance → cluster [depends_on]
        - KMS key → db_instance [encrypts]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    for instance in client.db_instances.values():
        # Gather optional attributes; None entries are dropped below so the
        # node only carries metadata that was actually present.
        candidate_props = {
            "engine": getattr(instance, "engine", None),
            "engine_version": getattr(instance, "engine_version", None),
            "instance_class": getattr(instance, "db_instance_class", None),
            "vpc_id": getattr(instance, "vpc_id", None),
            "multi_az": getattr(instance, "multi_az", None),
            "publicly_accessible": getattr(instance, "publicly_accessible", None),
            "storage_encrypted": getattr(instance, "storage_encrypted", None),
        }

        nodes.append(
            ResourceNode(
                id=instance.arn,
                type="rds_instance",
                name=instance.id,
                service="rds",
                region=instance.region,
                account_id=client.audited_account,
                properties={
                    key: value
                    for key, value in candidate_props.items()
                    if value is not None
                },
            )
        )

        # db_instance → security-group [network]
        for group in getattr(instance, "security_groups", []):
            # Entries may be bare id strings or objects carrying an `id`.
            group_id = group if isinstance(group, str) else getattr(group, "id", str(group))
            edges.append(
                ResourceEdge(
                    source_id=instance.arn,
                    target_id=group_id,
                    edge_type="network",
                    label="sg",
                )
            )

        # db_instance → VPC [network]
        vpc_id = candidate_props["vpc_id"]
        if vpc_id:
            edges.append(
                ResourceEdge(
                    source_id=instance.arn,
                    target_id=vpc_id,
                    edge_type="network",
                    label="in-vpc",
                )
            )

        # db_instance → cluster [depends_on]
        cluster_arn = getattr(instance, "cluster_arn", None)
        if cluster_arn:
            edges.append(
                ResourceEdge(
                    source_id=instance.arn,
                    target_id=cluster_arn,
                    edge_type="depends_on",
                    label="cluster-member",
                )
            )

        # KMS key → db_instance [encrypts]
        kms_key_id = getattr(instance, "kms_key_id", None)
        if kms_key_id:
            edges.append(
                ResourceEdge(
                    source_id=kms_key_id,
                    target_id=instance.arn,
                    edge_type="encrypts",
                    label="kms",
                )
            )

    return nodes, edges
|
||||
@@ -0,0 +1,92 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract S3 bucket nodes and their edges.

    Args:
        client: S3 service client exposing `buckets` and `audited_account`.

    Edges produced:
        - bucket → replication-target bucket [replicates_to]
        - KMS key → bucket [encrypts]
        - bucket → logging bucket [logs_to]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    def _to_bucket_arn(name_or_arn: str) -> str:
        # Bucket references may be a bare name or a full ARN; normalise to ARN.
        return (
            name_or_arn
            if name_or_arn.startswith("arn:")
            else f"arn:aws:s3:::{name_or_arn}"
        )

    for bucket in client.buckets.values():
        encryption = getattr(bucket, "encryption", None)
        versioning = getattr(bucket, "versioning_enabled", None)
        logging = getattr(bucket, "logging", None)

        props = {}
        if versioning is not None:
            props["versioning"] = versioning
        if encryption:
            # `encryption` may be a structured object (with a `type`) or a
            # plain value; fall back to its string form.
            props["encryption"] = getattr(encryption, "type", str(encryption))

        nodes.append(
            ResourceNode(
                id=bucket.arn,
                type="s3_bucket",
                name=bucket.name,
                service="s3",
                region=bucket.region,
                account_id=client.audited_account,
                properties=props,
            )
        )

        # Replication edges
        for rule in getattr(bucket, "replication_rules", None) or []:
            dest_bucket = getattr(rule, "destination_bucket", None)
            if dest_bucket:
                edges.append(
                    ResourceEdge(
                        source_id=bucket.arn,
                        target_id=_to_bucket_arn(dest_bucket),
                        edge_type="replicates_to",
                        label="s3-replication",
                    )
                )

        # Logging edges
        if logging:
            target_bucket = getattr(logging, "target_bucket", None)
            if target_bucket:
                edges.append(
                    ResourceEdge(
                        source_id=bucket.arn,
                        target_id=_to_bucket_arn(target_bucket),
                        edge_type="logs_to",
                        label="access-logs",
                    )
                )

        # KMS encryption edges (key → bucket)
        if encryption:
            kms_arn = getattr(encryption, "kms_master_key_id", None)
            if kms_arn:
                edges.append(
                    ResourceEdge(
                        source_id=kms_arn,
                        target_id=bucket.arn,
                        edge_type="encrypts",
                        label="kms",
                    )
                )

    return nodes, edges
|
||||
@@ -0,0 +1,92 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract VPC and subnet nodes with their edges.

    Args:
        client: VPC service client exposing `vpcs`, `vpc_subnets` and
            (optionally) `vpc_peering_connections`, plus `audited_account`.

    Edges produced:
        - subnet → VPC [depends_on]
        - peering connection between VPCs [network]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    def _name_from_tags(resource) -> str:
        # Prefer the "Name" tag when present; fall back to the resource id
        # (or ARN when no id attribute exists).
        name = resource.id if hasattr(resource, "id") else resource.arn
        for tag in getattr(resource, "tags", None) or []:
            if isinstance(tag, dict) and tag.get("Key") == "Name":
                return tag["Value"]
        return name

    # VPCs
    for vpc in client.vpcs.values():
        nodes.append(
            ResourceNode(
                id=vpc.arn,
                type="vpc",
                name=_name_from_tags(vpc),
                service="vpc",
                region=vpc.region,
                account_id=client.audited_account,
                properties={
                    "cidr_block": getattr(vpc, "cidr_block", None),
                    "is_default": getattr(vpc, "is_default", None),
                },
            )
        )

    # Resolve vpc_id → ARN once instead of scanning all VPCs per subnet
    # (VPC ids are unique, so last-wins insertion is equivalent).
    vpc_arn_by_id = {v.id: v.arn for v in client.vpcs.values()}

    # VPC Subnets
    for subnet in client.vpc_subnets.values():
        nodes.append(
            ResourceNode(
                id=subnet.arn,
                type="subnet",
                name=_name_from_tags(subnet),
                service="vpc",
                region=subnet.region,
                account_id=client.audited_account,
                properties={
                    "vpc_id": getattr(subnet, "vpc_id", None),
                    "cidr_block": getattr(subnet, "cidr_block", None),
                    "availability_zone": getattr(subnet, "availability_zone", None),
                    "public": getattr(subnet, "public", None),
                },
            )
        )

        vpc_id = getattr(subnet, "vpc_id", None)
        if vpc_id:
            edges.append(
                ResourceEdge(
                    source_id=subnet.arn,
                    # Fall back to the raw vpc_id when no matching VPC was scanned.
                    target_id=vpc_arn_by_id.get(vpc_id, vpc_id),
                    edge_type="depends_on",
                    label="subnet-of",
                )
            )

    # VPC Peering Connections
    for peering in getattr(client, "vpc_peering_connections", {}).values():
        edges.append(
            ResourceEdge(
                source_id=peering.arn,
                target_id=getattr(peering, "accepter_vpc_id", peering.arn),
                edge_type="network",
                label="vpc-peer",
            )
        )

    return nodes, edges
|
||||
@@ -0,0 +1,106 @@
|
||||
"""
|
||||
graph_builder.py
|
||||
----------------
|
||||
Builds a ConnectivityGraph by reading already-loaded AWS service clients from
|
||||
sys.modules. Only services that were actually scanned (i.e. whose client
|
||||
module is already imported) contribute nodes and edges. Unknown / unloaded
|
||||
services are silently skipped, so the output degrades gracefully when only a
|
||||
subset of checks has been run.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from typing import Tuple
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ConnectivityGraph
|
||||
|
||||
# Registry: (sys.modules key, attribute name inside that module, extractor module path)
# Each entry wires one Prowler service client to the extractor that turns it
# into graph nodes/edges. A service contributes to the graph only when its
# client module is already present in sys.modules (i.e. it was scanned).
_SERVICE_REGISTRY: Tuple[Tuple[str, str, str], ...] = (
    (
        "prowler.providers.aws.services.awslambda.awslambda_client",
        "awslambda_client",
        "lib.extractors.lambda_extractor",
    ),
    (
        "prowler.providers.aws.services.ec2.ec2_client",
        "ec2_client",
        "lib.extractors.ec2_extractor",
    ),
    (
        "prowler.providers.aws.services.vpc.vpc_client",
        "vpc_client",
        "lib.extractors.vpc_extractor",
    ),
    (
        "prowler.providers.aws.services.rds.rds_client",
        "rds_client",
        "lib.extractors.rds_extractor",
    ),
    (
        "prowler.providers.aws.services.elbv2.elbv2_client",
        "elbv2_client",
        "lib.extractors.elbv2_extractor",
    ),
    (
        "prowler.providers.aws.services.s3.s3_client",
        "s3_client",
        "lib.extractors.s3_extractor",
    ),
    (
        "prowler.providers.aws.services.iam.iam_client",
        "iam_client",
        "lib.extractors.iam_extractor",
    ),
)
|
||||
|
||||
|
||||
def build_graph() -> ConnectivityGraph:
    """
    Iterate over every registered service, check whether its client module is
    already loaded, and call the corresponding extractor.

    Returns:
        A ConnectivityGraph with all discovered nodes and edges.
        Duplicate node IDs are silently deduplicated (first occurrence wins).
        Services whose client module was never imported are skipped, as are
        extractors that fail to import or raise during extraction.
    """
    # Hoisted out of the loop — importing inside each iteration was redundant.
    import importlib

    graph = ConnectivityGraph()
    seen_node_ids: set = set()

    for client_module_key, client_attr, extractor_module_key in _SERVICE_REGISTRY:
        # Only services that were actually scanned have their client module
        # loaded; everything else is skipped silently.
        client_module = sys.modules.get(client_module_key)
        if client_module is None:
            continue

        service_client = getattr(client_module, client_attr, None)
        if service_client is None:
            continue

        try:
            # import_module consults sys.modules first, so an already-loaded
            # extractor is returned from cache — no manual check needed.
            extractor_module = importlib.import_module(extractor_module_key)
        except ImportError as e:
            logger.debug(
                f"inventory graph_builder: cannot import extractor {extractor_module_key}: {e}"
            )
            continue

        try:
            nodes, edges = extractor_module.extract(service_client)
        except Exception as e:
            logger.error(
                f"inventory graph_builder: extractor {extractor_module_key} failed: "
                f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
            )
            continue

        # First occurrence of a node id wins; later duplicates are dropped.
        for node in nodes:
            if node.id not in seen_node_ids:
                graph.add_node(node)
                seen_node_ids.add(node.id)

        for edge in edges:
            graph.add_edge(edge)

    return graph
||||
@@ -0,0 +1,502 @@
|
||||
"""
|
||||
inventory_output.py
|
||||
-------------------
|
||||
Writes the ConnectivityGraph produced by graph_builder to two files:
|
||||
|
||||
<output_path>.inventory.json – machine-readable graph (nodes + edges)
|
||||
<output_path>.inventory.html – interactive D3.js force-directed graph
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from dataclasses import asdict
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ConnectivityGraph
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# JSON output
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def write_json(graph: ConnectivityGraph, file_path: str) -> None:
    """Serialise the graph to a JSON file.

    Args:
        graph: the ConnectivityGraph to serialise.
        file_path: destination path; parent directories are created on demand.

    The output contains "generated_at", "nodes", "edges" and a "stats" block
    with node/edge counts. Errors are logged rather than raised so output
    generation never aborts a scan.
    """
    try:
        parent_dir = os.path.dirname(file_path)
        # os.makedirs("") raises FileNotFoundError, so only create the parent
        # when the path actually has a directory component.
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)
        data = {
            "generated_at": datetime.utcnow().isoformat() + "Z",
            "nodes": [asdict(n) for n in graph.nodes],
            "edges": [asdict(e) for e in graph.edges],
            "stats": {
                "node_count": len(graph.nodes),
                "edge_count": len(graph.edges),
            },
        }
        with open(file_path, "w", encoding="utf-8") as fh:
            # default=str covers datetimes and other non-JSON-native values.
            json.dump(data, fh, indent=2, default=str)
        logger.info(f"Inventory graph JSON written to {file_path}")
    except Exception as e:
        logger.error(
            f"inventory_output.write_json: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# HTML output (self-contained, D3.js CDN)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Colour palette per node type (hex CSS colours). "default" is the fallback
# the HTML page's nodeColour() helper uses for unknown types.
_NODE_COLOURS = {
    "lambda_function": "#f59e0b",
    "ec2_instance": "#3b82f6",
    "security_group": "#6366f1",
    "vpc": "#10b981",
    "subnet": "#34d399",
    "rds_instance": "#ef4444",
    "load_balancer": "#8b5cf6",
    "s3_bucket": "#06b6d4",
    "iam_role": "#f97316",
    "default": "#94a3b8",
}

# Edge stroke colours per edge type (the page falls back to a neutral grey
# for edge types not listed here).
_EDGE_COLOURS = {
    "network": "#64748b",
    "iam": "#f97316",
    "triggers": "#a855f7",
    "data_flow": "#0ea5e9",
    "depends_on": "#94a3b8",
    "routes_to": "#22c55e",
    "replicates_to": "#ec4899",
    "encrypts": "#eab308",
    "logs_to": "#78716c",
}
|
||||
|
||||
_HTML_TEMPLATE = """\
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
|
||||
<title>Prowler – AWS Connectivity Graph</title>
|
||||
<script src="https://d3js.org/d3.v7.min.js"></script>
|
||||
<style>
|
||||
*, *::before, *::after {{ box-sizing: border-box; }}
|
||||
body {{
|
||||
margin: 0;
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
background: #0f172a;
|
||||
color: #e2e8f0;
|
||||
}}
|
||||
#header {{
|
||||
padding: 12px 20px;
|
||||
background: #1e293b;
|
||||
border-bottom: 1px solid #334155;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
}}
|
||||
#header h1 {{ margin: 0; font-size: 18px; font-weight: 700; }}
|
||||
#header .stats {{ font-size: 13px; color: #94a3b8; }}
|
||||
#controls {{
|
||||
padding: 8px 20px;
|
||||
background: #1e293b;
|
||||
border-bottom: 1px solid #334155;
|
||||
display: flex;
|
||||
gap: 12px;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
}}
|
||||
#controls label {{ font-size: 12px; color: #94a3b8; }}
|
||||
#controls select, #controls input[type=range] {{
|
||||
background: #0f172a;
|
||||
color: #e2e8f0;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 4px;
|
||||
padding: 3px 6px;
|
||||
font-size: 12px;
|
||||
}}
|
||||
#graph-container {{ width: 100%; height: calc(100vh - 100px); position: relative; }}
|
||||
svg {{ width: 100%; height: 100%; }}
|
||||
.node circle {{
|
||||
stroke: #1e293b;
|
||||
stroke-width: 1.5px;
|
||||
cursor: pointer;
|
||||
transition: r 0.15s;
|
||||
}}
|
||||
.node circle:hover {{ stroke-width: 3px; }}
|
||||
.node text {{
|
||||
font-size: 10px;
|
||||
fill: #e2e8f0;
|
||||
pointer-events: none;
|
||||
text-shadow: 0 0 4px #0f172a;
|
||||
}}
|
||||
.link {{
|
||||
stroke-opacity: 0.6;
|
||||
stroke-width: 1.5px;
|
||||
}}
|
||||
.link-label {{
|
||||
font-size: 8px;
|
||||
fill: #94a3b8;
|
||||
pointer-events: none;
|
||||
}}
|
||||
#tooltip {{
|
||||
position: fixed;
|
||||
background: #1e293b;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 6px;
|
||||
padding: 10px 14px;
|
||||
font-size: 12px;
|
||||
pointer-events: none;
|
||||
max-width: 320px;
|
||||
word-break: break-all;
|
||||
z-index: 9999;
|
||||
display: none;
|
||||
}}
|
||||
#tooltip strong {{ color: #f8fafc; }}
|
||||
#tooltip .prop {{ color: #94a3b8; margin-top: 4px; }}
|
||||
#legend {{
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
right: 10px;
|
||||
background: rgba(30,41,59,0.9);
|
||||
border: 1px solid #334155;
|
||||
border-radius: 6px;
|
||||
padding: 10px 14px;
|
||||
font-size: 11px;
|
||||
}}
|
||||
#legend h3 {{ margin: 0 0 6px; font-size: 12px; }}
|
||||
.legend-row {{ display: flex; align-items: center; gap: 6px; margin: 3px 0; }}
|
||||
.legend-dot {{ width: 12px; height: 12px; border-radius: 50%; flex-shrink: 0; }}
|
||||
.legend-line {{ width: 20px; height: 2px; flex-shrink: 0; }}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="header">
|
||||
<h1>🔗 AWS Connectivity Graph</h1>
|
||||
<span class="stats" id="stat-label">Generated: {generated_at}</span>
|
||||
</div>
|
||||
<div id="controls">
|
||||
<label>Filter service:
|
||||
<select id="filter-service">
|
||||
<option value="">All services</option>
|
||||
</select>
|
||||
</label>
|
||||
<label>Link distance:
|
||||
<input type="range" id="link-distance" min="40" max="300" value="120"/>
|
||||
</label>
|
||||
<label>Charge strength:
|
||||
<input type="range" id="charge-strength" min="-800" max="-20" value="-250"/>
|
||||
</label>
|
||||
<span class="stats" id="visible-count"></span>
|
||||
</div>
|
||||
<div id="graph-container">
|
||||
<svg id="graph-svg"></svg>
|
||||
<div id="tooltip"></div>
|
||||
<div id="legend">
|
||||
<h3>Node types</h3>
|
||||
{legend_nodes_html}
|
||||
<h3 style="margin-top:8px">Edge types</h3>
|
||||
{legend_edges_html}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const RAW_NODES = {nodes_json};
|
||||
const RAW_EDGES = {edges_json};
|
||||
const NODE_COLOURS = {node_colours_json};
|
||||
const EDGE_COLOURS = {edge_colours_json};
|
||||
|
||||
// ── helpers ──────────────────────────────────────────────────────────────
|
||||
function nodeColour(d) {{
|
||||
return NODE_COLOURS[d.type] || NODE_COLOURS["default"];
|
||||
}}
|
||||
function edgeColour(d) {{
|
||||
return EDGE_COLOURS[d.edge_type] || "#94a3b8";
|
||||
}}
|
||||
function nodeRadius(d) {{
|
||||
const base = {{
|
||||
lambda_function: 9, ec2_instance: 10, vpc: 14, subnet: 8,
|
||||
security_group: 7, rds_instance: 11, load_balancer: 12,
|
||||
s3_bucket: 9, iam_role: 9
|
||||
}};
|
||||
return base[d.type] || 8;
|
||||
}}
|
||||
|
||||
// ── filter controls ───────────────────────────────────────────────────────
|
||||
const services = [...new Set(RAW_NODES.map(n => n.service))].sort();
|
||||
const sel = document.getElementById("filter-service");
|
||||
services.forEach(s => {{
|
||||
const o = document.createElement("option");
|
||||
o.value = s; o.textContent = s;
|
||||
sel.appendChild(o);
|
||||
}});
|
||||
|
||||
// ── D3 setup ──────────────────────────────────────────────────────────────
|
||||
const svg = d3.select("#graph-svg");
|
||||
const container = svg.append("g");
|
||||
|
||||
// zoom
|
||||
svg.call(
|
||||
d3.zoom().scaleExtent([0.05, 8])
|
||||
.on("zoom", e => container.attr("transform", e.transform))
|
||||
);
|
||||
|
||||
// arrowhead marker
|
||||
const defs = svg.append("defs");
|
||||
defs.append("marker")
|
||||
.attr("id", "arrow")
|
||||
.attr("viewBox", "0 -5 10 10")
|
||||
.attr("refX", 20).attr("refY", 0)
|
||||
.attr("markerWidth", 6).attr("markerHeight", 6)
|
||||
.attr("orient", "auto")
|
||||
.append("path")
|
||||
.attr("d", "M0,-5L10,0L0,5")
|
||||
.attr("fill", "#94a3b8");
|
||||
|
||||
// tooltip
|
||||
const tooltip = document.getElementById("tooltip");
|
||||
|
||||
// ── simulation ────────────────────────────────────────────────────────────
|
||||
let simulation, linkSel, nodeSel, labelSel;
|
||||
|
||||
function buildGraph(nodeFilter) {{
|
||||
// Determine which nodes to show
|
||||
const visibleNodes = nodeFilter
|
||||
? RAW_NODES.filter(n => n.service === nodeFilter)
|
||||
: RAW_NODES;
|
||||
const visibleIds = new Set(visibleNodes.map(n => n.id));
|
||||
|
||||
// Only show edges where BOTH endpoints are visible
|
||||
const visibleEdges = RAW_EDGES.filter(
|
||||
e => visibleIds.has(e.source_id) && visibleIds.has(e.target_id)
|
||||
);
|
||||
|
||||
document.getElementById("visible-count").textContent =
|
||||
`Showing ${{visibleNodes.length}} nodes · ${{visibleEdges.length}} edges`;
|
||||
|
||||
container.selectAll("*").remove();
|
||||
|
||||
if (simulation) simulation.stop();
|
||||
|
||||
const nodes = visibleNodes.map(n => ({{ ...n }}));
|
||||
const nodeIndex = Object.fromEntries(nodes.map(n => [n.id, n]));
|
||||
|
||||
const links = visibleEdges.map(e => ({{
|
||||
...e,
|
||||
source: nodeIndex[e.source_id] || e.source_id,
|
||||
target: nodeIndex[e.target_id] || e.target_id,
|
||||
}}));
|
||||
|
||||
const dist = +document.getElementById("link-distance").value;
|
||||
const charge = +document.getElementById("charge-strength").value;
|
||||
|
||||
simulation = d3.forceSimulation(nodes)
|
||||
.force("link", d3.forceLink(links).id(d => d.id).distance(dist))
|
||||
.force("charge", d3.forceManyBody().strength(charge))
|
||||
.force("center", d3.forceCenter(
|
||||
document.getElementById("graph-container").clientWidth / 2,
|
||||
document.getElementById("graph-container").clientHeight / 2
|
||||
))
|
||||
.force("collision", d3.forceCollide().radius(d => nodeRadius(d) + 6));
|
||||
|
||||
// Edges
|
||||
linkSel = container.append("g").attr("class", "links")
|
||||
.selectAll("line")
|
||||
.data(links)
|
||||
.join("line")
|
||||
.attr("class", "link")
|
||||
.attr("stroke", edgeColour)
|
||||
.attr("marker-end", "url(#arrow)");
|
||||
|
||||
// Edge labels
|
||||
labelSel = container.append("g").attr("class", "link-labels")
|
||||
.selectAll("text")
|
||||
.data(links)
|
||||
.join("text")
|
||||
.attr("class", "link-label")
|
||||
.text(d => d.label || "");
|
||||
|
||||
// Nodes
|
||||
nodeSel = container.append("g").attr("class", "nodes")
|
||||
.selectAll("g")
|
||||
.data(nodes)
|
||||
.join("g")
|
||||
.attr("class", "node")
|
||||
.call(
|
||||
d3.drag()
|
||||
.on("start", (event, d) => {{
|
||||
if (!event.active) simulation.alphaTarget(0.3).restart();
|
||||
d.fx = d.x; d.fy = d.y;
|
||||
}})
|
||||
.on("drag", (event, d) => {{ d.fx = event.x; d.fy = event.y; }})
|
||||
.on("end", (event, d) => {{
|
||||
if (!event.active) simulation.alphaTarget(0);
|
||||
d.fx = null; d.fy = null;
|
||||
}})
|
||||
)
|
||||
.on("mouseover", (event, d) => {{
|
||||
const props = Object.entries(d.properties || {{}})
|
||||
.map(([k, v]) => `<div class="prop"><b>${{k}}</b>: ${{v}}</div>`)
|
||||
.join("");
|
||||
tooltip.innerHTML = `
|
||||
<strong>${{d.name}}</strong>
|
||||
<div class="prop"><b>type</b>: ${{d.type}}</div>
|
||||
<div class="prop"><b>service</b>: ${{d.service}}</div>
|
||||
<div class="prop"><b>region</b>: ${{d.region}}</div>
|
||||
<div class="prop"><b>account</b>: ${{d.account_id}}</div>
|
||||
<div class="prop" style="word-break:break-all"><b>arn</b>: ${{d.id}}</div>
|
||||
${{props}}
|
||||
`;
|
||||
tooltip.style.display = "block";
|
||||
tooltip.style.left = (event.clientX + 12) + "px";
|
||||
tooltip.style.top = (event.clientY - 10) + "px";
|
||||
}})
|
||||
.on("mousemove", event => {{
|
||||
tooltip.style.left = (event.clientX + 12) + "px";
|
||||
tooltip.style.top = (event.clientY - 10) + "px";
|
||||
}})
|
||||
.on("mouseout", () => {{ tooltip.style.display = "none"; }});
|
||||
|
||||
nodeSel.append("circle")
|
||||
.attr("r", nodeRadius)
|
||||
.attr("fill", nodeColour);
|
||||
|
||||
nodeSel.append("text")
|
||||
.attr("dx", d => nodeRadius(d) + 3)
|
||||
.attr("dy", "0.35em")
|
||||
.text(d => d.name.length > 24 ? d.name.slice(0, 22) + "…" : d.name);
|
||||
|
||||
simulation.on("tick", () => {{
|
||||
linkSel
|
||||
.attr("x1", d => d.source.x)
|
||||
.attr("y1", d => d.source.y)
|
||||
.attr("x2", d => d.target.x)
|
||||
.attr("y2", d => d.target.y);
|
||||
|
||||
labelSel
|
||||
.attr("x", d => (d.source.x + d.target.x) / 2)
|
||||
.attr("y", d => (d.source.y + d.target.y) / 2);
|
||||
|
||||
nodeSel.attr("transform", d => `translate(${{d.x}},${{d.y}})`);
|
||||
}});
|
||||
}}
|
||||
|
||||
// Initial render
|
||||
buildGraph(null);
|
||||
|
||||
// Filter change
|
||||
sel.addEventListener("change", () => buildGraph(sel.value || null));
|
||||
|
||||
// Simulation control sliders — restart on change
|
||||
document.getElementById("link-distance").addEventListener("input", () => buildGraph(sel.value || null));
|
||||
document.getElementById("charge-strength").addEventListener("input", () => buildGraph(sel.value || null));
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
|
||||
def _build_legend_html(colours: dict, shape: str) -> str:
|
||||
rows = []
|
||||
for key, colour in sorted(colours.items()):
|
||||
if shape == "dot":
|
||||
rows.append(
|
||||
f'<div class="legend-row">'
|
||||
f'<div class="legend-dot" style="background:{colour}"></div>'
|
||||
f"<span>{key}</span></div>"
|
||||
)
|
||||
else:
|
||||
rows.append(
|
||||
f'<div class="legend-row">'
|
||||
f'<div class="legend-line" style="background:{colour}"></div>'
|
||||
f"<span>{key}</span></div>"
|
||||
)
|
||||
return "\n".join(rows)
|
||||
|
||||
|
||||
def write_html(graph: ConnectivityGraph, file_path: str) -> None:
    """Render the graph as a self-contained interactive HTML page.

    Args:
        graph: the ConnectivityGraph to render.
        file_path: destination path; parent directories are created on demand.

    The page embeds the node/edge data as JSON inside the module's
    _HTML_TEMPLATE. Errors are logged rather than raised so output generation
    never aborts a scan.
    """
    try:
        parent_dir = os.path.dirname(file_path)
        # os.makedirs("") raises FileNotFoundError, so only create the parent
        # when the path actually has a directory component.
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)

        nodes_json = json.dumps(
            [
                {
                    "id": n.id,
                    "type": n.type,
                    "name": n.name,
                    "service": n.service,
                    "region": n.region,
                    "account_id": n.account_id,
                    "properties": n.properties,
                }
                for n in graph.nodes
            ],
            indent=None,
            default=str,
        )
        edges_json = json.dumps(
            [
                {
                    "source_id": e.source_id,
                    "target_id": e.target_id,
                    "edge_type": e.edge_type,
                    "label": e.label or "",
                }
                for e in graph.edges
            ],
            indent=None,
            default=str,
        )

        html = _HTML_TEMPLATE.format(
            generated_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M UTC"),
            nodes_json=nodes_json,
            edges_json=edges_json,
            node_colours_json=json.dumps(_NODE_COLOURS),
            edge_colours_json=json.dumps(_EDGE_COLOURS),
            legend_nodes_html=_build_legend_html(_NODE_COLOURS, "dot"),
            legend_edges_html=_build_legend_html(_EDGE_COLOURS, "line"),
        )

        with open(file_path, "w", encoding="utf-8") as fh:
            fh.write(html)

        logger.info(f"Inventory graph HTML written to {file_path}")
    except Exception as e:
        logger.error(
            f"inventory_output.write_html: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Convenience entry-point called from __main__.py
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def generate_inventory_outputs(output_path: str) -> None:
    """
    Build the connectivity graph from currently-loaded service clients and write
    both JSON and HTML outputs.

    Args:
        output_path: base file path WITHOUT extension, e.g.
            "output/prowler-output-20240101120000".
            The function appends .inventory.json and .inventory.html.
    """
    # Imported lazily to avoid a module-level import cycle with graph_builder.
    from lib.graph_builder import build_graph

    graph = build_graph()

    # An empty graph usually means no service client was ever loaded; warn,
    # but still emit the (empty) output files.
    if not graph.nodes:
        logger.warning(
            "Inventory graph: no nodes discovered. "
            "Make sure at least one AWS service was scanned before generating the inventory."
        )

    for suffix, writer in ((".inventory.json", write_json), (".inventory.html", write_html)):
        writer(graph, f"{output_path}{suffix}")
|
||||
@@ -0,0 +1,71 @@
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
|
||||
@dataclass
|
||||
class ResourceNode:
|
||||
"""
|
||||
Represents a single AWS resource as a node in the connectivity graph.
|
||||
|
||||
id : globally unique identifier — always the resource ARN
|
||||
type : coarse resource type used for grouping/colour, e.g. "lambda_function"
|
||||
name : human-readable label shown on the graph
|
||||
service : AWS service name, e.g. "lambda", "ec2", "rds"
|
||||
region : AWS region the resource lives in
|
||||
account_id: AWS account ID
|
||||
properties: additional resource-specific metadata (runtime, vpc_id, etc.)
|
||||
"""
|
||||
|
||||
id: str
|
||||
type: str
|
||||
name: str
|
||||
service: str
|
||||
region: str
|
||||
account_id: str
|
||||
properties: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ResourceEdge:
|
||||
"""
|
||||
Represents a directional relationship between two resource nodes.
|
||||
|
||||
source_id : ARN of the source node
|
||||
target_id : ARN of the target node
|
||||
edge_type : semantic type of the relationship, e.g.:
|
||||
"network" – resources share a network path (VPC/subnet/SG)
|
||||
"iam" – IAM trust or permission relationship
|
||||
"triggers" – one resource can invoke another (event source → Lambda)
|
||||
"data_flow" – data is written/read (Lambda → SQS dead-letter queue)
|
||||
"depends_on" – soft dependency (Lambda layer, subnet belongs to VPC)
|
||||
"routes_to" – traffic routing (LB → target)
|
||||
"encrypts" – KMS key encrypts the resource
|
||||
label : optional short label rendered on the edge in the HTML graph
|
||||
"""
|
||||
|
||||
source_id: str
|
||||
target_id: str
|
||||
edge_type: str
|
||||
label: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ConnectivityGraph:
|
||||
"""
|
||||
Container for the full inventory connectivity graph.
|
||||
|
||||
nodes: all discovered resource nodes
|
||||
edges: all discovered edges between nodes
|
||||
"""
|
||||
|
||||
nodes: List[ResourceNode] = field(default_factory=list)
|
||||
edges: List[ResourceEdge] = field(default_factory=list)
|
||||
|
||||
def add_node(self, node: ResourceNode) -> None:
|
||||
self.nodes.append(node)
|
||||
|
||||
def add_edge(self, edge: ResourceEdge) -> None:
|
||||
self.edges.append(edge)
|
||||
|
||||
def node_ids(self) -> set:
|
||||
return {n.id for n in self.nodes}
|
||||
@@ -73,7 +73,7 @@ secrets:
|
||||
DJANGO_SECRETS_ENCRYPTION_KEY:
|
||||
DJANGO_BROKER_VISIBILITY_TIMEOUT: 86400
|
||||
|
||||
releaseConfigRoot: /home/prowler/.cache/pypoetry/virtualenvs/prowler-api-NnJNioq7-py3.12/lib/python3.12/site-packages/
|
||||
releaseConfigRoot: /home/prowler/.venv/lib/python3.12/site-packages/
|
||||
releaseConfigPath: prowler/config/config.yaml
|
||||
|
||||
mainConfig:
|
||||
|
||||
@@ -48,10 +48,16 @@ services:
|
||||
- path: .env
|
||||
required: false
|
||||
ports:
|
||||
- ${UI_PORT:-3000}:${UI_PORT:-3000}
|
||||
- ${UI_PORT:-3000}:3000
|
||||
depends_on:
|
||||
mcp-server:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "wget -q -O /dev/null http://127.0.0.1:3000/api/health || exit 1"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
|
||||
postgres:
|
||||
image: postgres:16.3-alpine3.20
|
||||
|
||||
@@ -467,7 +467,7 @@ Effective headers and section titles enhance document readability and structure,
|
||||
|
||||
* **Example:**
|
||||
* How to Clone and Install Prowler from GitHub (header: Title case)
|
||||
* How to install poetry dependencies (subheading: Sentence case)
|
||||
* How to install uv dependencies (subheading: Sentence case)
|
||||
5. **Using Keywords in Headers**
|
||||
Headers should include relevant keywords to improve document searchability:
|
||||
* **Good:** Scanning AWS Accounts in Parallel
|
||||
|
||||
@@ -10,10 +10,10 @@ This repository contains the Prowler Open Source documentation powered by [Mintl
|
||||
|
||||
## Local Development
|
||||
|
||||
Install the [Mintlify CLI](https://www.npmjs.com/package/mint) to preview documentation changes locally:
|
||||
Install a reviewed version of the [Mintlify CLI](https://www.npmjs.com/package/mint) to preview documentation changes locally:
|
||||
|
||||
```bash
|
||||
npm i -g mint
|
||||
npm install --global mint@4.2.560
|
||||
```
|
||||
|
||||
Run the following command at the root of your documentation (where `mint.json` is located):
|
||||
|
||||
@@ -20,8 +20,8 @@ The most common high level steps to create a new check are:
|
||||
3. Create a check-specific folder. The path should follow this pattern: `prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>`. Adhere to the [Naming Format for Checks](#naming-format-for-checks).
|
||||
4. Populate the folder with files as specified in [File Creation](#file-creation).
|
||||
5. Run the check locally to ensure it works as expected. For checking you can use the CLI in the next way:
|
||||
- To ensure the check has been detected by Prowler: `poetry run python prowler-cli.py <provider> --list-checks | grep <check_name>`.
|
||||
- To run the check, to find possible issues: `poetry run python prowler-cli.py <provider> --log-level ERROR --verbose --check <check_name>`.
|
||||
- To ensure the check has been detected by Prowler: `uv run python prowler-cli.py <provider> --list-checks | grep <check_name>`.
|
||||
- To run the check, to find possible issues: `uv run python prowler-cli.py <provider> --log-level ERROR --verbose --check <check_name>`.
|
||||
6. Create comprehensive tests for the check that cover multiple scenarios including both PASS (compliant) and FAIL (non-compliant) cases. For detailed information about test structure and implementation guidelines, refer to the [Testing](/developer-guide/unit-testing) documentation.
|
||||
7. If the check and its corresponding tests are working as expected, you can submit a PR to Prowler.
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ This includes the [AGENTS.md](https://github.com/prowler-cloud/prowler/blob/mast
|
||||
<Steps>
|
||||
<Step title="Install Mintlify CLI">
|
||||
```bash
|
||||
npm i -g mint
|
||||
npm install --global mint@4.2.560
|
||||
```
|
||||
For detailed instructions, check the [Mintlify documentation](https://www.mintlify.com/docs/installation).
|
||||
</Step>
|
||||
|
||||
@@ -80,7 +80,7 @@ Before proceeding, ensure the following:
|
||||
|
||||
- Git is installed.
|
||||
- Python 3.10 or higher is installed.
|
||||
- `poetry` is installed to manage dependencies.
|
||||
- `uv` is installed to manage dependencies.
|
||||
|
||||
### Forking the Prowler Repository
|
||||
|
||||
@@ -97,28 +97,21 @@ cd prowler
|
||||
|
||||
### Dependency Management and Environment Isolation
|
||||
|
||||
To prevent conflicts between environments, we recommend using `poetry`, a Python dependency management solution. Install it by following the [instructions](https://python-poetry.org/docs/#installation).
|
||||
To prevent conflicts between environments, we recommend using [`uv`](https://docs.astral.sh/uv/), a fast Python package and project manager. Install it by following the [official instructions](https://docs.astral.sh/uv/getting-started/installation/).
|
||||
|
||||
### Installing Dependencies
|
||||
|
||||
To install all required dependencies, including those needed for development, run:
|
||||
|
||||
```
|
||||
poetry install --with dev
|
||||
eval $(poetry env activate)
|
||||
uv sync
|
||||
source .venv/bin/activate
|
||||
```
|
||||
|
||||
<Warning>
|
||||
Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
|
||||
If your poetry version is below 2.0.0 you must keep using `poetry shell` to activate your environment.
|
||||
In case you have any doubts, consult the [Poetry environment activation guide](https://python-poetry.org/docs/managing-environments/#activating-the-environment).
|
||||
|
||||
</Warning>
|
||||
|
||||
|
||||
### Pre-Commit Hooks
|
||||
|
||||
This repository uses Git pre-commit hooks managed by the [prek](https://prek.j178.dev/) tool, it is installed with `poetry install --with dev`. Next, run the following command in the root of this repository:
|
||||
This repository uses Git pre-commit hooks managed by the [prek](https://prek.j178.dev/) tool, it is installed with `uv sync`. Next, run the following command in the root of this repository:
|
||||
|
||||
```shell
|
||||
prek install
|
||||
@@ -155,11 +148,11 @@ Once installed, TruffleHog runs before each push and blocks the operation when v
|
||||
Before merging pull requests, several automated checks and utilities ensure code security and updated dependencies:
|
||||
|
||||
<Note>
|
||||
These should have been already installed if `poetry install --with dev` was already run.
|
||||
These should have been already installed if `uv sync` was already run.
|
||||
|
||||
</Note>
|
||||
- [`bandit`](https://pypi.org/project/bandit/) for code security review.
|
||||
- [`safety`](https://pypi.org/project/safety/) and [`dependabot`](https://github.com/features/security) for dependencies.
|
||||
- [`osv-scanner`](https://github.com/google/osv-scanner) and [`dependabot`](https://github.com/features/security) for dependencies.
|
||||
- [`hadolint`](https://github.com/hadolint/hadolint) and [`dockle`](https://github.com/goodwithtech/dockle) for container security.
|
||||
- [`Snyk`](https://docs.snyk.io/integrations/snyk-container-integrations/container-security-with-docker-hub-integration) for container security in Docker Hub.
|
||||
- [`clair`](https://github.com/quay/clair) for container security in Amazon ECR.
|
||||
@@ -183,7 +176,7 @@ These resources help ensure that AI-assisted contributions maintain consistency
|
||||
|
||||
All dependencies are listed in the `pyproject.toml` file.
|
||||
|
||||
The SDK keeps direct dependencies pinned to exact versions, while `poetry.lock` records the full resolved dependency tree and the artifact hashes for every package. Use `poetry install` from the lock file instead of ad-hoc `pip` installs when you need a reproducible environment.
|
||||
The SDK keeps direct dependencies pinned to exact versions, while `uv.lock` records the full resolved dependency tree and the artifact hashes for every package. Use `uv sync` from the lock file instead of ad-hoc `pip` installs when you need a reproducible environment.
|
||||
|
||||
For proper code documentation, refer to the following and follow the code documentation practices presented there: [Google Python Style Guide - Comments and Docstrings](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings).
|
||||
|
||||
@@ -209,8 +202,8 @@ prowler/
|
||||
├── contrib/ # Community-contributed scripts or modules
|
||||
├── kubernetes/ # Kubernetes deployment files
|
||||
├── .github/ # GitHub-related files (workflows, issue templates, etc.)
|
||||
├── pyproject.toml # Python project configuration (Poetry)
|
||||
├── poetry.lock # Poetry lock file
|
||||
├── pyproject.toml # Python project configuration (uv)
|
||||
├── uv.lock # uv lock file
|
||||
├── README.md # Project overview and getting started
|
||||
├── Makefile # Common development commands
|
||||
├── Dockerfile # SDK Docker container
|
||||
|
||||
@@ -1277,10 +1277,12 @@ Dependencies ensure that your provider's required libraries are available when P
|
||||
**File:** `pyproject.toml`
|
||||
|
||||
```toml
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.10,<3.13"
|
||||
# ... other dependencies
|
||||
your-sdk-library = "^1.0.0" # Add your SDK dependency
|
||||
[project]
|
||||
requires-python = ">=3.10,<3.13"
|
||||
dependencies = [
|
||||
# ... other dependencies
|
||||
"your-sdk-library>=1.0.0,<2.0.0", # Add your SDK dependency
|
||||
]
|
||||
```
|
||||
|
||||
#### Step 18: Create Tests
|
||||
|
||||
@@ -228,7 +228,7 @@ Each requirement links to the Prowler checks that, together, produce a PASS or F
|
||||
To discover available checks, run:
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> --list-checks
|
||||
uv run python prowler-cli.py <provider> --list-checks
|
||||
```
|
||||
|
||||
## Supporting Multiple Providers
|
||||
@@ -295,7 +295,7 @@ Follow the steps below before opening a pull request.
|
||||
### 1. Run the Compliance Model Validator
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> --list-compliance
|
||||
uv run python prowler-cli.py <provider> --list-compliance
|
||||
```
|
||||
|
||||
The framework must appear in the output. A validation error indicates a schema mismatch between the JSON file and the attribute model.
|
||||
@@ -303,7 +303,7 @@ The framework must appear in the output. A validation error indicates a schema m
|
||||
### 2. Run a Scan Filtered by the Framework
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> \
|
||||
uv run python prowler-cli.py <provider> \
|
||||
--compliance <framework>_<version>_<provider> \
|
||||
--log-level ERROR
|
||||
```
|
||||
@@ -336,7 +336,7 @@ Compliance contributions require two layers of tests.
|
||||
Run the suite with:
|
||||
|
||||
```bash
|
||||
poetry run pytest -n auto tests/lib/check/universal_compliance_models_test.py \
|
||||
uv run pytest -n auto tests/lib/check/universal_compliance_models_test.py \
|
||||
tests/lib/outputs/compliance/
|
||||
```
|
||||
|
||||
@@ -348,8 +348,8 @@ Before opening the pull request:
|
||||
|
||||
1. Run the complete QA pipeline:
|
||||
```bash
|
||||
poetry run pre-commit run --all-files
|
||||
poetry run pytest -n auto
|
||||
uv run pre-commit run --all-files
|
||||
uv run pytest -n auto
|
||||
```
|
||||
2. Add a changelog entry under the `### 🚀 Added` section of `prowler/CHANGELOG.md`, describing the new framework and the providers it covers.
|
||||
3. Follow the [Pull Request Template](https://github.com/prowler-cloud/prowler/blob/master/.github/pull_request_template.md) and set the PR title using Conventional Commits, for example `feat(compliance): add My Framework 1.0 for AWS`.
|
||||
|
||||
@@ -23,7 +23,7 @@ Within this folder the following files are also to be created:
|
||||
- `<new_service_name>_service.py` – Contains all the logic and API calls of the service.
|
||||
- `<new_service_name>_client_.py` – Contains the initialization of the freshly created service's class so that the checks can use it.
|
||||
|
||||
Once the files are create, you can check that the service has been created by running the following command: `poetry run python prowler-cli.py <provider> --list-services | grep <new_service_name>`.
|
||||
Once the files are create, you can check that the service has been created by running the following command: `uv run python prowler-cli.py <provider> --list-services | grep <new_service_name>`.
|
||||
|
||||
## Service Structure and Initialisation
|
||||
|
||||
|
||||
@@ -332,6 +332,13 @@
|
||||
"user-guide/providers/vercel/getting-started-vercel",
|
||||
"user-guide/providers/vercel/authentication"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Okta",
|
||||
"pages": [
|
||||
"user-guide/providers/okta/getting-started-okta",
|
||||
"user-guide/providers/okta/authentication"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
@@ -10,7 +10,7 @@ Complete reference guide for all tools available in the Prowler MCP Server. Tool
|
||||
|----------|------------|------------------------|
|
||||
| Prowler Hub | 10 tools | No |
|
||||
| Prowler Documentation | 2 tools | No |
|
||||
| Prowler Cloud/App | 29 tools | Yes |
|
||||
| Prowler Cloud/App | 32 tools | Yes |
|
||||
|
||||
## Tool Naming Convention
|
||||
|
||||
@@ -36,6 +36,14 @@ Tools for searching, viewing, and analyzing security findings across all cloud p
|
||||
- **`prowler_app_get_finding_details`** - Get comprehensive details about a specific finding including remediation guidance, check metadata, and resource relationships
|
||||
- **`prowler_app_get_findings_overview`** - Get aggregate statistics and trends about security findings as a markdown report
|
||||
|
||||
### Finding Groups Management
|
||||
|
||||
Tools for listing finding groups aggregated by check ID, viewing complete group counters, and drilling down into affected resources.
|
||||
|
||||
- **`prowler_app_list_finding_groups`** - List latest or historical finding groups with filters for provider, region, service, resource, category, check, severity, status, muted state, delta, date range, and sorting
|
||||
- **`prowler_app_get_finding_group_details`** - Get complete details for a specific finding group including counters, description, timestamps, and impacted providers
|
||||
- **`prowler_app_list_finding_group_resources`** - List actionable unmuted resources affected by a finding group by default, including nested resource and provider data plus the `finding_id` for remediation details. Set `include_muted` to include suppressed resources
|
||||
|
||||
### Provider Management
|
||||
|
||||
Tools for managing cloud provider connections in Prowler.
|
||||
|
||||
@@ -44,13 +44,21 @@ Choose the configuration based on your deployment:
|
||||
|
||||
<Tab title="Generic without Native HTTP Support">
|
||||
**Configuration:**
|
||||
<Warning>
|
||||
Avoid configuring MCP clients to run `npx mcp-remote` directly. `npx` can download and execute a new package version on each run. Install a reviewed version of `mcp-remote` in a dedicated local workspace, then point the MCP client to the installed binary.
|
||||
</Warning>
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "npx",
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp", // or your self-hosted Prowler MCP Server URL
|
||||
"--header",
|
||||
"Authorization: Bearer ${PROWLER_APP_API_KEY}"
|
||||
@@ -72,14 +80,20 @@ Choose the configuration based on your deployment:
|
||||
2. Go to "Developer" tab
|
||||
3. Click in "Edit Config" button
|
||||
4. Edit the `claude_desktop_config.json` file with your favorite editor
|
||||
5. Add the following configuration:
|
||||
5. Install a reviewed version of `mcp-remote` in a dedicated local workspace:
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
6. Add the following configuration:
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "npx",
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp",
|
||||
"--header",
|
||||
"Authorization: Bearer ${PROWLER_APP_API_KEY}"
|
||||
|
||||
@@ -37,8 +37,8 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
_Requirements_:
|
||||
|
||||
- `git` installed.
|
||||
- `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
- `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
|
||||
- `uv` installed: [uv installation](https://docs.astral.sh/uv/getting-started/installation/).
|
||||
- `pnpm` installed through [Corepack](https://pnpm.io/installation#using-corepack) or the standalone [pnpm installation](https://pnpm.io/installation).
|
||||
- `Docker Compose` installed: https://docs.docker.com/compose/install/.
|
||||
|
||||
<Warning>
|
||||
@@ -49,8 +49,8 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
```bash
|
||||
git clone https://github.com/prowler-cloud/prowler \
|
||||
cd prowler/api \
|
||||
poetry install \
|
||||
eval $(poetry env activate) \
|
||||
uv sync \
|
||||
source .venv/bin/activate \
|
||||
set -a \
|
||||
source .env \
|
||||
docker compose up postgres valkey -d \
|
||||
@@ -59,11 +59,6 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
gunicorn -c config/guniconf.py config.wsgi:application
|
||||
```
|
||||
|
||||
<Warning>
|
||||
Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
|
||||
|
||||
If your poetry version is below 2.0.0 you must keep using `poetry shell` to activate your environment. In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment
|
||||
</Warning>
|
||||
> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.
|
||||
|
||||
_Commands to run the API Worker_:
|
||||
@@ -71,8 +66,8 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
```bash
|
||||
git clone https://github.com/prowler-cloud/prowler \
|
||||
cd prowler/api \
|
||||
poetry install \
|
||||
eval $(poetry env activate) \
|
||||
uv sync \
|
||||
source .venv/bin/activate \
|
||||
set -a \
|
||||
source .env \
|
||||
cd src/backend \
|
||||
@@ -84,8 +79,8 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
```bash
|
||||
git clone https://github.com/prowler-cloud/prowler \
|
||||
cd prowler/api \
|
||||
poetry install \
|
||||
eval $(poetry env activate) \
|
||||
uv sync \
|
||||
source .venv/bin/activate \
|
||||
set -a \
|
||||
source .env \
|
||||
cd src/backend \
|
||||
@@ -97,9 +92,11 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
```bash
|
||||
git clone https://github.com/prowler-cloud/prowler \
|
||||
cd prowler/ui \
|
||||
npm install \
|
||||
npm run build \
|
||||
npm start
|
||||
corepack enable \
|
||||
corepack install \
|
||||
pnpm install --frozen-lockfile \
|
||||
pnpm run build \
|
||||
pnpm start
|
||||
```
|
||||
|
||||
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
|
||||
|
||||
@@ -68,7 +68,7 @@ To install Prowler as a Python package, use `Python >= 3.10, <= 3.12`. Prowler i
|
||||
_Requirements for Developers_:
|
||||
|
||||
* `git`
|
||||
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* `uv` installed: [uv installation](https://docs.astral.sh/uv/getting-started/installation/).
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
|
||||
_Commands_:
|
||||
@@ -76,8 +76,8 @@ To install Prowler as a Python package, use `Python >= 3.10, <= 3.12`. Prowler i
|
||||
```bash
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
poetry install
|
||||
poetry run python prowler-cli.py -v
|
||||
uv sync
|
||||
uv run python prowler-cli.py -v
|
||||
```
|
||||
|
||||
<Note>
|
||||
|
||||
@@ -47,11 +47,12 @@ Prowler supports a wide range of providers organized by category:
|
||||
| Provider | Support | Audit Scope/Entities | Interface |
|
||||
| ----------------------------------------------------------------------------------------- | -------- | ---------------------------- | ------------ |
|
||||
| [GitHub](/user-guide/providers/github/getting-started-github) | Official | Organizations / Repositories | UI, API, CLI |
|
||||
| [Google Workspace](/user-guide/providers/googleworkspace/getting-started-googleworkspace) | Official | Domains | CLI |
|
||||
| [Google Workspace](/user-guide/providers/googleworkspace/getting-started-googleworkspace) | Official | Domains | UI, API, CLI |
|
||||
| [LLM](/user-guide/providers/llm/getting-started-llm) | Official | Models | CLI |
|
||||
| [M365](/user-guide/providers/microsoft365/getting-started-m365) | Official | Tenants | UI, API, CLI |
|
||||
| [MongoDB Atlas](/user-guide/providers/mongodbatlas/getting-started-mongodbatlas) | Official | Organizations | UI, API, CLI |
|
||||
| [Vercel](/user-guide/providers/vercel/getting-started-vercel) | Official | Teams / Projects | CLI |
|
||||
| [Okta](/user-guide/providers/okta/getting-started-okta) | Official | Organizations | CLI |
|
||||
| [Vercel](/user-guide/providers/vercel/getting-started-vercel) | Official | Teams / Projects | UI, API, CLI |
|
||||
|
||||
### Kubernetes
|
||||
|
||||
|
||||
@@ -39,10 +39,11 @@ Dependencies are continuously monitored for known vulnerabilities with timely up
|
||||
|
||||
### Dependency Vulnerability Scanning
|
||||
|
||||
- **Safety:** Scans Python dependencies against known vulnerability databases
|
||||
- Runs on every commit via pre-commit hooks
|
||||
- Integrated into CI/CD for SDK and API
|
||||
- Configured with selective ignores for tracked exceptions
|
||||
- **osv-scanner:** Scans lockfiles against the [OSV.dev](https://osv.dev) vulnerability database
|
||||
- Runs in CI on every pull request and push for SDK, API, and UI
|
||||
- Fails the build on `HIGH`, `CRITICAL`, and `UNKNOWN` severity findings
|
||||
- Posts a per-lockfile report as a PR comment
|
||||
- Per-vulnerability ignores (with reason and expiry) live in `osv-scanner.toml` at the repo root
|
||||
- **Trivy:** Multi-purpose scanner for containers and dependencies
|
||||
- Scans all container images (UI, API, SDK, MCP Server)
|
||||
- Checks for vulnerabilities in OS packages and application dependencies
|
||||
|
||||
@@ -158,6 +158,15 @@ The following list includes all the Vercel checks with configurable variables th
|
||||
| `team_member_role_least_privilege` | `max_owners` | Integer |
|
||||
| `team_no_stale_invitations` | `stale_invitation_threshold_days` | Integer |
|
||||
|
||||
## Okta
|
||||
|
||||
### Configurable Checks
|
||||
The following list includes all the Okta checks with configurable variables that can be changed in the configuration YAML file:
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|------------------------------------|---------|
|
||||
| `signon_global_session_idle_timeout_15min` | `okta_max_session_idle_minutes` | Integer |
|
||||
|
||||
## Config YAML File Structure
|
||||
|
||||
<Note>
|
||||
|
||||
@@ -18,9 +18,11 @@ prowler <provider> --scan-unused-services
|
||||
|
||||
#### ACM (AWS Certificate Manager)
|
||||
|
||||
Certificates stored in ACM without active usage in AWS resources are excluded. By default, Prowler only scans actively used certificates. Unused certificates will not be checked if they are expired, if their expiring date is near or if they are good.
|
||||
Certificates stored in ACM without active usage in AWS resources are excluded. By default, Prowler only scans actively used certificates. Unused certificates are not evaluated for expiration, transparency logging, or weak key algorithms.
|
||||
|
||||
- `acm_certificates_expiration_check`
|
||||
- `acm_certificates_transparency_logs_enabled`
|
||||
- `acm_certificates_with_secure_key_algorithms`
|
||||
|
||||
#### Athena
|
||||
|
||||
@@ -28,6 +30,13 @@ Upon AWS account creation, Athena provisions a default primary workgroup for the
|
||||
|
||||
- `athena_workgroup_encryption`
|
||||
- `athena_workgroup_enforce_configuration`
|
||||
- `athena_workgroup_logging_enabled`
|
||||
|
||||
#### Amazon Bedrock
|
||||
|
||||
Generative AI workloads benefit from private VPC endpoint connectivity to keep prompt and model traffic off the public internet. Prowler only evaluates this configuration for VPCs in use (with active ENIs).
|
||||
|
||||
- `bedrock_vpc_endpoints_configured`
|
||||
|
||||
#### AWS CloudTrail
|
||||
|
||||
@@ -38,15 +47,23 @@ AWS CloudTrail should have at least one trail with a data event to record all S3
|
||||
|
||||
#### AWS Elastic Compute Cloud (EC2)
|
||||
|
||||
If Amazon Elastic Block Store (EBS) default encyption is not enabled, sensitive data at rest will remain unprotected in EC2. However, Prowler will only generate a finding if EBS volumes exist where default encryption could be enforced.
|
||||
If Amazon Elastic Block Store (EBS) default encryption is not enabled, sensitive data at rest remains unprotected in EC2. Prowler only generates a finding if EBS volumes exist where default encryption could be enforced.
|
||||
|
||||
- `ec2_ebs_default_encryption`
|
||||
|
||||
**EBS Snapshot Public Access**: Public EBS snapshots can leak data. Prowler only evaluates the account-level block setting if EBS snapshots exist in the account.
|
||||
|
||||
- `ec2_ebs_snapshot_account_block_public_access`
|
||||
|
||||
**EC2 Instance Metadata Service (IMDS)**: Enforcing IMDSv2 at the account level mitigates SSRF-based credential theft. Prowler only evaluates the account-level setting if EC2 instances exist in the account.
|
||||
|
||||
- `ec2_instance_account_imdsv2_enabled`
|
||||
|
||||
**Security Groups**: Misconfigured security groups increase the attack surface.
|
||||
|
||||
Prowler scans only attached security groups to report vulnerabilities in actively used configurations. Applies to:
|
||||
|
||||
- 15 security group-related checks, including open ports and ingress/egress traffic rules.
|
||||
- 20 security group-related checks, including open ports and ingress/egress traffic rules.
|
||||
|
||||
- `ec2_securitygroup_allow_ingress_from_internet_to_port_X`
|
||||
- `ec2_securitygroup_default_restrict_traffic`
|
||||
@@ -56,6 +73,18 @@ Prowler scans only attached security groups to report vulnerabilities in activel
|
||||
|
||||
- `ec2_networkacl_allow_ingress_X_port`
|
||||
|
||||
#### AWS Identity and Access Management (IAM)
|
||||
|
||||
Customer-managed IAM policies that are not attached to any user, group, or role grant no effective permissions until a principal is bound to them. Prowler treats such policies as dormant by default and skips the content-evaluation checks below when `--scan-unused-services` is not set. Enable the flag to surface findings on unattached policies as well.
|
||||
|
||||
- `iam_policy_allows_privilege_escalation`
|
||||
- `iam_policy_no_full_access_to_cloudtrail`
|
||||
- `iam_policy_no_full_access_to_kms`
|
||||
- `iam_policy_no_wildcard_marketplace_subscribe`
|
||||
- `iam_no_custom_policy_permissive_role_assumption`
|
||||
|
||||
The dedicated `iam_customer_unattached_policy_no_administrative_privileges` check still inspects unattached policies regardless of the flag, since its purpose is to highlight dormant administrator privileges.
|
||||
|
||||
#### AWS Glue
|
||||
|
||||
AWS Glue best practices recommend encrypting metadata and connection passwords in Data Catalogs.
|
||||
@@ -71,6 +100,12 @@ Amazon Inspector is a vulnerability discovery service that automates continuous
|
||||
|
||||
- `inspector2_is_enabled`
|
||||
|
||||
#### AWS Key Management Service (KMS)
|
||||
|
||||
Customer managed Customer Master Keys (CMKs) in the `Disabled` state cannot be used for cryptographic operations, so Prowler skips the unintentional-deletion check on them by default. Enable the flag to evaluate disabled CMKs as well.
|
||||
|
||||
- `kms_cmk_not_deleted_unintentionally`
|
||||
|
||||
#### Amazon Macie
|
||||
|
||||
Amazon Macie leverages machine learning to automatically discover, classify, and protect sensitive data in S3 buckets. Prowler only generates findings if Macie is disabled and there are S3 buckets in the AWS account.
|
||||
@@ -83,6 +118,15 @@ A network firewall is essential for monitoring and controlling traffic within a
|
||||
|
||||
- `networkfirewall_in_all_vpc`
|
||||
|
||||
#### Amazon Relational Database Service (RDS)
|
||||
|
||||
RDS event subscriptions notify operators of critical database events. Prowler only evaluates these subscription checks when RDS clusters or instances exist in the account.
|
||||
|
||||
- `rds_cluster_critical_event_subscription`
|
||||
- `rds_instance_critical_event_subscription`
|
||||
- `rds_instance_event_subscription_parameter_groups`
|
||||
- `rds_instance_event_subscription_security_groups`
|
||||
|
||||
#### Amazon S3
|
||||
|
||||
To prevent unintended data exposure:
|
||||
@@ -99,6 +143,10 @@ VPC settings directly impact network security and availability.
|
||||
|
||||
- `vpc_flow_logs_enabled`
|
||||
|
||||
- VPC Endpoint for EC2: Routes EC2 API calls through a private VPC endpoint to keep traffic off the public internet. Prowler only evaluates this configuration for VPCs in use, i.e., those with active ENIs.
|
||||
|
||||
- `vpc_endpoint_for_ec2_enabled`
|
||||
|
||||
- VPC Subnet Public IP Restrictions: Prevent unintended exposure of resources to the internet. Prowler only checks this configuration for VPCs in use, i.e., those with active ENIs.
|
||||
|
||||
- `vpc_subnet_no_public_ip_by_default`
|
||||
|
||||
@@ -27,7 +27,7 @@ To download results from AWS CloudShell:
|
||||
|
||||
## Cloning Prowler from GitHub
|
||||
|
||||
Due to the limited storage in AWS CloudShell's home directory, installing Poetry dependencies for running Prowler from GitHub can be problematic.
|
||||
Due to the limited storage in AWS CloudShell's home directory, installing uv dependencies for running Prowler from GitHub can be problematic.
|
||||
|
||||
The following workaround ensures successful installation:
|
||||
|
||||
@@ -37,17 +37,9 @@ adduser prowler
|
||||
su prowler
|
||||
git clone https://github.com/prowler-cloud/prowler.git
|
||||
cd prowler
|
||||
pip install poetry
|
||||
mkdir /tmp/poetry
|
||||
poetry config cache-dir /tmp/poetry
|
||||
eval $(poetry env activate)
|
||||
poetry install
|
||||
pip install uv
|
||||
mkdir /tmp/uv-cache
|
||||
UV_CACHE_DIR=/tmp/uv-cache uv sync
|
||||
source .venv/bin/activate
|
||||
python prowler-cli.py -v
|
||||
```
|
||||
|
||||
<Warning>
|
||||
Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
|
||||
|
||||
If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment. For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
|
||||
|
||||
</Warning>
|
||||
|
||||
@@ -44,6 +44,15 @@ User API Tokens are the recommended authentication method because they:
|
||||
Create a **User API Token**, not an Account API Token. User API Tokens are created from the profile settings and offer finer permission control.
|
||||
</Note>
|
||||
|
||||
**Quick Setup:** Use these pre-configured links to open the Cloudflare Dashboard with the required permissions already selected:
|
||||
|
||||
- [Create User API Token](https://dash.cloudflare.com/profile/api-tokens?permissionGroupKeys=%5B%7B%22key%22%3A%22account_settings%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22zone%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22zone_settings%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22dns%22%2C%22type%22%3A%22read%22%7D%5D&accountId=%2A&zoneId=all&name=Prowler%20Security%20Scanner) — creates a **User API Token** (recommended). Opens the **Create Custom Token** form prefilled with the four required read-only scopes (`Account Settings`, `Zone`, `Zone Settings`, `DNS`) and the name `Prowler Security Scanner`. Adjust **Account Resources** and **Zone Resources** to match the accounts and zones you want to scan, then click **Create Token**.
|
||||
- [Create Account-Owned API Token](https://dash.cloudflare.com/?to=/:account/api-tokens&permissionGroupKeys=%5B%7B%22key%22%3A%22account_settings%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22zone%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22zone_settings%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22dns%22%2C%22type%22%3A%22read%22%7D%5D&name=Prowler%20Security%20Scanner) — creates an [account-owned token](https://developers.cloudflare.com/fundamentals/api/how-to/account-owned-token-template/) instead. Use this for automation or CI/CD where the token should not depend on a specific user account remaining active. Requires the **Super Administrator** or **Administrator** role on the account.
|
||||
|
||||
<Note>
|
||||
Template URLs only pre-fill the token creation form. Review the permissions, configure resources, and click **Create Token** to complete the process.
|
||||
</Note>
|
||||
|
||||
### Step 1: Create a User API Token
|
||||
|
||||
1. Log into the [Cloudflare Dashboard](https://dash.cloudflare.com).
|
||||
|
||||
@@ -14,6 +14,15 @@ Set up authentication for Cloudflare with the [Cloudflare Authentication](/user-
|
||||
- Grant the required read-only permissions (`Account Settings:Read`, `Zone:Read`, `Zone Settings:Read`, `DNS:Read`)
|
||||
- Identify the Cloudflare Account ID to use as the provider identifier
|
||||
|
||||
<Note>
|
||||
**Quick Setup:** Use these pre-configured links to create a token with the required permissions already selected:
|
||||
|
||||
- [Create User API Token](https://dash.cloudflare.com/profile/api-tokens?permissionGroupKeys=%5B%7B%22key%22%3A%22account_settings%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22zone%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22zone_settings%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22dns%22%2C%22type%22%3A%22read%22%7D%5D&accountId=%2A&zoneId=all&name=Prowler%20Security%20Scanner) — creates a User API Token (recommended).
|
||||
- [Create Account-Owned API Token](https://dash.cloudflare.com/?to=/:account/api-tokens&permissionGroupKeys=%5B%7B%22key%22%3A%22account_settings%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22zone%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22zone_settings%22%2C%22type%22%3A%22read%22%7D%2C%7B%22key%22%3A%22dns%22%2C%22type%22%3A%22read%22%7D%5D&name=Prowler%20Security%20Scanner) — creates an [account-owned token](https://developers.cloudflare.com/fundamentals/api/how-to/account-owned-token-template/), better suited for automation and CI/CD.
|
||||
|
||||
Both links open the Cloudflare Dashboard with the four required read-only scopes (`Account Settings`, `Zone`, `Zone Settings`, `DNS`) and the name `Prowler Security Scanner` prefilled. See [Cloudflare Authentication](/user-guide/providers/cloudflare/authentication#api-token-recommended) for the equivalent manual steps.
|
||||
</Note>
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card title="Prowler Cloud" icon="cloud" href="#prowler-cloud">
|
||||
Onboard Cloudflare using Prowler Cloud
|
||||
|
||||
@@ -153,8 +153,8 @@ Before running Prowler CLI for GitHub, ensure you have:
|
||||
# Install via pip
|
||||
pip install prowler
|
||||
|
||||
# Or via poetry
|
||||
poetry install
|
||||
# Or via uv (from the cloned repo)
|
||||
uv sync
|
||||
```
|
||||
|
||||
2. **Authentication Credentials**
|
||||
|
||||
@@ -18,7 +18,7 @@ Prowler requests the following read-only OAuth 2.0 scopes:
|
||||
| `https://www.googleapis.com/auth/admin.directory.domain.readonly` | Read access to domain information |
|
||||
| `https://www.googleapis.com/auth/admin.directory.customer.readonly` | Read access to customer information (Customer ID) |
|
||||
| `https://www.googleapis.com/auth/admin.directory.orgunit.readonly` | Read access to organizational unit hierarchy (identifies the root OU for policy filtering) |
|
||||
| `https://www.googleapis.com/auth/cloud-identity.policies.readonly` | Read access to domain-level application policies (required for Calendar service checks) |
|
||||
| `https://www.googleapis.com/auth/cloud-identity.policies.readonly` | Read access to domain-level application policies (required for Calendar, Gmail, Chat, and Drive service checks) |
|
||||
| `https://www.googleapis.com/auth/admin.directory.rolemanagement.readonly` | Read access to admin roles and role assignments |
|
||||
|
||||
<Warning>
|
||||
@@ -40,7 +40,7 @@ In the [Google Cloud Console](https://console.cloud.google.com), select the targ
|
||||
| API | Required For |
|
||||
|-----|--------------|
|
||||
| **Admin SDK API** | Directory service checks (users, roles, domains) |
|
||||
| **Cloud Identity API** | Calendar service checks (domain-level sharing and invitation policies) |
|
||||
| **Cloud Identity API** | Calendar, Gmail, Chat, and Drive service checks (domain-level application policies) |
|
||||
|
||||
For each API:
|
||||
|
||||
@@ -49,7 +49,7 @@ For each API:
|
||||
3. Click **Enable**
|
||||
|
||||
<Note>
|
||||
Both APIs must be enabled in the same GCP project that hosts the Service Account. Calendar checks will return no findings if the Cloud Identity API is not enabled.
|
||||
Both APIs must be enabled in the same GCP project that hosts the Service Account. Calendar, Gmail, Chat, and Drive checks will return no findings if the Cloud Identity API is not enabled.
|
||||
</Note>
|
||||
|
||||
### Step 3: Create a Service Account
|
||||
@@ -176,9 +176,9 @@ If Prowler connects but returns empty results or permission errors for specific
|
||||
- Verify all scopes are authorized in the Admin Console
|
||||
- Ensure the delegated user is an active super administrator
|
||||
|
||||
### Calendar Checks Return No Findings
|
||||
### Policy API Checks Return No Findings
|
||||
|
||||
If the Directory checks run successfully but the Calendar checks (e.g., `calendar_external_sharing_primary_calendar`) return no findings, the Cloud Identity Policy API is not reachable for this Service Account. Verify:
|
||||
If the Directory checks run successfully but the Calendar, Gmail, Chat, or Drive checks return no findings, the Cloud Identity Policy API is not reachable for this Service Account. Verify:
|
||||
|
||||
- The **Cloud Identity API** is enabled in the GCP project hosting the Service Account (Step 2)
|
||||
- The scope `https://www.googleapis.com/auth/cloud-identity.policies.readonly` is included in the Domain-Wide Delegation OAuth scopes list in the Admin Console (Step 5)
|
||||
|
||||
@@ -22,7 +22,7 @@ Install promptfoo using one of the following methods:
|
||||
|
||||
**Using npm:**
|
||||
```bash
|
||||
npm install -g promptfoo
|
||||
npm install --global promptfoo@0.121.11
|
||||
```
|
||||
|
||||
**Using Homebrew (macOS):**
|
||||
|
||||
@@ -46,7 +46,7 @@ Before you begin, ensure you have:
|
||||
```bash
|
||||
pip install prowler
|
||||
# or for development:
|
||||
poetry install
|
||||
uv sync
|
||||
```
|
||||
|
||||
2. **OCI Python SDK** (automatically installed with Prowler):
|
||||
|
||||
@@ -0,0 +1,186 @@
|
||||
---
|
||||
title: 'Okta Authentication in Prowler'
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
<VersionBadge version="5.27.0" />
|
||||
|
||||
Prowler authenticates to Okta as a **service application** using **OAuth 2.0 with a private-key JWT** (Client Credentials grant). The integration is read-only by scope and follows DISA STIG guidance for least-privilege access.
|
||||
|
||||
## Common Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- An Okta organization. The UI examples below use **Identity Engine** terminology such as **Global Session Policy**; Classic Engine exposes equivalent sign-on policy concepts under older naming.
|
||||
- A **Super Administrator** account on that organization for the one-time service-app setup.
|
||||
- An **API Services** app integration created in the Okta Admin Console.
|
||||
|
||||
### Authentication Method Overview
|
||||
|
||||
| Method | Status | Use Case |
|
||||
|---|---|---|
|
||||
| **OAuth 2.0 (private-key JWT)** | Supported | Production scans, CI/CD, Prowler App. |
|
||||
|
||||
The private-key JWT flow is the only supported authentication method in the initial release. The service application proves possession of a private key on every token request; Okta returns a short-lived access token, refreshed automatically by the SDK.
|
||||
|
||||
<Note>
|
||||
If a different authentication method is needed (SSWS API token, OAuth with user delegation, etc.), please open a [feature request](https://github.com/prowler-cloud/prowler/issues/new?template=feature-request.yml) describing the use case.
|
||||
</Note>
|
||||
|
||||
### Required OAuth Scopes
|
||||
|
||||
For the initial check (`signon_global_session_idle_timeout_15min`) only one scope is required:
|
||||
|
||||
- `okta.policies.read`
|
||||
|
||||
Additional scopes will be needed as more services and checks are added; these are the ones currently required:
|
||||
|
||||
| Scope | Used by |
|
||||
|---|---|
|
||||
| `okta.policies.read` | Sign-on / password / authentication policies |
|
||||
|
||||
### Required Admin Role
|
||||
|
||||
The service application must be assigned the built-in **Read-Only Administrator** role.
|
||||
|
||||
Okta's Management API enforces a two-layer authorization model: an OAuth **scope** decides which API endpoints the token can call, and an **admin role** decides whether the call returns data. With only a scope granted, the token mint succeeds but every read returns `403 Forbidden`. The Read-Only Administrator role is the minimum that lets the granted `okta.*.read` scopes actually return configuration data to Prowler's checks — without it, the credential probe at provider startup fails and the scan never gets to evaluate any check.
|
||||
|
||||
Read-Only Administrator is intentionally the narrowest role that satisfies this requirement and aligns with the least-privilege guidance in DISA STIG.
|
||||
|
||||
## Step-by-Step Setup
|
||||
|
||||
### 1. Go to the admin console
|
||||
|
||||

|
||||
|
||||
### 2. [Optional] Disable the privilege-escalation bypass (org-wide, one-time)
|
||||
|
||||
In the Okta Admin Console, go to **Settings → Account → Public client app admins** and ensure it is **off**. When enabled, every API Services app can be auto-assigned the Super Administrator role after scopes are granted, which would invalidate the read-only premise of this integration.
|
||||
|
||||

|
||||
|
||||
### 3. Create the API Services app
|
||||
|
||||
1. Go to **Applications → Applications**.
|
||||
|
||||

|
||||
|
||||
2. **Create App Integration**
|
||||
|
||||

|
||||
|
||||
3. Sign-in method: **API Services**. Click **Next**.
|
||||
4. Name the app (for example, `Prowler Scanner`) and click **Save**.
|
||||
5. Copy the displayed **Client ID** — you'll use it as `OKTA_CLIENT_ID`.
|
||||
|
||||

|
||||
|
||||
### 4. Switch to private-key authentication and generate a keypair
|
||||
|
||||
On the new app's **General** tab, scroll to **Client Credentials**:
|
||||
|
||||
1. Click **Edit**.
|
||||
2. Set **Client authentication** to **Public key / Private key**.
|
||||
3. Under **Public Keys**, click **Add key**.
|
||||
4. In the modal, click **Generate new key**. Okta creates a JWK pair.
|
||||
5. Click the **PEM** tab to switch the displayed format (or keep JWK — Prowler accepts both).
|
||||
6. Copy the entire `-----BEGIN PRIVATE KEY-----` block (or the JWK JSON).
|
||||
7. Click **Done**, then **Save**.
|
||||
|
||||
<Warning>
|
||||
Okta displays the private key **only once**. If you close the modal without copying, you must generate a new key.
|
||||
</Warning>
|
||||
|
||||

|
||||
|
||||
### 5. Grant the required OAuth scopes
|
||||
|
||||
On the app, open the **Okta API Scopes** tab and click **Grant** on every scope Prowler needs. For the initial release, granting only `okta.policies.read` is sufficient.
|
||||
|
||||

|
||||
|
||||
### 6. Assign the Read-Only Administrator role
|
||||
|
||||
On the app, open the **Admin roles** tab and click **Edit assignments → Add assignment**:
|
||||
|
||||
- **Role:** Read-Only Administrator
|
||||
- **Resources:** All resources
|
||||
|
||||
Save the changes.
|
||||
|
||||

|
||||
|
||||
### 7. [Optional] Verify DPoP setting
|
||||
|
||||
Prowler sends DPoP (Demonstrating Proof of Possession) proofs on every token request. The integration works whether the **Require Demonstrating Proof of Possession (DPoP) header in token requests** setting on the service app is on or off — but enabling it is the more secure default.
|
||||
|
||||
## Prowler CLI Authentication
|
||||
|
||||
### Using Environment Variables (Required for Secrets)
|
||||
|
||||
Private key material **must** be supplied via environment variables — Prowler does not accept secrets through CLI flags.
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="YOUR-ORG.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
|
||||
# Either of the two — content takes precedence over file when both are set.
|
||||
export OKTA_PRIVATE_KEY_FILE="/secure/path/to/prowler-okta.pem"
|
||||
# or
|
||||
export OKTA_PRIVATE_KEY="$(cat /secure/path/to/prowler-okta.pem)"
|
||||
|
||||
# Optional — defaults to "okta.policies.read"
|
||||
export OKTA_SCOPES="okta.policies.read"
|
||||
|
||||
uv run python prowler-cli.py okta
|
||||
```
|
||||
|
||||
### Non-Secret CLI Flags
|
||||
|
||||
Non-secret values are also available as CLI flags for ergonomic overrides:
|
||||
|
||||
| Flag | Equivalent env var |
|
||||
|---|---|
|
||||
| `--okta-org-domain` | `OKTA_ORG_DOMAIN` |
|
||||
| `--okta-client-id` | `OKTA_CLIENT_ID` |
|
||||
| `--okta-scopes` | `OKTA_SCOPES` |
|
||||
|
||||
Run a single check directly:
|
||||
|
||||
```bash
|
||||
uv run python prowler-cli.py okta --check signon_global_session_idle_timeout_15min
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### `OktaInvalidOrgDomainError`
|
||||
|
||||
The org domain must be `<org>.okta.com` (or `.oktapreview.com` / `.okta-emea.com` / `.okta-gov.com` / `.okta.mil` / `.okta-miltest.com` / `.trex-govcloud.com`). Pass the bare hostname only — no `https://` scheme, no path, no trailing slash. Custom (vanity) domains are not currently accepted.
|
||||
|
||||
### `OktaPrivateKeyFileError`
|
||||
|
||||
The file at `OKTA_PRIVATE_KEY_FILE` is missing, unreadable, or empty. Confirm the path and that the file contains a non-empty PEM block or JWK JSON document.
|
||||
|
||||
### `OktaInvalidCredentialsError` at provider init
|
||||
|
||||
Prowler validates credentials at startup by listing one sign-on policy. This error indicates the credential material itself was rejected:
|
||||
|
||||
- **`invalid_client`** — the public key registered in Okta does not match the private key on disk. Generate a fresh keypair and try again.
|
||||
|
||||
### `OktaInsufficientPermissionsError` at provider init
|
||||
|
||||
Raised when the credential probe succeeds at the OAuth layer but the request is rejected because the service app lacks the required scope or admin role:
|
||||
|
||||
- **`invalid_scope`** — the `okta.policies.read` scope is not granted on the service app. Grant it from **Okta API Scopes**.
|
||||
- **`Forbidden` / `not authorized`** — the **Read-Only Administrator** role is not assigned to the service app. Assign it from **Admin roles**.
|
||||
|
||||
### `invalid_dpop_proof`
|
||||
|
||||
The org or the service app requires DPoP. The provider always sends DPoP proofs, so this error indicates the SDK could not build a valid proof — typically because the private key on disk does not match the public key uploaded to Okta. Regenerate the keypair.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Implement OAuth 2.0 for an Okta service app](https://developer.okta.com/docs/guides/implement-oauth-for-okta-serviceapp/main/)
|
||||
- [Okta Policy API reference](https://developer.okta.com/docs/api/openapi/okta-management/management/tag/Policy/)
|
||||
- [DISA STIG for Okta (V-273186)](https://stigviewer.com/stigs/okta/)
|
||||
@@ -0,0 +1,144 @@
|
||||
---
|
||||
title: 'Getting Started With Okta on Prowler'
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
Prowler for Okta scans an Okta organization for identity and session-management misconfigurations. The provider authenticates as a service application using **OAuth 2.0 with a private-key JWT** (Client Credentials grant) — no end-user login, read-only by scope.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Set up authentication for Okta with the [Okta Authentication](/user-guide/providers/okta/authentication) guide before starting:
|
||||
|
||||
- An Okta organization. The UI examples below use **Identity Engine** terminology such as **Global Session Policy**; Classic Engine exposes the equivalent sign-on policy concepts under older names.
|
||||
- A **Super Administrator** account on that organization for the one-time service-app setup.
|
||||
- An **API Services** app integration in the Okta Admin Console with the `okta.policies.read` scope granted and the **Read-Only Administrator** role assigned.
|
||||
- Python 3.10+ and Prowler 5.27.0 or later installed locally.
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card title="Prowler Cloud" icon="cloud" href="#prowler-cloud">
|
||||
Onboard Okta using Prowler Cloud
|
||||
</Card>
|
||||
<Card title="Prowler CLI" icon="terminal" href="#prowler-cli">
|
||||
Onboard Okta using Prowler CLI
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Prowler Cloud
|
||||
|
||||
<Note>
|
||||
Prowler Cloud onboarding for Okta is coming soon. Track the [Prowler GitHub repository](https://github.com/prowler-cloud/prowler) for release updates. Use the [Prowler CLI](#prowler-cli) workflow below in the meantime.
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
<VersionBadge version="5.27.0" />
|
||||
|
||||
### Step 1: Set Up Authentication
|
||||
|
||||
Follow the [Okta Authentication](/user-guide/providers/okta/authentication) guide to create the service application, generate a keypair, grant scopes, and assign the Read-Only Administrator role. Then export the credentials:
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="acme.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
export OKTA_PRIVATE_KEY_FILE="/secure/path/to/prowler-okta.pem"
|
||||
# Optional — defaults to "okta.policies.read"
|
||||
export OKTA_SCOPES="okta.policies.read"
|
||||
```
|
||||
|
||||
The private key file may contain either a PEM-encoded RSA key or a JWK JSON document.
|
||||
|
||||
#### Supplying the Private Key as Content
|
||||
|
||||
For automated environments where writing the key to disk is not desirable (CI runners, container secrets, etc.), the private key may be passed directly as a string:
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="acme.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
export OKTA_PRIVATE_KEY="$(cat /secure/path/to/prowler-okta.pem)"
|
||||
```
|
||||
|
||||
`OKTA_PRIVATE_KEY` takes precedence over `OKTA_PRIVATE_KEY_FILE` when both are set. The private key is intentionally not exposed as a CLI flag — secrets must be supplied via environment variables only.
|
||||
|
||||
### Step 2: Run the First Scan
|
||||
|
||||
Run a baseline scan after credentials are configured:
|
||||
|
||||
```bash
|
||||
prowler okta
|
||||
```
|
||||
|
||||
Or run a specific check directly:
|
||||
|
||||
```bash
|
||||
prowler okta --check signon_global_session_idle_timeout_15min
|
||||
```
|
||||
|
||||
Prowler prints a summary table; full findings are written to the configured output formats.
|
||||
|
||||
### Step 3: Use a Custom Configuration (Optional)
|
||||
|
||||
Prowler uses a configuration file to customize check thresholds. The Okta configuration currently includes:
|
||||
|
||||
```yaml
|
||||
okta:
|
||||
# okta.signon_global_session_idle_timeout_15min
|
||||
# Defaults to 15 minutes per DISA STIG V-273186.
|
||||
okta_max_session_idle_minutes: 15
|
||||
```
|
||||
|
||||
To use a custom configuration:
|
||||
|
||||
```bash
|
||||
prowler okta --config-file /path/to/config.yaml
|
||||
```
|
||||
|
||||
## Supported Services
|
||||
|
||||
Prowler for Okta includes security checks across the following services:
|
||||
|
||||
| Service | Description |
|
||||
| ----------- | ----------------------------------------------------------------------------------- |
|
||||
| **Sign-On** | Global session policy controls (idle timeout, lifetime, rule priority and ordering) |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### STIG Rule Ordering
|
||||
|
||||
The initial check is mapped to DISA STIG `V-273186` / `OKTA-APP-000020`. Prowler implements the STIG procedure as written: the **Default Policy** must have a **Priority 1** rule that is **not** `Default Rule`, and that rule must set **Maximum Okta global session idle time** to 15 minutes or less.
|
||||
|
||||
This is stricter than simply finding the same timeout value somewhere else in the policy set. A compliant custom rule in another policy, or a compliant timeout on the built-in `Default Rule`, does not satisfy this STIG procedure.
|
||||
|
||||
### Default Scopes
|
||||
|
||||
Prowler requests a fixed set of OAuth scopes on every token exchange. The default is a single scope that covers the bundled initial check:
|
||||
|
||||
- `okta.policies.read`
|
||||
|
||||
The service app must have that scope granted in the **Okta API Scopes** tab. When the granted set is narrower than the requested set, the token request fails with an `invalid_scope` error and the scan stops at provider initialization.
|
||||
|
||||
When additional checks are enabled — or when running against a service app that exposes a different scope set — override the default with `OKTA_SCOPES` (comma-separated string for the env var) or `--okta-scopes` (space-separated list for the CLI):
|
||||
|
||||
```bash
|
||||
# Environment variable — comma-separated
|
||||
export OKTA_SCOPES="okta.policies.read,okta.apps.read,okta.users.read"
|
||||
|
||||
# CLI flag — space-separated
|
||||
prowler okta --okta-scopes okta.policies.read okta.apps.read okta.users.read
|
||||
```
|
||||
|
||||
For the full catalog of OAuth scopes exposed by the Okta Management API, refer to the [Okta OAuth 2.0 scopes documentation](https://developer.okta.com/docs/api/oauth2/).
|
||||
|
||||
<Note>
|
||||
As new services and checks land in the Okta provider, the default scope list grows alongside them. Re-check the granted scopes on the service app after each Prowler upgrade and grant any newly required `okta.*.read` scopes in the Admin Console.
|
||||
</Note>
|
||||
|
||||
### Common Errors
|
||||
|
||||
- **`OktaInvalidOrgDomainError`** — the org domain must be `<org>.okta.com` (or `.oktapreview.com` / `.okta-emea.com` / `.okta-gov.com` / `.okta.mil` / `.okta-miltest.com` / `.trex-govcloud.com`). Pass the bare hostname only — no `https://` scheme, no path, no trailing slash.
|
||||
- **`OktaPrivateKeyFileError`** — confirm the file is readable and contains a non-empty PEM or JWK body.
|
||||
- **`OktaInsufficientPermissionsError`** — the credential probe reached Okta but the service app cannot perform the request. The error string carries `invalid_scope`, `Forbidden`, `not authorized`, or `permission`. Fix by granting the missing `okta.*.read` scope from **Okta API Scopes** and confirming the **Read-Only Administrator** role is assigned to the service app.
|
||||
- **`OktaInvalidCredentialsError`** — the credential probe reached Okta but Okta rejected the JWT. Typically the private key on disk does not match the public JWK uploaded to the service app, or the JWT signing parameters are wrong. Regenerate the keypair and re-upload the public JWK.
|
||||
- **Token requests failing for an unknown scope** — the app was granted a narrower scope set than `OKTA_SCOPES` requests. Either narrow `OKTA_SCOPES` or grant the missing scopes in the Admin Console.
|
||||
|
After Width: | Height: | Size: 159 KiB |
|
After Width: | Height: | Size: 134 KiB |
|
After Width: | Height: | Size: 173 KiB |
|
After Width: | Height: | Size: 127 KiB |
|
After Width: | Height: | Size: 83 KiB |
|
After Width: | Height: | Size: 78 KiB |
|
After Width: | Height: | Size: 216 KiB |
|
After Width: | Height: | Size: 56 KiB |
@@ -4,6 +4,10 @@ All notable changes to the **Prowler MCP Server** are documented in this file.
|
||||
|
||||
## [0.7.0] (Prowler UNRELEASED)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- MCP Server tools for Prowler Finding Groups Management [(#11140)](https://github.com/prowler-cloud/prowler/pull/11140)
|
||||
|
||||
### 🔐 Security
|
||||
|
||||
- `cryptography` from 46.0.1 to 47.0.0 (transitive) for CVE-2026-39892 and CVE-2026-26007 / CVE-2026-34073 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
|
||||
Full access to Prowler Cloud platform and self-managed Prowler App for:
|
||||
- **Findings Analysis**: Query, filter, and analyze security findings across all your cloud environments
|
||||
- **Finding Groups Analysis**: Triage findings grouped by check ID and drill down into affected resources
|
||||
- **Provider Management**: Create, configure, and manage your configured Prowler providers (AWS, Azure, GCP, etc.)
|
||||
- **Scan Orchestration**: Trigger on-demand scans and schedule recurring security assessments
|
||||
- **Resource Inventory**: Search and view detailed information about your audited resources
|
||||
@@ -56,13 +57,21 @@ Prowler MCP Server can be used in three ways:
|
||||
- Managed and maintained by Prowler team
|
||||
- Always up-to-date
|
||||
|
||||
Install a reviewed version of `mcp-remote` in a dedicated local workspace first. Avoid running `npx mcp-remote` directly because it can download and execute a new package version on each run.
|
||||
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "npx",
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp",
|
||||
"--header",
|
||||
"Authorization: Bearer pk_YOUR_API_KEY_HERE"
|
||||
|
||||
@@ -0,0 +1,300 @@
|
||||
"""Pydantic models for Prowler Finding Groups responses."""
|
||||
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from prowler_mcp_server.prowler_app.models.base import MinimalSerializerMixin
|
||||
|
||||
|
||||
# Aggregated status values a finding group can report.
FindingStatus = Literal["FAIL", "PASS", "MANUAL"]
# Severity labels accepted for a finding group.
FindingSeverity = Literal["critical", "high", "medium", "low", "informational"]
# Delta markers for findings ("new" vs. "changed").
FindingDelta = Literal["new", "changed"]
|
||||
|
||||
|
||||
def _attributes(data: dict) -> dict:
|
||||
return data.get("attributes", {})
|
||||
|
||||
|
||||
def _counter(attributes: dict, key: str) -> int:
|
||||
return attributes.get(key) or 0
|
||||
|
||||
|
||||
def _simplified_group_kwargs(data: dict) -> dict:
    """Build keyword arguments for SimplifiedFindingGroup from a raw
    JSON:API finding-group resource.

    check_id falls back to the resource-level "id" (then "") when the
    attribute is missing; counter fields normalize missing/None to 0;
    timestamp fields are forwarded verbatim (possibly None).
    """
    attrs = _attributes(data)

    kwargs = {
        "check_id": attrs.get("check_id", data.get("id", "")),
        "check_title": attrs.get("check_title"),
        "severity": attrs.get("severity", "informational"),
        "status": attrs.get("status", "MANUAL"),
        "muted": attrs.get("muted", False),
        # Coerce a falsy (e.g. None) provider list to an empty list.
        "impacted_providers": attrs.get("impacted_providers") or [],
    }

    # All numeric counters share the same None-to-0 normalization.
    for counter_key in (
        "resources_fail",
        "resources_total",
        "pass_count",
        "fail_count",
        "manual_count",
        "muted_count",
        "new_count",
        "changed_count",
    ):
        kwargs[counter_key] = _counter(attrs, counter_key)

    # Optional timestamps pass through unchanged.
    for timestamp_key in ("first_seen_at", "last_seen_at", "failing_since"):
        kwargs[timestamp_key] = attrs.get(timestamp_key)

    return kwargs
|
||||
|
||||
|
||||
class SimplifiedFindingGroup(MinimalSerializerMixin):
|
||||
"""Finding group summary optimized for browsing many checks."""
|
||||
|
||||
check_id: str = Field(description="Public check ID that identifies this group")
|
||||
check_title: str | None = Field(
|
||||
default=None, description="Human-readable check title"
|
||||
)
|
||||
severity: FindingSeverity = Field(description="Highest severity in the group")
|
||||
status: FindingStatus = Field(description="Aggregated finding group status")
|
||||
muted: bool = Field(
|
||||
description="Whether all findings in this group are muted or accepted"
|
||||
)
|
||||
impacted_providers: list[str] = Field(
|
||||
default_factory=list,
|
||||
description="Provider types impacted by this finding group",
|
||||
)
|
||||
resources_fail: int = Field(
|
||||
description="Number of non-muted failing resources in this group", ge=0
|
||||
)
|
||||
resources_total: int = Field(
|
||||
description="Total number of resources in this group", ge=0
|
||||
)
|
||||
pass_count: int = Field(
|
||||
description="Number of non-muted PASS findings in this group", ge=0
|
||||
)
|
||||
fail_count: int = Field(
|
||||
description="Number of non-muted FAIL findings in this group", ge=0
|
||||
)
|
||||
manual_count: int = Field(
|
||||
description="Number of non-muted MANUAL findings in this group", ge=0
|
||||
)
|
||||
muted_count: int = Field(description="Total muted findings in this group", ge=0)
|
||||
new_count: int = Field(description="Number of new non-muted findings", ge=0)
|
||||
changed_count: int = Field(
|
||||
description="Number of changed non-muted findings", ge=0
|
||||
)
|
||||
first_seen_at: str | None = Field(
|
||||
default=None, description="First time this group was detected"
|
||||
)
|
||||
last_seen_at: str | None = Field(
|
||||
default=None, description="Last time this group was detected"
|
||||
)
|
||||
failing_since: str | None = Field(
|
||||
default=None, description="First time this group started failing"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict) -> "SimplifiedFindingGroup":
|
||||
"""Transform JSON:API finding group response to simplified format."""
|
||||
return cls(**_simplified_group_kwargs(data))
|
||||
|
||||
|
||||
class DetailedFindingGroup(SimplifiedFindingGroup):
|
||||
"""Finding group with complete counters and descriptive context."""
|
||||
|
||||
check_description: str | None = Field(
|
||||
default=None, description="Description of the check behind this group"
|
||||
)
|
||||
pass_muted_count: int = Field(description="Muted PASS findings", ge=0)
|
||||
fail_muted_count: int = Field(description="Muted FAIL findings", ge=0)
|
||||
manual_muted_count: int = Field(description="Muted MANUAL findings", ge=0)
|
||||
new_fail_count: int = Field(description="New non-muted FAIL findings", ge=0)
|
||||
new_fail_muted_count: int = Field(description="New muted FAIL findings", ge=0)
|
||||
new_pass_count: int = Field(description="New non-muted PASS findings", ge=0)
|
||||
new_pass_muted_count: int = Field(description="New muted PASS findings", ge=0)
|
||||
new_manual_count: int = Field(description="New non-muted MANUAL findings", ge=0)
|
||||
new_manual_muted_count: int = Field(
|
||||
description="New muted MANUAL findings", ge=0
|
||||
)
|
||||
changed_fail_count: int = Field(
|
||||
description="Changed non-muted FAIL findings", ge=0
|
||||
)
|
||||
changed_fail_muted_count: int = Field(
|
||||
description="Changed muted FAIL findings", ge=0
|
||||
)
|
||||
changed_pass_count: int = Field(
|
||||
description="Changed non-muted PASS findings", ge=0
|
||||
)
|
||||
changed_pass_muted_count: int = Field(
|
||||
description="Changed muted PASS findings", ge=0
|
||||
)
|
||||
changed_manual_count: int = Field(
|
||||
description="Changed non-muted MANUAL findings", ge=0
|
||||
)
|
||||
changed_manual_muted_count: int = Field(
|
||||
description="Changed muted MANUAL findings", ge=0
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict) -> "DetailedFindingGroup":
|
||||
"""Transform JSON:API finding group response to detailed format."""
|
||||
attributes = _attributes(data)
|
||||
|
||||
return cls(
|
||||
**_simplified_group_kwargs(data),
|
||||
check_description=attributes.get("check_description"),
|
||||
pass_muted_count=_counter(attributes, "pass_muted_count"),
|
||||
fail_muted_count=_counter(attributes, "fail_muted_count"),
|
||||
manual_muted_count=_counter(attributes, "manual_muted_count"),
|
||||
new_fail_count=_counter(attributes, "new_fail_count"),
|
||||
new_fail_muted_count=_counter(attributes, "new_fail_muted_count"),
|
||||
new_pass_count=_counter(attributes, "new_pass_count"),
|
||||
new_pass_muted_count=_counter(attributes, "new_pass_muted_count"),
|
||||
new_manual_count=_counter(attributes, "new_manual_count"),
|
||||
new_manual_muted_count=_counter(attributes, "new_manual_muted_count"),
|
||||
changed_fail_count=_counter(attributes, "changed_fail_count"),
|
||||
changed_fail_muted_count=_counter(attributes, "changed_fail_muted_count"),
|
||||
changed_pass_count=_counter(attributes, "changed_pass_count"),
|
||||
changed_pass_muted_count=_counter(attributes, "changed_pass_muted_count"),
|
||||
changed_manual_count=_counter(attributes, "changed_manual_count"),
|
||||
changed_manual_muted_count=_counter(
|
||||
attributes, "changed_manual_muted_count"
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class FindingGroupsListResponse(MinimalSerializerMixin):
|
||||
"""Paginated response for finding group list queries."""
|
||||
|
||||
groups: list[SimplifiedFindingGroup] = Field(
|
||||
description="Finding groups matching the query"
|
||||
)
|
||||
total_num_groups: int = Field(
|
||||
description="Total groups matching the query across all pages", ge=0
|
||||
)
|
||||
total_num_pages: int = Field(description="Total pages available", ge=0)
|
||||
current_page: int = Field(description="Current page number", ge=1)
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, response: dict) -> "FindingGroupsListResponse":
|
||||
"""Transform JSON:API list response to simplified format."""
|
||||
pagination = response.get("meta", {}).get("pagination", {})
|
||||
groups = [
|
||||
SimplifiedFindingGroup.from_api_response(item)
|
||||
for item in response.get("data", [])
|
||||
]
|
||||
|
||||
return cls(
|
||||
groups=groups,
|
||||
total_num_groups=pagination.get("count", len(groups)),
|
||||
total_num_pages=pagination.get("pages", 1),
|
||||
current_page=pagination.get("page", 1),
|
||||
)
|
||||
|
||||
|
||||
class FindingGroupResourceInfo(MinimalSerializerMixin):
|
||||
"""Nested resource information for a finding group row."""
|
||||
|
||||
uid: str = Field(description="Provider-native resource UID")
|
||||
name: str = Field(description="Resource name")
|
||||
service: str = Field(description="Cloud service")
|
||||
region: str = Field(description="Cloud region")
|
||||
type: str = Field(description="Resource type")
|
||||
resource_group: str | None = Field(
|
||||
default=None, description="Provider resource group or equivalent"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict) -> "FindingGroupResourceInfo":
|
||||
"""Transform nested resource data to simplified format."""
|
||||
return cls(
|
||||
uid=data.get("uid", ""),
|
||||
name=data.get("name", ""),
|
||||
service=data.get("service", ""),
|
||||
region=data.get("region", ""),
|
||||
type=data.get("type", ""),
|
||||
resource_group=data.get("resource_group"),
|
||||
)
|
||||
|
||||
|
||||
class FindingGroupProviderInfo(MinimalSerializerMixin):
|
||||
"""Nested provider information for a finding group resource row."""
|
||||
|
||||
type: str = Field(description="Provider type")
|
||||
uid: str = Field(description="Provider-native account or subscription ID")
|
||||
alias: str | None = Field(default=None, description="Provider alias")
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict) -> "FindingGroupProviderInfo":
|
||||
"""Transform nested provider data to simplified format."""
|
||||
return cls(
|
||||
type=data.get("type", ""),
|
||||
uid=data.get("uid", ""),
|
||||
alias=data.get("alias"),
|
||||
)
|
||||
|
||||
|
||||
class FindingGroupResource(MinimalSerializerMixin):
|
||||
"""Resource row affected by a finding group."""
|
||||
|
||||
id: str = Field(description="Row identifier for this finding group resource")
|
||||
resource: FindingGroupResourceInfo = Field(description="Affected resource")
|
||||
provider: FindingGroupProviderInfo = Field(description="Affected provider")
|
||||
finding_id: str = Field(
|
||||
description="Finding UUID to use with prowler_app_get_finding_details"
|
||||
)
|
||||
status: FindingStatus = Field(description="Finding status for this resource")
|
||||
severity: FindingSeverity = Field(description="Finding severity")
|
||||
muted: bool = Field(description="Whether the finding is muted")
|
||||
delta: FindingDelta | None = Field(default=None, description="Change status")
|
||||
first_seen_at: str | None = Field(default=None, description="First seen time")
|
||||
last_seen_at: str | None = Field(default=None, description="Last seen time")
|
||||
muted_reason: str | None = Field(default=None, description="Mute reason")
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict) -> "FindingGroupResource":
|
||||
"""Transform JSON:API finding group resource response."""
|
||||
attributes = _attributes(data)
|
||||
|
||||
return cls(
|
||||
id=data.get("id", ""),
|
||||
resource=FindingGroupResourceInfo.from_api_response(
|
||||
attributes.get("resource") or {}
|
||||
),
|
||||
provider=FindingGroupProviderInfo.from_api_response(
|
||||
attributes.get("provider") or {}
|
||||
),
|
||||
finding_id=str(attributes.get("finding_id", "")),
|
||||
status=attributes.get("status", "MANUAL"),
|
||||
severity=attributes.get("severity", "informational"),
|
||||
muted=attributes.get("muted", False),
|
||||
delta=attributes.get("delta"),
|
||||
first_seen_at=attributes.get("first_seen_at"),
|
||||
last_seen_at=attributes.get("last_seen_at"),
|
||||
muted_reason=attributes.get("muted_reason"),
|
||||
)
|
||||
|
||||
|
||||
class FindingGroupResourcesListResponse(MinimalSerializerMixin):
|
||||
"""Paginated response for finding group resource queries."""
|
||||
|
||||
resources: list[FindingGroupResource] = Field(
|
||||
description="Resources matching the finding group query"
|
||||
)
|
||||
total_num_resources: int = Field(
|
||||
description="Total resources matching the query across all pages", ge=0
|
||||
)
|
||||
total_num_pages: int = Field(description="Total pages available", ge=0)
|
||||
current_page: int = Field(description="Current page number", ge=1)
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, response: dict) -> "FindingGroupResourcesListResponse":
|
||||
"""Transform JSON:API resource list response to simplified format."""
|
||||
pagination = response.get("meta", {}).get("pagination", {})
|
||||
resources = [
|
||||
FindingGroupResource.from_api_response(item)
|
||||
for item in response.get("data", [])
|
||||
]
|
||||
|
||||
return cls(
|
||||
resources=resources,
|
||||
total_num_resources=pagination.get("count", len(resources)),
|
||||
total_num_pages=pagination.get("pages", 1),
|
||||
current_page=pagination.get("page", 1),
|
||||
)
|
||||
@@ -0,0 +1,473 @@
|
||||
"""Finding Groups tools for Prowler App MCP Server.
|
||||
|
||||
This module provides read-only tools for finding group triage and drill-downs.
|
||||
"""
|
||||
|
||||
from typing import Any, Literal
|
||||
from urllib.parse import quote
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from prowler_mcp_server.prowler_app.models.finding_groups import (
|
||||
DetailedFindingGroup,
|
||||
FindingGroupResourcesListResponse,
|
||||
FindingGroupsListResponse,
|
||||
)
|
||||
from prowler_mcp_server.prowler_app.tools.base import BaseTool
|
||||
|
||||
|
||||
StatusFilter = Literal["FAIL", "PASS", "MANUAL"]
|
||||
SeverityFilter = Literal["critical", "high", "medium", "low", "informational"]
|
||||
DeltaFilter = Literal["new", "changed"]
|
||||
|
||||
GROUP_DETAIL_FIELDS = (
|
||||
"check_id,check_title,check_description,severity,status,muted,"
|
||||
"impacted_providers,resources_fail,resources_total,pass_count,fail_count,"
|
||||
"manual_count,pass_muted_count,fail_muted_count,manual_muted_count,"
|
||||
"muted_count,new_count,changed_count,new_fail_count,new_fail_muted_count,"
|
||||
"new_pass_count,new_pass_muted_count,new_manual_count,new_manual_muted_count,"
|
||||
"changed_fail_count,changed_fail_muted_count,changed_pass_count,"
|
||||
"changed_pass_muted_count,changed_manual_count,changed_manual_muted_count,"
|
||||
"first_seen_at,last_seen_at,failing_since"
|
||||
)
|
||||
|
||||
GROUP_LIST_FIELDS = (
|
||||
"check_id,check_title,severity,status,muted,impacted_providers,"
|
||||
"resources_fail,resources_total,pass_count,fail_count,manual_count,"
|
||||
"muted_count,new_count,changed_count,first_seen_at,last_seen_at,failing_since"
|
||||
)
|
||||
|
||||
RESOURCE_FIELDS = (
|
||||
"resource,provider,finding_id,status,severity,muted,delta,"
|
||||
"first_seen_at,last_seen_at,muted_reason"
|
||||
)
|
||||
|
||||
|
||||
class FindingGroupsTools(BaseTool):
|
||||
"""Tools for Finding Groups operations."""
|
||||
|
||||
@staticmethod
|
||||
def _bool_value(value: bool | str) -> bool:
|
||||
"""Normalize bool-like MCP client values."""
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
return value.lower() == "true"
|
||||
|
||||
@staticmethod
|
||||
def _group_endpoint(date_range: tuple[str, str] | None) -> str:
|
||||
return "/finding-groups/latest" if date_range is None else "/finding-groups"
|
||||
|
||||
@staticmethod
|
||||
def _resource_endpoint(check_id: str, date_range: tuple[str, str] | None) -> str:
|
||||
escaped_check_id = quote(check_id, safe="")
|
||||
if date_range is None:
|
||||
return f"/finding-groups/latest/{escaped_check_id}/resources"
|
||||
return f"/finding-groups/{escaped_check_id}/resources"
|
||||
|
||||
def _base_date_params(
|
||||
self, date_from: str | None, date_to: str | None
|
||||
) -> tuple[tuple[str, str] | None, dict[str, Any]]:
|
||||
date_range = self.api_client.normalize_date_range(
|
||||
date_from, date_to, max_days=2
|
||||
)
|
||||
if date_range is None:
|
||||
return None, {}
|
||||
|
||||
return date_range, {
|
||||
"filter[inserted_at__gte]": date_range[0],
|
||||
"filter[inserted_at__lte]": date_range[1],
|
||||
}
|
||||
|
||||
def _apply_common_filters(
|
||||
self,
|
||||
params: dict[str, Any],
|
||||
provider: list[str],
|
||||
provider_type: list[str],
|
||||
provider_uid: list[str],
|
||||
provider_alias: str | None,
|
||||
region: list[str],
|
||||
service: list[str],
|
||||
resource_type: list[str],
|
||||
resource_name: str | None,
|
||||
resource_uid: str | None,
|
||||
resource_group: list[str],
|
||||
category: list[str],
|
||||
check_id: list[str],
|
||||
check_title: str | None,
|
||||
severity: list[SeverityFilter],
|
||||
status: list[StatusFilter],
|
||||
muted: bool | str | None,
|
||||
delta: list[DeltaFilter],
|
||||
) -> None:
|
||||
if provider:
|
||||
params["filter[provider__in]"] = provider
|
||||
if provider_type:
|
||||
params["filter[provider_type__in]"] = provider_type
|
||||
if provider_uid:
|
||||
params["filter[provider_uid__in]"] = provider_uid
|
||||
if provider_alias:
|
||||
params["filter[provider_alias__icontains]"] = provider_alias
|
||||
if region:
|
||||
params["filter[region__in]"] = region
|
||||
if service:
|
||||
params["filter[service__in]"] = service
|
||||
if resource_type:
|
||||
params["filter[resource_type__in]"] = resource_type
|
||||
if resource_name:
|
||||
params["filter[resource_name__icontains]"] = resource_name
|
||||
if resource_uid:
|
||||
params["filter[resource_uid__icontains]"] = resource_uid
|
||||
if resource_group:
|
||||
params["filter[resource_groups__in]"] = resource_group
|
||||
if category:
|
||||
params["filter[category__in]"] = category
|
||||
if check_id:
|
||||
params["filter[check_id__in]"] = check_id
|
||||
if check_title:
|
||||
params["filter[check_title__icontains]"] = check_title
|
||||
if severity:
|
||||
params["filter[severity__in]"] = severity
|
||||
if status:
|
||||
params["filter[status__in]"] = status
|
||||
if muted is not None:
|
||||
params["filter[muted]"] = self._bool_value(muted)
|
||||
if delta:
|
||||
params["filter[delta__in]"] = delta
|
||||
|
||||
async def list_finding_groups(
|
||||
self,
|
||||
provider: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by provider UUIDs. Multiple values allowed. If empty, all visible providers are returned.",
|
||||
),
|
||||
provider_type: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by provider type. Multiple values allowed, such as aws, azure, gcp, kubernetes, github, or m365.",
|
||||
),
|
||||
provider_uid: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by provider-native account, subscription, or project IDs. Multiple values allowed.",
|
||||
),
|
||||
provider_alias: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by provider alias/name using partial matching.",
|
||||
),
|
||||
region: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by cloud regions. Multiple values allowed.",
|
||||
),
|
||||
service: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by cloud services. Multiple values allowed.",
|
||||
),
|
||||
resource_type: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by resource types. Multiple values allowed.",
|
||||
),
|
||||
resource_name: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by resource name using partial matching.",
|
||||
),
|
||||
resource_uid: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by resource UID using partial matching.",
|
||||
),
|
||||
resource_group: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by resource group values. Multiple values allowed.",
|
||||
),
|
||||
category: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by finding categories. Multiple values allowed.",
|
||||
),
|
||||
check_id: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by check IDs. Multiple values allowed.",
|
||||
),
|
||||
check_title: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by check title using partial matching.",
|
||||
),
|
||||
severity: list[SeverityFilter] = Field(
|
||||
default=[],
|
||||
description="Filter by aggregated severity. Empty returns all severities.",
|
||||
),
|
||||
status: list[StatusFilter] = Field(
|
||||
default=["FAIL"],
|
||||
description="Filter by aggregated status. Default returns failing groups. Pass [] to return all statuses.",
|
||||
),
|
||||
muted: bool | str | None = Field(
|
||||
default=None,
|
||||
description="Filter by fully muted group state. Accepts true/false.",
|
||||
),
|
||||
include_muted: bool | str = Field(
|
||||
default=False,
|
||||
description="When false, excludes fully muted groups. Set true to include fully muted groups.",
|
||||
),
|
||||
delta: list[DeltaFilter] = Field(
|
||||
default=[],
|
||||
description="Filter by group delta values: new or changed.",
|
||||
),
|
||||
date_from: str | None = Field(
|
||||
default=None,
|
||||
description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
|
||||
),
|
||||
date_to: str | None = Field(
|
||||
default=None,
|
||||
description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
|
||||
),
|
||||
sort: str | None = Field(
|
||||
default=None,
|
||||
description="Optional sort expression supported by the finding-groups API, such as -fail_count,-severity,check_id.",
|
||||
),
|
||||
page_size: int = Field(
|
||||
default=50, description="Number of groups to return per page"
|
||||
),
|
||||
page_number: int = Field(
|
||||
default=1, description="Page number to retrieve (1-indexed)"
|
||||
),
|
||||
) -> dict[str, Any]:
|
||||
"""List finding groups aggregated by check ID.
|
||||
|
||||
Default behavior returns the latest non-muted FAIL groups for fast triage.
|
||||
Without dates this uses `/finding-groups/latest`. With `date_from` or
|
||||
`date_to`, this uses `/finding-groups` with a maximum 2-day date window.
|
||||
|
||||
Use this tool to find noisy or high-impact checks, then call
|
||||
prowler_app_get_finding_group_details for complete counters or
|
||||
prowler_app_list_finding_group_resources to drill into affected resources.
|
||||
"""
|
||||
try:
|
||||
self.api_client.validate_page_size(page_size)
|
||||
date_range, params = self._base_date_params(date_from, date_to)
|
||||
endpoint = self._group_endpoint(date_range)
|
||||
|
||||
self._apply_common_filters(
|
||||
params,
|
||||
provider,
|
||||
provider_type,
|
||||
provider_uid,
|
||||
provider_alias,
|
||||
region,
|
||||
service,
|
||||
resource_type,
|
||||
resource_name,
|
||||
resource_uid,
|
||||
resource_group,
|
||||
category,
|
||||
check_id,
|
||||
check_title,
|
||||
severity,
|
||||
status,
|
||||
muted,
|
||||
delta,
|
||||
)
|
||||
|
||||
params["filter[include_muted]"] = self._bool_value(include_muted)
|
||||
params["page[size]"] = page_size
|
||||
params["page[number]"] = page_number
|
||||
params["fields[finding-groups]"] = GROUP_LIST_FIELDS
|
||||
if sort:
|
||||
params["sort"] = sort
|
||||
|
||||
clean_params = self.api_client.build_filter_params(params)
|
||||
api_response = await self.api_client.get(endpoint, params=clean_params)
|
||||
response = FindingGroupsListResponse.from_api_response(api_response)
|
||||
return response.model_dump()
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error listing finding groups: {e}")
|
||||
return {"error": str(e), "status": "failed"}
|
||||
|
||||
async def get_finding_group_details(
|
||||
self,
|
||||
check_id: str = Field(
|
||||
description="Public check ID that identifies the finding group. This is not a UUID."
|
||||
),
|
||||
date_from: str | None = Field(
|
||||
default=None,
|
||||
description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
|
||||
),
|
||||
date_to: str | None = Field(
|
||||
default=None,
|
||||
description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
|
||||
),
|
||||
) -> dict[str, Any]:
|
||||
"""Get complete details for one finding group by exact check ID.
|
||||
|
||||
Uses `filter[check_id]` exact matching against latest data by default,
|
||||
or historical data when dates are provided. Fully muted groups are
|
||||
included by default so accepted risk does not look like a missing group.
|
||||
"""
|
||||
try:
|
||||
date_range, params = self._base_date_params(date_from, date_to)
|
||||
endpoint = self._group_endpoint(date_range)
|
||||
|
||||
params.update(
|
||||
{
|
||||
"filter[check_id]": check_id,
|
||||
"filter[include_muted]": True,
|
||||
"page[size]": 1,
|
||||
"page[number]": 1,
|
||||
"fields[finding-groups]": GROUP_DETAIL_FIELDS,
|
||||
}
|
||||
)
|
||||
|
||||
clean_params = self.api_client.build_filter_params(params)
|
||||
api_response = await self.api_client.get(endpoint, params=clean_params)
|
||||
data = api_response.get("data", [])
|
||||
|
||||
if not data:
|
||||
return {
|
||||
"error": f"Finding group '{check_id}' not found.",
|
||||
"status": "not_found",
|
||||
}
|
||||
|
||||
group = DetailedFindingGroup.from_api_response(data[0])
|
||||
return group.model_dump()
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error getting finding group details: {e}")
|
||||
return {"error": str(e), "status": "failed"}
|
||||
|
||||
async def list_finding_group_resources(
|
||||
self,
|
||||
check_id: str = Field(
|
||||
description="Public check ID that identifies the finding group. This is not a UUID."
|
||||
),
|
||||
provider: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by provider UUIDs. Multiple values allowed.",
|
||||
),
|
||||
provider_type: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by provider type. Multiple values allowed.",
|
||||
),
|
||||
provider_uid: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by provider-native account, subscription, or project IDs. Multiple values allowed.",
|
||||
),
|
||||
provider_alias: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by provider alias/name using partial matching.",
|
||||
),
|
||||
region: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by cloud regions. Multiple values allowed.",
|
||||
),
|
||||
service: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by cloud services. Multiple values allowed.",
|
||||
),
|
||||
resource_type: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by resource types. Multiple values allowed.",
|
||||
),
|
||||
resource_name: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by resource name using partial matching.",
|
||||
),
|
||||
resource_uid: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by resource UID using partial matching.",
|
||||
),
|
||||
resource_group: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by resource group values. Multiple values allowed.",
|
||||
),
|
||||
category: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by finding categories. Multiple values allowed.",
|
||||
),
|
||||
severity: list[SeverityFilter] = Field(
|
||||
default=[],
|
||||
description="Filter by severity. Empty returns all severities.",
|
||||
),
|
||||
status: list[StatusFilter] = Field(
|
||||
default=["FAIL"],
|
||||
description="Filter by status. Default returns failing resources. Pass [] to return all statuses.",
|
||||
),
|
||||
muted: bool | str | None = Field(
|
||||
default=None,
|
||||
description="Filter by muted state. Accepts true/false. Overrides include_muted when provided.",
|
||||
),
|
||||
include_muted: bool | str = Field(
|
||||
default=False,
|
||||
description="When false, returns only actionable unmuted resources by applying muted=false. Set true to include muted and unmuted resources.",
|
||||
),
|
||||
delta: list[DeltaFilter] = Field(
|
||||
default=[], description="Filter by delta values: new or changed."
|
||||
),
|
||||
date_from: str | None = Field(
|
||||
default=None,
|
||||
description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
|
||||
),
|
||||
date_to: str | None = Field(
|
||||
default=None,
|
||||
description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
|
||||
),
|
||||
sort: str | None = Field(
|
||||
default=None,
|
||||
description="Optional sort expression supported by the finding group resources API.",
|
||||
),
|
||||
page_size: int = Field(
|
||||
default=50, description="Number of resources to return per page"
|
||||
),
|
||||
page_number: int = Field(
|
||||
default=1, description="Page number to retrieve (1-indexed)"
|
||||
),
|
||||
) -> dict[str, Any]:
|
||||
"""List resources affected by a finding group.
|
||||
|
||||
Without dates this uses `/finding-groups/latest/{check_id}/resources`.
|
||||
With `date_from` or `date_to`, this uses
|
||||
`/finding-groups/{check_id}/resources` with a maximum 2-day date window.
|
||||
|
||||
Default behavior returns FAIL, unmuted resources so the result is
|
||||
actionable. Set `include_muted=True` to include accepted/suppressed
|
||||
resources too. Each row includes nested resource and provider data plus
|
||||
`finding_id`. Use `prowler_app_get_finding_details(finding_id)` to
|
||||
retrieve complete remediation guidance for a specific resource finding.
|
||||
"""
|
||||
try:
|
||||
self.api_client.validate_page_size(page_size)
|
||||
date_range, params = self._base_date_params(date_from, date_to)
|
||||
endpoint = self._resource_endpoint(check_id, date_range)
|
||||
|
||||
if muted is None and not self._bool_value(include_muted):
|
||||
muted = False
|
||||
|
||||
self._apply_common_filters(
|
||||
params,
|
||||
provider,
|
||||
provider_type,
|
||||
provider_uid,
|
||||
provider_alias,
|
||||
region,
|
||||
service,
|
||||
resource_type,
|
||||
resource_name,
|
||||
resource_uid,
|
||||
resource_group,
|
||||
category,
|
||||
[],
|
||||
None,
|
||||
severity,
|
||||
status,
|
||||
muted,
|
||||
delta,
|
||||
)
|
||||
|
||||
params["page[size]"] = page_size
|
||||
params["page[number]"] = page_number
|
||||
params["fields[finding-group-resources]"] = RESOURCE_FIELDS
|
||||
if sort:
|
||||
params["sort"] = sort
|
||||
|
||||
clean_params = self.api_client.build_filter_params(params)
|
||||
api_response = await self.api_client.get(endpoint, params=clean_params)
|
||||
response = FindingGroupResourcesListResponse.from_api_response(
|
||||
api_response
|
||||
)
|
||||
return response.model_dump()
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error listing finding group resources: {e}")
|
||||
return {"error": str(e), "status": "failed"}
|
||||
@@ -14,15 +14,19 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Action | Skill |
|
||||
|--------|-------|
|
||||
| Add changelog entry for a PR or feature | `prowler-changelog` |
|
||||
| Adding a compliance output formatter (per-provider class + table dispatcher) | `prowler-compliance` |
|
||||
| Adding new providers | `prowler-provider` |
|
||||
| Adding services to existing providers | `prowler-provider` |
|
||||
| Auditing check-to-requirement mappings as a cloud auditor | `prowler-compliance` |
|
||||
| Create PR that requires changelog entry | `prowler-changelog` |
|
||||
| Creating new checks | `prowler-sdk-check` |
|
||||
| Creating/updating compliance frameworks | `prowler-compliance` |
|
||||
| Fixing compliance JSON bugs (duplicate IDs, empty Section, stale refs) | `prowler-compliance` |
|
||||
| Mapping checks to compliance controls | `prowler-compliance` |
|
||||
| Mocking AWS with moto in tests | `prowler-test-sdk` |
|
||||
| Review changelog format and conventions | `prowler-changelog` |
|
||||
| Reviewing compliance framework PRs | `prowler-compliance-review` |
|
||||
| Syncing compliance framework with upstream catalog | `prowler-compliance` |
|
||||
| Update CHANGELOG.md in any component | `prowler-changelog` |
|
||||
| Updating existing checks and metadata | `prowler-sdk-check` |
|
||||
| Writing Prowler SDK tests | `prowler-test-sdk` |
|
||||
@@ -81,7 +85,7 @@ class {check_name}(Check):
|
||||
|
||||
## TECH STACK
|
||||
|
||||
Python 3.10+ | Poetry 2.3+ | pytest | moto (AWS mocking) | Pre-commit hooks (black, flake8, pylint, bandit)
|
||||
Python 3.10+ | uv | pytest | moto (AWS mocking) | Pre-commit hooks (black, flake8, pylint, bandit)
|
||||
|
||||
---
|
||||
|
||||
@@ -108,20 +112,20 @@ prowler/
|
||||
|
||||
```bash
|
||||
# Setup
|
||||
poetry install --with dev
|
||||
poetry run pre-commit install
|
||||
uv sync
|
||||
uv run pre-commit install
|
||||
|
||||
# Run Prowler
|
||||
poetry run python prowler-cli.py {provider}
|
||||
poetry run python prowler-cli.py {provider} --check {check_name}
|
||||
poetry run python prowler-cli.py {provider} --list-checks
|
||||
uv run python prowler-cli.py {provider}
|
||||
uv run python prowler-cli.py {provider} --check {check_name}
|
||||
uv run python prowler-cli.py {provider} --list-checks
|
||||
|
||||
# Testing
|
||||
poetry run pytest -n auto -vvv tests/
|
||||
poetry run pytest tests/providers/{provider}/services/{service}/ -v
|
||||
uv run pytest -n auto -vvv tests/
|
||||
uv run pytest tests/providers/{provider}/services/{service}/ -v
|
||||
|
||||
# Code Quality
|
||||
poetry run pre-commit run --all-files
|
||||
uv run pre-commit run --all-files
|
||||
```
|
||||
|
||||
---
|
||||
@@ -141,8 +145,8 @@ poetry run pre-commit run --all-files
|
||||
|
||||
## QA CHECKLIST
|
||||
|
||||
- [ ] `poetry run pytest` passes
|
||||
- [ ] `poetry run pre-commit run --all-files` passes
|
||||
- [ ] `uv run pytest` passes
|
||||
- [ ] `uv run pre-commit run --all-files` passes
|
||||
- [ ] Check metadata JSON is valid
|
||||
- [ ] Tests cover PASS, FAIL, and empty resource scenarios
|
||||
- [ ] Docstrings follow Google style
|
||||
|
||||
@@ -6,13 +6,33 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- 6 Chat file sharing, external messaging, spaces, and apps access checks for Google Workspace provider using the Cloud Identity Policy API [(#11126)](https://github.com/prowler-cloud/prowler/pull/11126)
|
||||
- `entra_service_principal_no_secrets_for_permanent_tier0_roles` check for M365 provider [(#10788)](https://github.com/prowler-cloud/prowler/pull/10788)
|
||||
- `iam_user_access_not_stale_to_sagemaker` check for AWS provider with configurable `max_unused_sagemaker_access_days` (default 90) [(#11000)](https://github.com/prowler-cloud/prowler/pull/11000)
|
||||
- `cloudtrail_bedrock_logging_enabled` check for AWS provider [(#10858)](https://github.com/prowler-cloud/prowler/pull/10858)
|
||||
- Okta provider with OAuth 2.0 authentication and `signon_global_session_idle_timeout_15min` check [(#11079)](https://github.com/prowler-cloud/prowler/pull/11079)
|
||||
- `sagemaker_domain_sso_configured` check for AWS provider [(#11094)](https://github.com/prowler-cloud/prowler/pull/11094)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- `entra_emergency_access_exclusion` check for M365 provider now scopes the exclusion requirement to enabled Conditional Access policies with a `Block` grant control instead of every enabled policy, focusing on the lockout-relevant policy set [(#10849)](https://github.com/prowler-cloud/prowler/pull/10849)
|
||||
- AWS IAM customer-managed policy checks no longer emit `FAIL` on unattached policies unless `--scan-unused-services` is enabled [(#11150)](https://github.com/prowler-cloud/prowler/pull/11150)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- Google Workspace Directory checks sharing a single resource row, causing the service field to be overwritten by the last check executed [(#11176)](https://github.com/prowler-cloud/prowler/pull/11176)
|
||||
- Google Workspace Calendar and Drive services sharing a single resource row, causing the service field to be overwritten by the last check executed [(#11161)](https://github.com/prowler-cloud/prowler/pull/11161)
|
||||
- `zone_waf_enabled` check for Cloudflare provider now appends a plan-aware hint to the FAIL `status_extended`: a possible-false-positive note on paid plans (Pro, Business, Enterprise) where the legacy `waf` zone setting can read `off` even though WAF managed rulesets are deployed via the dashboard, and a "not available on the Cloudflare Free plan" note on Free zones [(#9896)](https://github.com/prowler-cloud/prowler/pull/9896)
|
||||
- Google Workspace Gmail checks sharing a single resource row, causing the service field to be overwritten by the last check executed [(#11169)](https://github.com/prowler-cloud/prowler/pull/11169)
|
||||
|
||||
---
|
||||
|
||||
## [5.26.2] (Prowler UNRELEASED)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- `entra_users_mfa_capable` and `entra_break_glass_account_fido2_security_key_registered` report a preventive FAIL per affected user (with the missing permission named) when the M365 service principal lacks `AuditLog.Read.All`, instead of mass false positives [(#10907)](https://github.com/prowler-cloud/prowler/pull/10907)
|
||||
- Update duplicated GCP CIS requirements IDs [(#11180)](https://github.com/prowler-cloud/prowler/pull/11180)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -154,6 +154,7 @@ from prowler.providers.llm.models import LLMOutputOptions
|
||||
from prowler.providers.m365.models import M365OutputOptions
|
||||
from prowler.providers.mongodbatlas.models import MongoDBAtlasOutputOptions
|
||||
from prowler.providers.nhn.models import NHNOutputOptions
|
||||
from prowler.providers.okta.models import OktaOutputOptions
|
||||
from prowler.providers.openstack.models import OpenStackOutputOptions
|
||||
from prowler.providers.oraclecloud.models import OCIOutputOptions
|
||||
from prowler.providers.vercel.models import VercelOutputOptions
|
||||
@@ -426,6 +427,10 @@ def prowler():
|
||||
output_options = VercelOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
elif provider == "okta":
|
||||
output_options = OktaOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
|
||||
# Run the quick inventory for the provider if available
|
||||
if hasattr(args, "quick_inventory") and args.quick_inventory:
|
||||
|
||||
@@ -914,7 +914,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "3.1",
|
||||
"Id": "3.10",
|
||||
"Description": "Use Identity Aware Proxy (IAP) to Ensure Only Traffic From Google IP Addresses are 'Allowed'",
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
@@ -1132,7 +1132,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.1",
|
||||
"Id": "4.10",
|
||||
"Description": "Ensure That App Engine Applications Enforce HTTPS Connections",
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
|
||||
@@ -1084,7 +1084,9 @@
|
||||
{
|
||||
"Id": "3.1.4.1.1",
|
||||
"Description": "Ensure external filesharing in Google Chat and Hangouts is disabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"chat_external_file_sharing_disabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1105,7 +1107,9 @@
|
||||
{
|
||||
"Id": "3.1.4.1.2",
|
||||
"Description": "Ensure internal filesharing in Google Chat and Hangouts is disabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"chat_internal_file_sharing_disabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1126,7 +1130,9 @@
|
||||
{
|
||||
"Id": "3.1.4.2.1",
|
||||
"Description": "Ensure Google Chat externally is restricted to allowed domains",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"chat_external_messaging_restricted"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1147,7 +1153,9 @@
|
||||
{
|
||||
"Id": "3.1.4.3.1",
|
||||
"Description": "Ensure external spaces in Google Chat and Hangouts are restricted",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"chat_external_spaces_restricted"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1168,7 +1176,9 @@
|
||||
{
|
||||
"Id": "3.1.4.4.1",
|
||||
"Description": "Ensure allow users to install Chat apps is disabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"chat_apps_installation_disabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1189,7 +1199,9 @@
|
||||
{
|
||||
"Id": "3.1.4.4.2",
|
||||
"Description": "Ensure allow users to add and use incoming webhooks is disabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"chat_incoming_webhooks_disabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
|
||||
@@ -1466,7 +1466,9 @@
|
||||
{
|
||||
"Id": "GWS.CHAT.2.1",
|
||||
"Description": "External file sharing SHALL be disabled to protect sensitive information from unauthorized or accidental sharing",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"chat_external_file_sharing_disabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Chat",
|
||||
@@ -1492,7 +1494,9 @@
|
||||
{
|
||||
"Id": "GWS.CHAT.4.1",
|
||||
"Description": "External chat messaging SHALL be restricted to allowlisted domains only",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"chat_external_messaging_restricted"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Chat",
|
||||
|
||||