Compare commits
6 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 832b01ae2a | |
| | 51d821fd67 | |
| | 25682479e3 | |
| | 9d48a894b1 | |
| | 43da3fb29e | |
| | e7a67eff5d | |
@@ -145,7 +145,7 @@ SENTRY_RELEASE=local
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}

#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.25.0
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.23.1

# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"

@@ -13,15 +13,11 @@ inputs:
poetry-version:
description: 'Poetry version to install'
required: false
default: '2.3.4'
default: '2.1.1'
install-dependencies:
description: 'Install Python dependencies with Poetry'
required: false
default: 'true'
update-lock:
description: 'Run `poetry lock` during setup. Only enable when a prior step mutates pyproject.toml (e.g. API `@master` VCS rewrite). Default: false.'
required: false
default: 'false'

runs:
using: 'composite'
@@ -78,7 +74,7 @@ runs:
grep -A2 -B2 "resolved_reference" poetry.lock

- name: Update poetry.lock (prowler repo only)
if: github.repository == 'prowler-cloud/prowler' && inputs.update-lock == 'true'
if: github.repository == 'prowler-cloud/prowler'
shell: bash
working-directory: ${{ inputs.working-directory }}
run: poetry lock
@@ -66,18 +66,6 @@ updates:
cooldown:
default-days: 7

- package-ecosystem: "pre-commit"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 25
target-branch: master
labels:
- "dependencies"
- "pre-commit"
cooldown:
default-days: 7

# Dependabot Updates are temporary disabled - 2025/04/15
# v4.6
# - package-ecosystem: "pip"
@@ -13,8 +13,6 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master

permissions: {}

jobs:
detect-release-type:
runs-on: ubuntu-latest

@@ -17,8 +17,6 @@ concurrency:
env:
API_WORKING_DIR: ./api

permissions: {}

jobs:
api-code-quality:
runs-on: ubuntu-latest
@@ -69,7 +67,6 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api
update-lock: 'true'

- name: Poetry check
if: steps.check-changes.outputs.any_changed == 'true'

@@ -24,8 +24,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
api-analyze:
name: CodeQL Security Analysis

@@ -33,8 +33,6 @@ env:
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api

permissions: {}

jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'

@@ -18,8 +18,6 @@ env:
API_WORKING_DIR: ./api
IMAGE_NAME: prowler-api

permissions: {}

jobs:
api-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'

@@ -17,8 +17,6 @@ concurrency:
env:
API_WORKING_DIR: ./api

permissions: {}

jobs:
api-security-scans:
runs-on: ubuntu-latest
@@ -72,7 +70,6 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api
update-lock: 'true'

- name: Bandit
if: steps.check-changes.outputs.any_changed == 'true'

@@ -30,8 +30,6 @@ env:
VALKEY_DB: 0
API_WORKING_DIR: ./api

permissions: {}

jobs:
api-tests:
runs-on: ubuntu-latest
@@ -118,7 +116,6 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api
update-lock: 'true'

- name: Run tests with pytest
if: steps.check-changes.outputs.any_changed == 'true'

@@ -17,8 +17,6 @@ env:
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_IGNORE: was-backported

permissions: {}

jobs:
backport:
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
@@ -21,8 +21,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
zizmor:
if: github.repository == 'prowler-cloud/prowler'

@@ -9,8 +9,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
cancel-in-progress: false

permissions: {}

jobs:
update-labels:
if: contains(github.event.issue.labels.*.name, 'status/awaiting-response')

@@ -16,8 +16,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

permissions: {}

jobs:
conventional-commit-check:
runs-on: ubuntu-latest

@@ -13,8 +13,6 @@ env:
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_COLOR: B60205

permissions: {}

jobs:
create-label:
runs-on: ubuntu-latest

@@ -13,8 +13,6 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master

permissions: {}

jobs:
detect-release-type:
runs-on: ubuntu-latest

@@ -14,8 +14,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
scan-secrets:
runs-on: ubuntu-latest

@@ -21,8 +21,6 @@ concurrency:
env:
CHART_PATH: contrib/k8s/helm/prowler-app

permissions: {}

jobs:
helm-lint:
if: github.repository == 'prowler-cloud/prowler'

@@ -13,8 +13,6 @@ concurrency:
env:
CHART_PATH: contrib/k8s/helm/prowler-app

permissions: {}

jobs:
release-helm-chart:
if: github.repository == 'prowler-cloud/prowler'

@@ -9,8 +9,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
cancel-in-progress: false

permissions: {}

jobs:
lock:
if: |
@@ -29,7 +27,7 @@ jobs:
api.github.com:443

- name: Comment and lock issue
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { owner, repo } = context.repo;

@@ -75,7 +75,7 @@ jobs:
with:
destination: /opt/gh-aw/actions
- name: Check workflow file timestamps
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_WORKFLOW_FILE: "issue-triage.lock.yml"
with:
@@ -86,7 +86,7 @@ jobs:
await main();
- name: Compute current body text
id: compute-text
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -96,7 +96,7 @@ jobs:
- name: Add comment with workflow run link
id: add-comment
if: github.event_name == 'issues' || github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment' || github.event_name == 'discussion' || github.event_name == 'discussion_comment' || (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.id == github.repository_id)
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_WORKFLOW_NAME: "Issue Triage"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🤖 Generated by [Prowler Issue Triage]({run_url}) [Experimental]\"}"
@@ -148,7 +148,7 @@ jobs:
with:
persist-credentials: false
- name: Merge remote .github folder
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_FILE: ".github/agents/issue-triage.md"
GH_AW_AGENT_IMPORT_SPEC: "../agents/issue-triage.md"
@@ -175,7 +175,7 @@ jobs:
id: checkout-pr
if: |
github.event.pull_request
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
with:
@@ -187,7 +187,7 @@ jobs:
await main();
- name: Generate agentic run info
id: generate_aw_info
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const fs = require('fs');
@@ -511,7 +511,7 @@ jobs:
}
GH_AW_MCP_CONFIG_EOF
- name: Generate workflow overview
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs');
@@ -606,7 +606,7 @@ jobs:
{{#runtime-import .github/workflows/issue-triage.md}}
GH_AW_PROMPT_EOF
- name: Substitute placeholders
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_GITHUB_ACTOR: ${{ github.actor }}
@@ -640,7 +640,7 @@ jobs:
}
});
- name: Interpolate variables and render templates
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }}
@@ -757,7 +757,7 @@ jobs:
bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID"
- name: Redact secrets in logs
if: always()
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -779,7 +779,7 @@ jobs:
if-no-files-found: warn
- name: Ingest agent output
id: collect_output
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
GH_AW_ALLOWED_DOMAINS: "*.pythonhosted.org,anaconda.org,api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,binstar.org,bootstrap.pypa.io,conda.anaconda.org,conda.binstar.org,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,mcp.context7.com,mcp.prowler.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pip.pypa.io,ppa.launchpad.net,pypi.org,pypi.python.org,raw.githubusercontent.com,registry.npmjs.org,repo.anaconda.com,repo.continuum.io,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com"
@@ -808,7 +808,7 @@ jobs:
if-no-files-found: ignore
- name: Parse agent logs for step summary
if: always()
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/
with:
@@ -819,7 +819,7 @@ jobs:
await main();
- name: Parse MCP gateway logs for step summary
if: always()
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -891,7 +891,7 @@ jobs:
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
- name: Process No-Op Messages
id: noop
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_NOOP_MAX: 1
@@ -905,7 +905,7 @@ jobs:
await main();
- name: Record Missing Tool
id: missing_tool
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Issue Triage"
@@ -918,7 +918,7 @@ jobs:
await main();
- name: Handle Agent Failure
id: handle_agent_failure
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Issue Triage"
@@ -937,7 +937,7 @@ jobs:
await main();
- name: Handle No-Op Message
id: handle_noop_message
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Issue Triage"
@@ -954,7 +954,7 @@ jobs:
await main();
- name: Update reaction comment with completion status
id: conclusion
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_COMMENT_ID: ${{ needs.activation.outputs.comment_id }}
@@ -1008,7 +1008,7 @@ jobs:
run: |
echo "Agent output-types: $AGENT_OUTPUT_TYPES"
- name: Setup threat detection
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
WORKFLOW_NAME: "Issue Triage"
WORKFLOW_DESCRIPTION: "[Experimental] AI-powered issue triage for Prowler - produces coding-agent-ready fix plans"
@@ -1062,7 +1062,7 @@ jobs:
XDG_CONFIG_HOME: /home/runner
- name: Parse threat detection results
id: parse_results
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -1102,7 +1102,7 @@ jobs:
- name: Add eyes reaction for immediate feedback
id: react
if: github.event_name == 'issues' || github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment' || github.event_name == 'discussion' || github.event_name == 'discussion_comment' || (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.id == github.repository_id)
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_REACTION: "eyes"
with:
@@ -1114,7 +1114,7 @@ jobs:
await main();
- name: Check team membership for workflow
id: check_membership
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_REQUIRED_ROLES: admin,maintainer,write
with:
@@ -1126,7 +1126,7 @@ jobs:
await main();
- name: Check user rate limit
id: check_rate_limit
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_RATE_LIMIT_MAX: "5"
GH_AW_RATE_LIMIT_WINDOW: "60"
@@ -1185,7 +1185,7 @@ jobs:
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
- name: Process Safe Outputs
id: process_safe_outputs
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":1},\"missing_data\":{},\"missing_tool\":{}}"
@@ -15,8 +15,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

permissions: {}

jobs:
labeler:
runs-on: ubuntu-latest

@@ -32,8 +32,6 @@ env:
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-mcp

permissions: {}

jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'

@@ -18,8 +18,6 @@ env:
MCP_WORKING_DIR: ./mcp_server
IMAGE_NAME: prowler-mcp

permissions: {}

jobs:
mcp-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -89,9 +87,6 @@ jobs:
api.github.com:443
mirror.gcr.io:443
check.trivy.dev:443
get.trivy.dev:443
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443

- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

@@ -14,8 +14,6 @@ env:
PYTHON_VERSION: "3.12"
WORKING_DIRECTORY: ./mcp_server

permissions: {}

jobs:
validate-release:
if: github.repository == 'prowler-cloud/prowler'

@@ -16,8 +16,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

permissions: {}

jobs:
check-changelog:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false

@@ -16,8 +16,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

permissions: {}

jobs:
check-compliance-mapping:
if: contains(github.event.pull_request.labels.*.name, 'no-compliance-check') == false

@@ -15,8 +15,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

permissions: {}

jobs:
check-conflicts:
runs-on: ubuntu-latest

@@ -12,8 +12,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: false

permissions: {}

jobs:
trigger-cloud-pull-request:
if: |

@@ -17,8 +17,6 @@ concurrency:
env:
PROWLER_VERSION: ${{ inputs.prowler_version }}

permissions: {}

jobs:
prepare-release:
if: github.event_name == 'workflow_dispatch' && github.repository == 'prowler-cloud/prowler'
@@ -40,11 +38,15 @@ jobs:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
persist-credentials: false

- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: '3.12'
install-dependencies: 'false'

- name: Install Poetry
run: |
python3 -m pip install --user poetry==2.1.1
echo "$HOME/.local/bin" >> $GITHUB_PATH

- name: Configure Git
run: |

@@ -13,8 +13,6 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master

permissions: {}

jobs:
detect-release-type:
runs-on: ubuntu-latest

@@ -10,8 +10,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
check-duplicate-test-names:
if: github.repository == 'prowler-cloud/prowler'

@@ -14,8 +14,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
sdk-code-quality:
if: github.repository == 'prowler-cloud/prowler'
@@ -71,11 +69,22 @@ jobs:
contrib/**
**/AGENTS.md

- name: Setup Python with Poetry
- name: Install Poetry
if: steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/setup-python-poetry
run: pipx install poetry==2.1.1

- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'

- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: |
poetry install --no-root
poetry run pip list

- name: Check Poetry lock file
if: steps.check-changes.outputs.any_changed == 'true'
@@ -30,8 +30,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
sdk-analyze:
if: github.repository == 'prowler-cloud/prowler'

@@ -47,8 +47,6 @@ env:
# AWS configuration (for ECR)
AWS_REGION: us-east-1

permissions: {}

jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
@@ -76,14 +74,15 @@ jobs:
with:
persist-credentials: false

- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'

- name: Inject poetry-bumpversion plugin
run: pipx inject poetry poetry-bumpversion
- name: Install Poetry
run: |
pipx install poetry==2.1.1
pipx inject poetry poetry-bumpversion

- name: Get Prowler version and set tags
id: get-prowler-version

@@ -17,8 +17,6 @@ concurrency:
env:
IMAGE_NAME: prowler

permissions: {}

jobs:
sdk-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -87,7 +85,6 @@ jobs:
check.trivy.dev:443
debian.map.fastlydns.net:80
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443
pypi.org:443
files.pythonhosted.org:443
www.powershellgallery.com:443

@@ -13,8 +13,6 @@ env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: '3.12'

permissions: {}

jobs:
validate-release:
if: github.repository == 'prowler-cloud/prowler'
@@ -75,11 +73,13 @@ jobs:
with:
persist-credentials: false

- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
- name: Install Poetry
run: pipx install poetry==2.1.1

- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'

- name: Build Prowler package
run: poetry build
@@ -111,11 +111,13 @@ jobs:
with:
persist-credentials: false

- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
- name: Install Poetry
run: pipx install poetry==2.1.1

- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'

- name: Install toml package
run: pip install toml
@@ -13,8 +13,6 @@ env:
PYTHON_VERSION: '3.12'
AWS_REGION: 'us-east-1'

permissions: {}

jobs:
refresh-aws-regions:
if: github.repository == 'prowler-cloud/prowler'

@@ -12,8 +12,6 @@ concurrency:
env:
PYTHON_VERSION: '3.12'

permissions: {}

jobs:
refresh-oci-regions:
if: github.repository == 'prowler-cloud/prowler'

@@ -14,8 +14,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
sdk-security-scans:
if: github.repository == 'prowler-cloud/prowler'
@@ -71,11 +69,20 @@ jobs:
contrib/**
**/AGENTS.md

- name: Setup Python with Poetry
- name: Install Poetry
if: steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/setup-python-poetry
run: pipx install poetry==2.1.1

- name: Set up Python 3.12
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: '3.12'
cache: 'poetry'

- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: poetry install --no-root

- name: Security scan with Bandit
if: steps.check-changes.outputs.any_changed == 'true'

@@ -14,8 +14,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
sdk-tests:
if: github.repository == 'prowler-cloud/prowler'
@@ -92,11 +90,20 @@ jobs:
contrib/**
**/AGENTS.md

- name: Setup Python with Poetry
- name: Install Poetry
if: steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/setup-python-poetry
run: pipx install poetry==2.1.1

- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'

- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: poetry install --no-root

# AWS Provider
- name: Check if AWS files changed

@@ -31,8 +31,6 @@ on:
description: "Whether there are UI E2E tests to run"
value: ${{ jobs.analyze.outputs.has-ui-e2e }}

permissions: {}

jobs:
analyze:
runs-on: ubuntu-latest

@@ -13,8 +13,6 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master

permissions: {}

jobs:
detect-release-type:
runs-on: ubuntu-latest

@@ -26,8 +26,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
ui-analyze:
if: github.repository == 'prowler-cloud/prowler'
@@ -35,8 +35,6 @@ env:
# Build args
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1

permissions: {}

jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'

@@ -18,8 +18,6 @@ env:
UI_WORKING_DIR: ./ui
IMAGE_NAME: prowler-ui

permissions: {}

jobs:
ui-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -91,8 +89,6 @@ jobs:
mirror.gcr.io:443
check.trivy.dev:443
get.trivy.dev:443
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443

- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

@@ -15,8 +15,6 @@ on:
- 'ui/**'
- 'api/**' # API changes can affect UI E2E

permissions: {}

jobs:
# First, analyze which tests need to run
impact-analysis:

@@ -18,8 +18,6 @@ env:
UI_WORKING_DIR: ./ui
NODE_VERSION: '24.13.0'

permissions: {}

jobs:
ui-tests:
runs-on: ubuntu-latest

@@ -84,7 +84,6 @@ continue.json
.continuerc.json

# AI Coding Assistants - OpenCode
.opencode/
opencode.json

# AI Coding Assistants - GitHub Copilot
@@ -1,7 +1,7 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
rev: v4.6.0
hooks:
- id: check-merge-conflict
- id: check-yaml
@@ -16,7 +16,7 @@ repos:

## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.16.0
rev: v2.13.0
hooks:
- id: pretty-format-toml
args: [--autofix]
@@ -24,21 +24,21 @@ repos:

## GITHUB ACTIONS
- repo: https://github.com/zizmorcore/zizmor-pre-commit
rev: v1.24.1
rev: v1.6.0
hooks:
- id: zizmor
files: ^\.github/

## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.11.0
rev: v0.10.0
hooks:
- id: shellcheck
exclude: contrib

## PYTHON
- repo: https://github.com/myint/autoflake
rev: v2.3.3
rev: v2.3.1
hooks:
- id: autoflake
exclude: ^skills/
@@ -50,27 +50,27 @@ repos:
]

- repo: https://github.com/pycqa/isort
rev: 8.0.1
rev: 5.13.2
hooks:
- id: isort
exclude: ^skills/
args: ["--profile", "black"]

- repo: https://github.com/psf/black
rev: 26.3.1
rev: 24.4.2
hooks:
- id: black
exclude: ^skills/

- repo: https://github.com/pycqa/flake8
rev: 7.3.0
rev: 7.0.0
hooks:
- id: flake8
exclude: (contrib|^skills/)
args: ["--ignore=E266,W503,E203,E501,W605"]

- repo: https://github.com/python-poetry/poetry
rev: 2.3.4
rev: 2.1.1
hooks:
- id: poetry-check
name: API - poetry-check
@@ -93,7 +93,7 @@ repos:
pass_filenames: false

- repo: https://github.com/hadolint/hadolint
rev: v2.14.0
rev: v2.13.0-beta
hooks:
- id: hadolint
args: ["--ignore=DL3013"]
@@ -13,7 +13,7 @@ build:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- python -m pip install poetry==2.3.4
- python -m pip install poetry
post_install:
# Install dependencies with 'docs' dependency group
# https://python-poetry.org/docs/managing-dependencies/#dependency-groups

@@ -140,7 +140,7 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,

| Component | Location | Tech Stack |
|-----------|----------|------------|
| SDK | `prowler/` | Python 3.10+, Poetry 2.3+ |
| SDK | `prowler/` | Python 3.10+, Poetry |
| API | `api/` | Django 5.1, DRF, Celery |
| UI | `ui/` | Next.js 15, React 19, Tailwind 4 |
| MCP Server | `mcp_server/` | FastMCP, Python 3.12+ |
@@ -153,12 +153,12 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,

```bash
# Setup
poetry install --with dev
poetry run prek install
poetry run pre-commit install

# Code quality
poetry run make lint
poetry run make format
poetry run prek run --all-files
poetry run pre-commit run --all-files
```

---

@@ -68,7 +68,7 @@ ENV HOME='/home/prowler'
ENV PATH="${HOME}/.local/bin:${PATH}"
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry==2.3.4
pip install --no-cache-dir poetry

RUN poetry install --compile && \
rm -rf ~/.cache/pip
@@ -246,7 +246,14 @@ Some pre-commit hooks require tools installed on your system:

1. **Install [TruffleHog](https://github.com/trufflesecurity/trufflehog#install)** (secret scanning) — see the [official installation options](https://github.com/trufflesecurity/trufflehog#install).

2. **Install [Hadolint](https://github.com/hadolint/hadolint#install)** (Dockerfile linting) — see the [official installation options](https://github.com/hadolint/hadolint#install).
2. **Install [Safety](https://github.com/pyupio/safety)** (dependency vulnerability checking):

```console
# Requires a Python environment (e.g. via pyenv)
pip install safety
```

3. **Install [Hadolint](https://github.com/hadolint/hadolint#install)** (Dockerfile linting) — see the [official installation options](https://github.com/hadolint/hadolint#install).

## Prowler CLI
### Pip package
@@ -2,33 +2,6 @@

All notable changes to the **Prowler API** are documented in this file.

## [1.25.1] (Prowler v5.24.1)

### 🐞 Fixed

- Attack Paths: Missing `tenant_id` filter while getting related findings after scan completes [(#10722)](https://github.com/prowler-cloud/prowler/pull/10722)

---

## [1.25.0] (Prowler v5.24.0)

### 🔄 Changed

- Bump Poetry to `2.3.4` in Dockerfile and pre-commit hooks. Regenerate `api/poetry.lock` [(#10681)](https://github.com/prowler-cloud/prowler/pull/10681)
- Attack Paths: Remove dead `cleanup_findings` no-op and its supporting `prowler_finding_lastupdated` index [(#10684)](https://github.com/prowler-cloud/prowler/pull/10684)

### 🐞 Fixed

- Worker-beat race condition on cold start: replaced `sleep 15` with API service healthcheck dependency (Docker Compose) and init containers (Helm), aligned Gunicorn default port to `8080` [(#10603)](https://github.com/prowler-cloud/prowler/pull/10603)
- API container startup crash on Linux due to root-owned bind-mount preventing JWT key generation [(#10646)](https://github.com/prowler-cloud/prowler/pull/10646)
- Finding group resources endpoints now include findings without associated resources (orphan IaC findings) as simulated resource rows, and return one row per finding when multiple findings share a resource [(#10708)](https://github.com/prowler-cloud/prowler/pull/10708)

### 🔐 Security

- `pytest` from 8.2.2 to 9.0.3 to fix CVE-2025-71176 [(#10678)](https://github.com/prowler-cloud/prowler/pull/10678)

---

## [1.24.0] (Prowler v5.23.0)

### 🚀 Added
@@ -39,7 +12,6 @@ All notable changes to the **Prowler API** are documented in this file.

- Finding groups list and latest endpoints support `sort=delta`, ordering by `new_count` then `changed_count` so groups with the most new findings rank highest [(#10606)](https://github.com/prowler-cloud/prowler/pull/10606)
- Finding group resources endpoints (`/finding-groups/{check_id}/resources` and `/finding-groups/latest/{check_id}/resources`) now expose `finding_id` per row, pointing to the most recent matching Finding for each resource. UUIDv7 ordering guarantees `Max(finding__id)` resolves to the latest snapshot [(#10630)](https://github.com/prowler-cloud/prowler/pull/10630)
- Handle CIS and CISA SCuBA compliance framework from google workspace [(#10629)](https://github.com/prowler-cloud/prowler/pull/10629)
- Sort support for all finding group counter fields: `pass_muted_count`, `fail_muted_count`, `manual_muted_count`, and all `new_*`/`changed_*` status-mute breakdown counters [(#10655)](https://github.com/prowler-cloud/prowler/pull/10655)

### 🔄 Changed
@@ -71,7 +71,7 @@ RUN mkdir -p /tmp/prowler_api_output
COPY pyproject.toml ./

RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry==2.3.4
pip install --no-cache-dir poetry

ENV PATH="/home/prowler/.local/bin:$PATH"

@@ -56,6 +56,7 @@ start_worker() {

start_worker_beat() {
echo "Starting the worker-beat..."
sleep 15
poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
}
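The changelog entry removed above ([#10603]) explains why this `sleep 15` had been dropped: a fixed delay races against the API actually being ready, and was replaced by a Compose healthcheck dependency and Helm init containers. A minimal stdlib sketch of the healthcheck-polling idea that entry describes (the URL and function name are illustrative, not Prowler code):

```python
import time
import urllib.request


def wait_for_healthy(url: str, timeout: float = 60.0, interval: float = 1.0) -> None:
    """Poll a healthcheck endpoint instead of sleeping for a fixed 15 seconds."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            with urllib.request.urlopen(url, timeout=5) as resp:
                if resp.status == 200:
                    return  # dependency is up; safe to start the worker-beat
        except OSError:
            pass  # not reachable yet; keep polling
        time.sleep(interval)
    raise TimeoutError(f"{url} never became healthy within {timeout}s")
```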

@@ -25,7 +25,7 @@ dependencies = [
"defusedxml==0.7.1",
"gunicorn==23.0.0",
"lxml==5.3.2",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.23",
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (==1.3.0)",
"sentry-sdk[django] (==2.56.0)",
@@ -50,7 +50,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.26.0"
version = "1.24.1"

[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -65,7 +65,7 @@ freezegun = "1.5.1"
marshmallow = "==3.26.2"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "9.0.3"
pytest = "8.2.2"
pytest-cov = "5.0.0"
pytest-django = "4.8.0"
pytest-env = "1.1.3"
@@ -75,4 +75,3 @@ ruff = "0.5.0"
safety = "3.7.0"
tqdm = "4.67.1"
vulture = "2.14"
prek = "0.3.9"
@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.26.0
version: 1.24.1
description: |-
Prowler API specification.
@@ -57,7 +57,6 @@ from api.models import (
ProviderGroupMembership,
ProviderSecret,
Resource,
ResourceFindingMapping,
Role,
RoleProviderGroupRelationship,
SAMLConfiguration,
@@ -16031,36 +16030,6 @@ class TestFindingGroupViewSet:
# s3_bucket_public_access has 2 findings with 2 different resources
assert len(data) == 2

def test_resources_id_matches_resource_id_for_mapped_findings(
self, authenticated_client, finding_groups_fixture
):
"""Findings with a resource expose the resource id as row id (hot path contract)."""
response = authenticated_client.get(
reverse(
"finding-group-resources", kwargs={"pk": "s3_bucket_public_access"}
),
{"filter[inserted_at]": TODAY},
)

assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]
assert data, "expected resources in response"

resource_ids = set(
ResourceFindingMapping.objects.filter(
finding__check_id="s3_bucket_public_access",
).values_list("resource_id", flat=True)
)
finding_ids = set(
Finding.objects.filter(
check_id="s3_bucket_public_access",
).values_list("id", flat=True)
)

returned_ids = {item["id"] for item in data}
assert returned_ids <= {str(rid) for rid in resource_ids}
assert returned_ids.isdisjoint({str(fid) for fid in finding_ids})

def test_resources_fields(self, authenticated_client, finding_groups_fixture):
"""Test resource fields (uid, name, service, region, type) have valid values."""
response = authenticated_client.get(
@@ -17077,57 +17046,6 @@ class TestFindingGroupViewSet:
# Descending boolean: True (1) before False (0)
assert muted_values == sorted(muted_values, reverse=True)

@pytest.mark.parametrize(
"endpoint_name", ["finding-group-list", "finding-group-latest"]
)
@pytest.mark.parametrize(
"sort_field",
[
"pass_muted_count",
"fail_muted_count",
"manual_muted_count",
"new_fail_count",
"new_fail_muted_count",
"new_pass_count",
"new_pass_muted_count",
"new_manual_count",
"new_manual_muted_count",
"changed_fail_count",
"changed_fail_muted_count",
"changed_pass_count",
"changed_pass_muted_count",
"changed_manual_count",
"changed_manual_muted_count",
],
)
def test_finding_groups_sort_by_counter_fields(
self,
authenticated_client,
finding_groups_fixture,
endpoint_name,
sort_field,
):
"""All counter fields are accepted as sort parameters (asc and desc)."""
params = {"sort": f"-{sort_field}"}
if endpoint_name == "finding-group-list":
params["filter[inserted_at]"] = TODAY

response = authenticated_client.get(reverse(endpoint_name), params)
assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]
assert len(data) > 0

desc_values = [item["attributes"][sort_field] for item in data]
assert desc_values == sorted(desc_values, reverse=True)

params["sort"] = sort_field
response = authenticated_client.get(reverse(endpoint_name), params)
assert response.status_code == status.HTTP_200_OK
asc_values = [
item["attributes"][sort_field] for item in response.json()["data"]
]
assert asc_values == sorted(asc_values)

@pytest.mark.parametrize(
"endpoint_name", ["finding-group-list", "finding-group-latest"]
)
@@ -4225,11 +4225,10 @@ class FindingGroupResourceSerializer(BaseSerializerV1):
Serializer for Finding Group Resources - resources within a finding group.

Returns individual resources with their current status, severity,
and timing information. Orphan findings (without any resource) expose the
finding id as `id` so the row stays identifiable in the UI.
and timing information.
"""

id = serializers.UUIDField(source="row_id")
id = serializers.UUIDField(source="resource_id")
resource = serializers.SerializerMethodField()
provider = serializers.SerializerMethodField()
finding_id = serializers.UUIDField()
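This hunk reverts the `id` field's `source` from `row_id` back to `resource_id`. A pure-Python sketch of the row-id contract being removed (the `Row` class is illustrative; the field names come from the diff): mapped rows expose the resource id, while orphan rows fall back to the finding id so they stay identifiable in the UI.

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Row:
    resource_id: Optional[str]  # None for orphan findings (e.g. IaC)
    finding_id: str

    @property
    def row_id(self) -> str:
        # Mapped finding: the resource id keeps one row per resource.
        # Orphan finding: the finding id keeps the row identifiable.
        return self.resource_id or self.finding_id


assert Row(resource_id="res-1", finding_id="find-1").row_id == "res-1"
assert Row(resource_id=None, finding_id="find-2").row_id == "find-2"
```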

@@ -35,13 +35,11 @@ from django.db.models import (
CharField,
Count,
DecimalField,
Exists,
ExpressionWrapper,
F,
IntegerField,
Max,
Min,
OuterRef,
Prefetch,
Q,
QuerySet,
@@ -417,7 +415,7 @@ class SchemaView(SpectacularAPIView):

def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.26.0"
spectacular_settings.VERSION = "1.24.1"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -7313,23 +7311,8 @@ class FindingGroupViewSet(BaseRLSViewSet):
"pass_count": "pass_count",
"manual_count": "manual_count",
"muted_count": "muted_count",
"pass_muted_count": "pass_muted_count",
"fail_muted_count": "fail_muted_count",
"manual_muted_count": "manual_muted_count",
"new_count": "new_count",
"new_fail_count": "new_fail_count",
"new_fail_muted_count": "new_fail_muted_count",
"new_pass_count": "new_pass_count",
"new_pass_muted_count": "new_pass_muted_count",
"new_manual_count": "new_manual_count",
"new_manual_muted_count": "new_manual_muted_count",
"changed_count": "changed_count",
"changed_fail_count": "changed_fail_count",
"changed_fail_muted_count": "changed_fail_muted_count",
"changed_pass_count": "changed_pass_count",
"changed_pass_muted_count": "changed_pass_muted_count",
"changed_manual_count": "changed_manual_count",
"changed_manual_muted_count": "changed_manual_muted_count",
"resources_total": "resources_total",
"resources_fail": "resources_fail",
"first_seen_at": "agg_first_seen_at",
@@ -7580,53 +7563,6 @@ class FindingGroupViewSet(BaseRLSViewSet):
.order_by(*ordering)
)

def _orphan_findings_queryset(self, filtered_queryset, finding_ids=None):
"""Findings in the filtered set with no ResourceFindingMapping entries."""
orphan_qs = filtered_queryset.filter(
~Exists(ResourceFindingMapping.objects.filter(finding_id=OuterRef("pk")))
)
if finding_ids is not None:
orphan_qs = orphan_qs.filter(id__in=finding_ids)
return orphan_qs

def _has_orphan_findings(self, filtered_queryset) -> bool:
"""Return True if any finding in the filtered set has no resource mapping."""
return self._orphan_findings_queryset(filtered_queryset).exists()

def _orphan_aggregation_values(self, orphan_queryset):
"""Raw rows for orphan findings; resource payload synthesized from metadata.

check_metadata is stored with lowercase keys (see
`prowler.lib.outputs.finding.Finding.get_metadata`) and
`Finding.resource_groups` is already denormalized at ingest time.
"""
return orphan_queryset.annotate(
_provider_type=F("scan__provider__provider"),
_provider_uid=F("scan__provider__uid"),
_provider_alias=F("scan__provider__alias"),
_svc=KeyTextTransform("servicename", "check_metadata"),
_region=KeyTextTransform("region", "check_metadata"),
_rtype=KeyTextTransform("resourcetype", "check_metadata"),
_rgroup=F("resource_groups"),
).values(
"id",
"uid",
"status",
"severity",
"delta",
"muted",
"muted_reason",
"first_seen_at",
"inserted_at",
"_provider_type",
"_provider_uid",
"_provider_alias",
"_svc",
"_region",
"_rtype",
"_rgroup",
)
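The removed `_orphan_findings_queryset` expresses an anti-join: keep only findings with no `ResourceFindingMapping` row, via `~Exists(...)` correlated on the finding's primary key. A plain-Python sketch of the same idea, with lists of dicts standing in for querysets (illustrative data, not Prowler models):

```python
findings = [{"id": "f1"}, {"id": "f2"}, {"id": "f3"}]
mappings = [
    {"finding_id": "f1", "resource_id": "r1"},
    {"finding_id": "f2", "resource_id": "r2"},
]

# The EXISTS side of the join: every finding id that has a mapping row.
mapped_ids = {m["finding_id"] for m in mappings}

# NOT EXISTS: findings with no mapping row are the orphans (e.g. IaC).
orphans = [f for f in findings if f["id"] not in mapped_ids]
assert [f["id"] for f in orphans] == ["f3"]
```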

def _post_process_resources(self, resource_data):
"""Convert resource aggregation rows to API output."""
results = []
@@ -7648,13 +7584,9 @@ class FindingGroupViewSet(BaseRLSViewSet):
else:
delta = None

resource_id = row["resource_id"]
finding_id = str(row["finding_id"]) if row.get("finding_id") else None

results.append(
{
"row_id": resource_id,
"resource_id": resource_id,
"resource_id": row["resource_id"],
"resource_uid": row["resource_uid"],
"resource_name": row["resource_name"],
"resource_service": row["resource_service"],
@@ -7673,46 +7605,9 @@ class FindingGroupViewSet(BaseRLSViewSet):
"muted": bool(row.get("muted", False)),
"muted_reason": row.get("muted_reason"),
"resource_group": row.get("resource_group", ""),
"finding_id": finding_id,
}
)

return results

def _post_process_orphans(self, orphan_rows):
"""Convert orphan finding rows into the same API shape as mapping rows."""
results = []
for row in orphan_rows:
status_val = row["status"]
status = status_val if status_val in ("FAIL", "PASS") else "MANUAL"

muted = bool(row["muted"])
delta_val = row.get("delta")
delta = delta_val if delta_val in ("new", "changed") and not muted else None

finding_id = str(row["id"])

results.append(
{
"row_id": finding_id,
"resource_id": None,
"resource_uid": row["uid"],
"resource_name": row["uid"],
"resource_service": row["_svc"] or "",
"resource_region": row["_region"] or "",
"resource_type": row["_rtype"] or "",
"provider_type": row["_provider_type"],
"provider_uid": row["_provider_uid"],
"provider_alias": row["_provider_alias"],
"status": status,
"severity": row["severity"],
"delta": delta,
"first_seen_at": row["first_seen_at"],
"last_seen_at": row["inserted_at"],
"muted": muted,
"muted_reason": row.get("muted_reason"),
"resource_group": row["_rgroup"] or "",
"finding_id": finding_id,
"finding_id": (
str(row["finding_id"]) if row.get("finding_id") else None
),
}
)

@@ -7788,6 +7683,11 @@ class FindingGroupViewSet(BaseRLSViewSet):
)
)

# delta_order is a virtual sort field: expand it to a
# lexicographic ordering by (new_count, changed_count) so groups
# with more new findings rank higher, with changed_count as the
# tie-breaker (preserves the "new > changed" priority used by
# the resources endpoint, but driven by the actual counters).
expanded_ordering = []
for field in ordering:
if field.lstrip("-") == "delta_order":
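A self-contained sketch of the expansion the comment above describes, with the loop completed (the helper wrapper is mine; the counter names are the real ones used by this view):

```python
def expand_ordering(ordering):
    """Replace the virtual delta_order field with its two real counters."""
    expanded = []
    for field in ordering:
        if field.lstrip("-") == "delta_order":
            prefix = "-" if field.startswith("-") else ""
            # new_count ranks first; changed_count breaks ties.
            expanded.extend([f"{prefix}new_count", f"{prefix}changed_count"])
        else:
            expanded.append(field)
    return expanded


assert expand_ordering(["-delta_order", "id"]) == ["-new_count", "-changed_count", "id"]
```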

@@ -7821,64 +7721,41 @@ class FindingGroupViewSet(BaseRLSViewSet):
def _paginated_resource_response(
self, request, filtered_queryset, resource_ids, tenant_id
):
"""Paginate and return resources, appending orphan findings when present.
"""Paginate and return resources.

Hot path (no orphans, or resource filter applied): resources come from
ResourceFindingMapping aggregation. Untouched pre-existing behaviour.

Orphan fallback: findings without a mapping (e.g. IaC) are appended
after mapping rows as synthesised resource-like rows so they remain
visible in the UI without paying the aggregation cost on the hot path.
Without sort: paginate lightweight resource IDs first, aggregate only the page.
With sort: build a lightweight ordering subquery (resource_id + sort keys),
paginate that, then aggregate full details only for the page.
"""
sort_param = request.query_params.get("sort")
ordering = None

if sort_param:
validated = self._validate_sort_fields(sort_param, self._RESOURCE_SORT_MAP)
ordering = validated if validated else None
ordering = self._validate_sort_fields(sort_param, self._RESOURCE_SORT_MAP)
if ordering:
if "resource_id" not in {field.lstrip("-") for field in ordering}:
ordering.append("resource_id")

# Resource filters can only match findings with resources; skip orphan
# detection entirely when they are present.
if resource_ids is not None:
return self._mapping_paginated_response(
request, filtered_queryset, resource_ids, tenant_id, ordering
)
# Phase 1: lightweight aggregation with only sort keys, paginate
ordering_qs = self._build_resource_ordering_queryset(
filtered_queryset,
resource_ids=resource_ids,
tenant_id=tenant_id,
ordering=ordering,
)
page = self.paginate_queryset(ordering_qs)
if page is not None:
page_ids = [row["resource_id"] for row in page]
resource_data = self._build_resource_aggregation(
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
)
# Re-sort to match the page ordering
id_order = {rid: idx for idx, rid in enumerate(page_ids)}
results = self._post_process_resources(resource_data)
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
serializer = FindingGroupResourceSerializer(results, many=True)
return self.get_paginated_response(serializer.data)

has_mappings = self._build_resource_mapping_queryset(
filtered_queryset, resource_ids=None, tenant_id=tenant_id
).exists()

if has_mappings:
# Normal or mixed group: serve only resource-mapped rows.
# TODO: Orphan findings in mixed groups are intentionally excluded
# until the ephemeral resources strategy is decided. When resolved,
# route mixed groups to _combined_paginated_response instead.
return self._mapping_paginated_response(
request, filtered_queryset, resource_ids, tenant_id, ordering
)

# Pure orphan group (e.g. IaC): synthesize resource-like rows.
return self._combined_paginated_response(
request, filtered_queryset, tenant_id, ordering
)

def _mapping_paginated_response(
self, request, filtered_queryset, resource_ids, tenant_id, ordering
):
"""Mapping-only paginated response (original fast path)."""
if ordering:
if "resource_id" not in {field.lstrip("-") for field in ordering}:
ordering.append("resource_id")

# Phase 1: lightweight aggregation with only sort keys, paginate
ordering_qs = self._build_resource_ordering_queryset(
filtered_queryset,
resource_ids=resource_ids,
tenant_id=tenant_id,
ordering=ordering,
)
page = self.paginate_queryset(ordering_qs)
if page is not None:
page_ids = [row["resource_id"] for row in page]
page_ids = [row["resource_id"] for row in ordering_qs]
resource_data = self._build_resource_aggregation(
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
)
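Both code paths above implement the same two-phase pattern: paginate a cheap ordering queryset first, run the expensive aggregation only for the ids on the page, then re-sort the aggregated rows to match the page order. A list-based sketch (`PAGE_SIZE` and the helper names are illustrative, not the Prowler API):

```python
PAGE_SIZE = 2


def paginate_then_aggregate(ordered_ids, fetch_details, page_number=1):
    start = (page_number - 1) * PAGE_SIZE
    page_ids = ordered_ids[start:start + PAGE_SIZE]  # phase 1: cheap id ordering
    rows = fetch_details(page_ids)                   # phase 2: aggregate only the page
    order = {rid: idx for idx, rid in enumerate(page_ids)}
    rows.sort(key=lambda r: order.get(r["resource_id"], 0))  # restore page order
    return rows


details = {rid: {"resource_id": rid} for rid in ("r1", "r2", "r3")}
rows = paginate_then_aggregate(["r3", "r1", "r2"], lambda ids: [details[i] for i in ids])
assert [r["resource_id"] for r in rows] == ["r3", "r1"]
```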
|
||||
@@ -7886,18 +7763,10 @@ class FindingGroupViewSet(BaseRLSViewSet):
|
||||
results = self._post_process_resources(resource_data)
|
||||
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
|
||||
serializer = FindingGroupResourceSerializer(results, many=True)
|
||||
return self.get_paginated_response(serializer.data)
|
||||
|
||||
page_ids = [row["resource_id"] for row in ordering_qs]
|
||||
resource_data = self._build_resource_aggregation(
|
||||
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
|
||||
)
|
||||
id_order = {rid: idx for idx, rid in enumerate(page_ids)}
|
||||
results = self._post_process_resources(resource_data)
|
||||
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
|
||||
serializer = FindingGroupResourceSerializer(results, many=True)
|
||||
return Response(serializer.data)
|
||||
return Response(serializer.data)
|
||||
|
||||
# No sort (or only empty sort fragments): paginate lightweight resource IDs
|
||||
# first, aggregate only the page.
|
||||
mapping_qs = self._build_resource_mapping_queryset(
|
||||
filtered_queryset, resource_ids=resource_ids, tenant_id=tenant_id
|
||||
)
|
||||
@@ -7925,95 +7794,6 @@ class FindingGroupViewSet(BaseRLSViewSet):
|
||||
serializer = FindingGroupResourceSerializer(results, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
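The two-phase pattern above — paginate cheap sort keys first, aggregate only the page, then restore the page order — is easy to lose track of in diff form. The helper below is a minimal, hypothetical sketch of the idea (it is not code from this changeset; `aggregate` stands in for the heavy aggregation query):

```python
def paginate_then_aggregate(page_ids, aggregate):
    """Aggregate only the already-paginated IDs, then restore page order."""
    rows = aggregate(page_ids)  # heavy query, limited to one page of IDs
    id_order = {rid: idx for idx, rid in enumerate(page_ids)}
    # Unknown IDs sort first, mirroring the viewset's `id_order.get(..., 0)`.
    rows.sort(key=lambda r: id_order.get(r["resource_id"], 0))
    return rows
```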
    def _combined_paginated_response(
        self, request, filtered_queryset, tenant_id, ordering
    ):
        """Mapping rows + orphan findings appended at end.

        Orphans sit after mapping rows regardless of sort. This keeps the
        mapping-only code path intact for checks that have no orphans (the
        common case) and avoids paying UNION/coalesce costs there.
        """
        mapping_qs = self._build_resource_mapping_queryset(
            filtered_queryset, resource_ids=None, tenant_id=tenant_id
        )
        mapping_count = mapping_qs.values("resource_id").distinct().count()

        orphan_ids = list(
            self._orphan_findings_queryset(filtered_queryset)
            .order_by("id")
            .values_list("id", flat=True)
        )
        orphan_count = len(orphan_ids)
        total = mapping_count + orphan_count

        # Paginate a simple [0..total) index sequence so DRF produces proper
        # links/meta; then slice mapping / orphan sources accordingly.
        page = self.paginate_queryset(range(total))
        page_indices = list(page) if page is not None else list(range(total))

        mapping_indices = [i for i in page_indices if i < mapping_count]
        orphan_positions = [
            i - mapping_count for i in page_indices if i >= mapping_count
        ]

        mapping_results = []
        if mapping_indices:
            start = mapping_indices[0]
            stop = mapping_indices[-1] + 1
            if ordering:
                ordering_fields = list(ordering)
                if "resource_id" not in {
                    field.lstrip("-") for field in ordering_fields
                }:
                    ordering_fields.append("resource_id")
                ordered_qs = self._build_resource_ordering_queryset(
                    filtered_queryset,
                    resource_ids=None,
                    tenant_id=tenant_id,
                    ordering=ordering_fields,
                )
                slice_rids = [row["resource_id"] for row in ordered_qs[start:stop]]
            else:
                slice_rids = list(
                    mapping_qs.values_list("resource_id", flat=True)
                    .distinct()
                    .order_by("resource_id")[start:stop]
                )
            if slice_rids:
                resource_data = self._build_resource_aggregation(
                    filtered_queryset,
                    resource_ids=slice_rids,
                    tenant_id=tenant_id,
                )
                rows_by_rid = {row["resource_id"]: row for row in resource_data}
                ordered_rows = [
                    rows_by_rid[rid] for rid in slice_rids if rid in rows_by_rid
                ]
                mapping_results = self._post_process_resources(ordered_rows)

        orphan_results = []
        if orphan_positions:
            slice_fids = [orphan_ids[pos] for pos in orphan_positions]
            raw_rows = list(
                self._orphan_aggregation_values(
                    self._orphan_findings_queryset(
                        filtered_queryset, finding_ids=slice_fids
                    )
                )
            )
            rows_by_fid = {row["id"]: row for row in raw_rows}
            ordered_rows = [
                rows_by_fid[fid] for fid in slice_fids if fid in rows_by_fid
            ]
            orphan_results = self._post_process_orphans(ordered_rows)

        results = mapping_results + orphan_results
        serializer = FindingGroupResourceSerializer(results, many=True)
        if page is not None:
            return self.get_paginated_response(serializer.data)
        return Response(serializer.data)

    def list(self, request, *args, **kwargs):
        """
        List finding groups with aggregation and filtering.

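A note for readers of `_combined_paginated_response` above: the whole method hinges on paginating a plain `[0..total)` index sequence so DRF still produces correct links and counts, then mapping the page's global positions onto the two row sources. A minimal sketch of that split, with made-up numbers in the usage example:

```python
def split_page_indices(page_indices, mapping_count):
    """Map global page positions onto (mapping indices, orphan positions)."""
    mapping_indices = [i for i in page_indices if i < mapping_count]
    orphan_positions = [i - mapping_count for i in page_indices if i >= mapping_count]
    return mapping_indices, orphan_positions

# With 7 mapping rows, a page spanning positions 5..8 takes the last two
# mapping rows and the first two orphans:
assert split_page_indices([5, 6, 7, 8], 7) == ([5, 6], [0, 1])
```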

@@ -15,7 +15,7 @@ from config.django.production import LOGGING as DJANGO_LOGGERS, DEBUG # noqa: E
from config.custom_logging import BackendLogger  # noqa: E402

BIND_ADDRESS = env("DJANGO_BIND_ADDRESS", default="127.0.0.1")
PORT = env("DJANGO_PORT", default=8080)
PORT = env("DJANGO_PORT", default=8000)

# Server settings
bind = f"{BIND_ADDRESS}:{PORT}"

@@ -5,6 +5,7 @@ This module handles:
- Adding resource labels to Cartography nodes for efficient lookups
- Loading Prowler findings into the graph
- Linking findings to resources
- Cleaning up stale findings
"""

from collections import defaultdict
@@ -23,6 +24,7 @@ from tasks.jobs.attack_paths.config import (
)
from tasks.jobs.attack_paths.queries import (
    ADD_RESOURCE_LABEL_TEMPLATE,
    CLEANUP_FINDINGS_TEMPLATE,
    INSERT_FINDING_TEMPLATE,
    render_cypher_template,
)
@@ -90,13 +92,14 @@ def analysis(
    """
    Main entry point for Prowler findings analysis.

    Adds resource labels and loads findings.
    Adds resource labels, loads findings, and cleans up stale data.
    """
    add_resource_label(
        neo4j_session, prowler_api_provider.provider, str(prowler_api_provider.uid)
    )
    findings_data = stream_findings_with_resources(prowler_api_provider, scan_id)
    load_findings(neo4j_session, findings_data, prowler_api_provider, config)
    cleanup_findings(neo4j_session, prowler_api_provider, config)


def add_resource_label(
@@ -180,6 +183,28 @@ def load_findings(
    logger.info(f"Finished loading {total_records} records in {batch_num} batches")


def cleanup_findings(
    neo4j_session: neo4j.Session,
    prowler_api_provider: Provider,
    config: CartographyConfig,
) -> None:
    """Remove stale findings (classic Cartography behaviour)."""
    parameters = {
        "last_updated": config.update_tag,
        "batch_size": BATCH_SIZE,
    }

    batch = 1
    deleted_count = 1
    while deleted_count > 0:
        logger.info(f"Cleaning findings batch {batch}")

        result = neo4j_session.run(CLEANUP_FINDINGS_TEMPLATE, parameters)

        deleted_count = result.single().get("deleted_findings_count", 0)
        batch += 1


# Findings Streaming (Generator-based)
# -------------------------------------

@@ -248,9 +273,7 @@ def _fetch_findings_batch(
    with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
        # Use `all_objects` to get `Findings` even on soft-deleted `Providers`
        # But even the provider is already validated as active in this context
        qs = FindingModel.all_objects.filter(
            tenant_id=tenant_id, scan_id=scan_id
        ).order_by("id")
        qs = FindingModel.all_objects.filter(scan_id=scan_id).order_by("id")

        if after_id is not None:
            qs = qs.filter(id__gt=after_id)

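The hunk above relies on keyset (id-cursor) pagination: each batch filters `id__gt=after_id` instead of using an offset, so deep pages stay cheap. A minimal generator sketch of the pattern, assuming a Django-style queryset — names here are illustrative, not this module's actual helpers:

```python
def iter_finding_batches(queryset, batch_size=1000):
    """Yield ordered batches, advancing an id cursor past the last row seen."""
    after_id = None
    while True:
        qs = queryset.order_by("id")
        if after_id is not None:
            qs = qs.filter(id__gt=after_id)
        batch = list(qs[:batch_size])
        if not batch:
            return
        yield batch
        after_id = batch[-1].id  # cursor: strictly greater next time
```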
@@ -13,13 +13,14 @@ from tasks.jobs.attack_paths.config import (
logger = get_task_logger(__name__)


# Indexes for Prowler Findings and resource lookups
# Indexes for Prowler findings and resource lookups
FINDINGS_INDEX_STATEMENTS = [
    # Resource indexes for Prowler Finding lookups
    "CREATE INDEX aws_resource_arn IF NOT EXISTS FOR (n:_AWSResource) ON (n.arn);",
    "CREATE INDEX aws_resource_id IF NOT EXISTS FOR (n:_AWSResource) ON (n.id);",
    # Prowler Finding indexes
    f"CREATE INDEX prowler_finding_id IF NOT EXISTS FOR (n:{PROWLER_FINDING_LABEL}) ON (n.id);",
    f"CREATE INDEX prowler_finding_lastupdated IF NOT EXISTS FOR (n:{PROWLER_FINDING_LABEL}) ON (n.lastupdated);",
    f"CREATE INDEX prowler_finding_status IF NOT EXISTS FOR (n:{PROWLER_FINDING_LABEL}) ON (n.status);",
    # Internet node index for MERGE lookups
    f"CREATE INDEX internet_id IF NOT EXISTS FOR (n:{INTERNET_NODE_LABEL}) ON (n.id);",

@@ -80,6 +80,17 @@ INSERT_FINDING_TEMPLATE = f"""
    rel.lastupdated = $last_updated
"""

CLEANUP_FINDINGS_TEMPLATE = f"""
MATCH (finding:{PROWLER_FINDING_LABEL})
WHERE finding.lastupdated < $last_updated

WITH finding LIMIT $batch_size

DETACH DELETE finding

RETURN COUNT(finding) AS deleted_findings_count
"""

# Internet queries (used by internet.py)
# ---------------------------------------


@@ -1298,6 +1298,23 @@ class TestAttackPathsFindingsHelpers:
        assert params["last_updated"] == config.update_tag
        assert "findings_data" in params

    def test_cleanup_findings_runs_batches(self, providers_fixture):
        provider = providers_fixture[0]
        config = SimpleNamespace(update_tag=1024)
        mock_session = MagicMock()

        first_batch = MagicMock()
        first_batch.single.return_value = {"deleted_findings_count": 3}
        second_batch = MagicMock()
        second_batch.single.return_value = {"deleted_findings_count": 0}
        mock_session.run.side_effect = [first_batch, second_batch]

        findings_module.cleanup_findings(mock_session, provider, config)

        assert mock_session.run.call_count == 2
        params = mock_session.run.call_args.args[1]
        assert params["last_updated"] == config.update_tag

    def test_stream_findings_with_resources_returns_latest_scan_data(
        self,
        tenants_fixture,

@@ -34,10 +34,6 @@ spec:
      securityContext:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.worker.initContainers }}
      initContainers:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      containers:
        - name: worker
          {{- with .Values.worker.securityContext }}

@@ -32,10 +32,6 @@ spec:
      securityContext:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.worker_beat.initContainers }}
      initContainers:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      containers:
        - name: worker-beat
          {{- with .Values.worker_beat.securityContext }}

@@ -220,7 +220,7 @@ def _send_prowler_results(prowler_results, _prowler_version, options):
    try:
        _debug("RESULT MSG --- {0}".format(_check_result), 2)
        _check_result = json.loads(TEMPLATE_CHECK.format(_check_result))
    except Exception:
    except:
        _debug(
            "INVALID JSON --- {0}".format(TEMPLATE_CHECK.format(_check_result)), 1
        )

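One remark on the `except Exception:` / bare `except:` pair in the hunk above: a bare `except` also swallows `SystemExit` and `KeyboardInterrupt`, which is why the narrower form is generally preferred. A small, hypothetical illustration of the difference:

```python
import json

def parse_or_none(raw):
    try:
        return json.loads(raw)
    except Exception:  # catches ValueError etc.; Ctrl-C still propagates
        return None

assert parse_or_none('{"ok": true}') == {"ok": True}
assert parse_or_none("not json") is None
```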
@@ -1,11 +1,4 @@
services:
  api-dev-init:
    image: busybox:1.37.0
    volumes:
      - ./_data/api:/data
    command: ["sh", "-c", "chown -R 1000:1000 /data"]
    restart: "no"

  api-dev:
    hostname: "prowler-api"
    image: prowler-api-dev
@@ -28,20 +21,12 @@ services:
      - ./_data/api:/home/prowler/.config/prowler-api
      - outputs:/tmp/prowler_api_output
    depends_on:
      api-dev-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
      valkey:
        condition: service_healthy
      neo4j:
        condition: service_healthy
    healthcheck:
      test: ["CMD-SHELL", "wget -q -O /dev/null http://127.0.0.1:${DJANGO_PORT:-8080}/api/v1/ || exit 1"]
      interval: 10s
      timeout: 5s
      retries: 12
      start_period: 60s
    entrypoint:
      - "/home/prowler/docker-entrypoint.sh"
      - "dev"
@@ -154,7 +139,11 @@ services:
      - ./api/docker-entrypoint.sh:/home/prowler/docker-entrypoint.sh
      - outputs:/tmp/prowler_api_output
    depends_on:
      api-dev:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
      neo4j:
        condition: service_healthy
    ulimits:
      nofile:
@@ -176,7 +165,11 @@ services:
      - path: ./.env
        required: false
    depends_on:
      api-dev:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
      neo4j:
        condition: service_healthy
    ulimits:
      nofile:

@@ -5,13 +5,6 @@
# docker compose -f docker-compose-dev.yml up
#
services:
  api-init:
    image: busybox:1.37.0
    volumes:
      - ./_data/api:/data
    command: ["sh", "-c", "chown -R 1000:1000 /data"]
    restart: "no"

  api:
    hostname: "prowler-api"
    image: prowlercloud/prowler-api:${PROWLER_API_VERSION:-stable}
@@ -24,20 +17,12 @@ services:
      - ./_data/api:/home/prowler/.config/prowler-api
      - output:/tmp/prowler_api_output
    depends_on:
      api-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
      valkey:
        condition: service_healthy
      neo4j:
        condition: service_healthy
    healthcheck:
      test: ["CMD-SHELL", "wget -q -O /dev/null http://127.0.0.1:${DJANGO_PORT:-8080}/api/v1/ || exit 1"]
      interval: 10s
      timeout: 5s
      retries: 12
      start_period: 60s
    entrypoint:
      - "/home/prowler/docker-entrypoint.sh"
      - "prod"
@@ -129,7 +114,9 @@ services:
    volumes:
      - "output:/tmp/prowler_api_output"
    depends_on:
      api:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
    ulimits:
      nofile:
@@ -145,7 +132,9 @@ services:
      - path: ./.env
        required: false
    depends_on:
      api:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
    ulimits:
      nofile:

@@ -118,22 +118,18 @@ In case you have any doubts, consult the [Poetry environment activation guide](h

### Pre-Commit Hooks

This repository uses Git pre-commit hooks managed by the [prek](https://prek.j178.dev/) tool; it is installed with `poetry install --with dev`. Next, run the following command in the root of this repository:
This repository uses Git pre-commit hooks managed by the [pre-commit](https://pre-commit.com/) tool; it is installed with `poetry install --with dev`. Next, run the following command in the root of this repository:

```shell
prek install
pre-commit install
```

Successful installation should produce the following output:

```shell
prek installed at `.git/hooks/pre-commit`
pre-commit installed at .git/hooks/pre-commit
```

<Warning>
If pre-commit hooks were previously installed, run `prek install --overwrite` to replace the existing hook. Otherwise, both tools will run on each commit.
</Warning>

### Code Quality and Security Checks

Before merging pull requests, several automated checks and utilities ensure code security and updated dependencies:

@@ -15,7 +15,8 @@ This document describes the internal architecture of Prowler Lighthouse AI, enab

Lighthouse AI operates as a Langchain-based agent that connects Large Language Models (LLMs) with Prowler security data through the Model Context Protocol (MCP).

![]()
<img className="block dark:hidden" src="/images/lighthouse-architecture-light.png" alt="Prowler Lighthouse Architecture" />
<img className="hidden dark:block" src="/images/lighthouse-architecture-dark.png" alt="Prowler Lighthouse Architecture" />

### Three-Tier Architecture


@@ -12,24 +12,6 @@
    "dark": "/images/prowler-logo-white.png",
    "light": "/images/prowler-logo-black.png"
  },
  "contextual": {
    "options": [
      "copy",
      "view",
      {
        "title": "Request a feature",
        "description": "Open a feature request on GitHub",
        "icon": "plus",
        "href": "https://github.com/prowler-cloud/prowler/issues/new?template=feature-request.yml"
      },
      {
        "title": "Report an issue",
        "description": "Open a bug report on GitHub",
        "icon": "bug",
        "href": "https://github.com/prowler-cloud/prowler/issues/new?template=bug_report.yml"
      }
    ]
  },
  "navigation": {
    "tabs": [
      {
@@ -151,7 +133,6 @@
            ]
          },
          "user-guide/tutorials/prowler-app-attack-paths",
          "user-guide/tutorials/prowler-app-finding-groups",
          "user-guide/tutorials/prowler-cloud-public-ips",
          {
            "group": "Tutorials",

@@ -121,8 +121,8 @@ To update the environment file:
Edit the `.env` file and change version values:

```env
PROWLER_UI_VERSION="5.24.0"
PROWLER_API_VERSION="5.24.0"
PROWLER_UI_VERSION="5.23.0"
PROWLER_API_VERSION="5.23.0"
```

<Note>

@@ -59,10 +59,6 @@ Prowler Lighthouse AI is powerful, but there are limitations:
- **NextJS session dependence**: If your Prowler application session expires or logs out, Lighthouse AI will error out. Refresh and log back in to continue.
- **Response quality**: The response quality depends on the selected LLM provider and model. Choose models with strong tool-calling capabilities for best results. We recommend the `gpt-5` model from OpenAI.

## Architecture

![]()

## Extending Lighthouse AI

Lighthouse AI retrieves data through Prowler MCP. To add new capabilities, extend the Prowler MCP Server with additional tools; Lighthouse AI discovers them automatically.

@@ -46,7 +46,8 @@ Search and retrieve official Prowler documentation:

The following diagram illustrates the Prowler MCP Server architecture and its integration points:

![]()
<img className="block dark:hidden" src="/images/prowler_mcp_schema_light.png" alt="Prowler MCP Server Schema" />
<img className="hidden dark:block" src="/images/prowler_mcp_schema_dark.png" alt="Prowler MCP Server Schema" />

The architecture shows how AI assistants connect through the MCP protocol to access Prowler's three main components:
- Prowler Cloud/App for security operations

(Binary images: four files removed — 755 KiB, 146 KiB, 340 KiB, 410 KiB; two files added — 267 KiB, 265 KiB.)
@@ -1,37 +0,0 @@
flowchart TB
    browser([Browser])

    subgraph NEXTJS["Next.js Server"]
        route["API Route<br/>(auth + context assembly)"]
        agent["LangChain Agent"]

        subgraph TOOLS["Agent Tools"]
            metatools["Meta-tools<br/>describe_tool / execute_tool / load_skill"]
        end

        mcpclient["MCP Client<br/>(HTTP transport)"]
    end

    llm["LLM Provider<br/>(OpenAI / Bedrock / OpenAI-compatible)"]

    subgraph MCP["Prowler MCP Server"]
        app_tools["prowler_app_* tools<br/>(auth required)"]
        hub_tools["prowler_hub_* tools<br/>(no auth)"]
        docs_tools["prowler_docs_* tools<br/>(no auth)"]
    end

    api["Prowler API"]
    hub["hub.prowler.com"]
    docs["docs.prowler.com<br/>(Mintlify)"]

    browser <-->|SSE stream| route
    route --> agent
    agent <-->|LLM API| llm
    agent --> metatools
    metatools --> mcpclient
    mcpclient -->|MCP HTTP · Bearer token<br/>for prowler_app_* only| app_tools
    mcpclient -->|MCP HTTP| hub_tools
    mcpclient -->|MCP HTTP| docs_tools
    app_tools -->|REST| api
    hub_tools -->|REST| hub
    docs_tools -->|REST| docs

(Binary image removed: 286 KiB.)
@@ -23,8 +23,6 @@ flowchart TB
    user --> ui
    user --> cli
    ui -->|REST| api
    ui -->|MCP HTTP| mcp
    mcp -->|REST| api
    api --> pg
    api --> valkey
    beat -->|enqueue jobs| valkey
@@ -33,5 +31,7 @@ flowchart TB
    worker -->|Attack Paths| neo4j
    worker -->|invokes| sdk
    cli --> sdk
    api -. AI tools .-> mcp
    mcp -. context .-> api

    sdk --> providers

(Binary image replaced: 348 KiB before, 268 KiB after.)
@@ -1,29 +0,0 @@
flowchart LR
    subgraph HOSTS["MCP Hosts"]
        chat["Chat Interfaces<br/>(Claude Desktop, LobeChat)"]
        ide["IDEs and Code Editors<br/>(Claude Code, Cursor)"]
        apps["Other AI Applications<br/>(5ire, custom agents)"]
    end

    subgraph MCP["Prowler MCP Server"]
        app_tools["prowler_app_* tools<br/>(JWT or API key auth)<br/>Findings · Providers · Scans<br/>Resources · Muting · Compliance<br/>Attack Paths"]
        hub_tools["prowler_hub_* tools<br/>(no auth)<br/>Checks Catalog · Check Code<br/>Fixers · Compliance Frameworks"]
        docs_tools["prowler_docs_* tools<br/>(no auth)<br/>Search · Document Retrieval"]
    end

    api["Prowler API<br/>(REST)"]
    hub["hub.prowler.com<br/>(REST)"]
    docs["docs.prowler.com<br/>(Mintlify)"]

    chat -->|STDIO or HTTP| app_tools
    chat -->|STDIO or HTTP| hub_tools
    chat -->|STDIO or HTTP| docs_tools
    ide -->|STDIO or HTTP| app_tools
    ide -->|STDIO or HTTP| hub_tools
    ide -->|STDIO or HTTP| docs_tools
    apps -->|STDIO or HTTP| app_tools
    apps -->|STDIO or HTTP| hub_tools
    apps -->|STDIO or HTTP| docs_tools
    app_tools -->|REST| api
    hub_tools -->|REST| hub
    docs_tools -->|REST| docs

(Binary images: one removed — 371 KiB; two added — 328 KiB, 332 KiB.)
@@ -33,41 +33,6 @@ To scan a particular AWS region with Prowler, use:
prowler aws -f/--region eu-west-1 us-east-1
```

### Excluding Specific Regions

To scan all supported AWS regions except a specific subset, use the `--excluded-region` flag:

```console
prowler aws --excluded-region eu-west-1 me-south-1
```

You can also configure the exclusion list with the `PROWLER_AWS_DISALLOWED_REGIONS` environment variable as a comma-separated list:

```console
export PROWLER_AWS_DISALLOWED_REGIONS="eu-west-1,me-south-1"
prowler aws
```

Or with the AWS provider configuration in `config.yaml`:

```yaml
aws:
  disallowed_regions:
    - eu-west-1
    - me-south-1
```

When more than one source is set, precedence is:

1. `--excluded-region`
2. `PROWLER_AWS_DISALLOWED_REGIONS`
3. `aws.disallowed_regions` in `config.yaml`

<Note>
For self-hosted App or API-triggered scans, set `PROWLER_AWS_DISALLOWED_REGIONS` in the runtime environment of the backend scan containers such as `api` and `worker`. The `ui` container does not enforce AWS region selection.

</Note>
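The precedence list above reads as "first configured source wins". The sketch below captures that resolution order; it is illustrative only — `cli_regions` and `config_regions` are hypothetical names, not Prowler's actual internals:

```python
import os

def resolve_disallowed_regions(cli_regions, config_regions):
    # 1. --excluded-region wins outright
    if cli_regions:
        return list(cli_regions)
    # 2. then the environment variable, as a comma-separated list
    env_value = os.environ.get("PROWLER_AWS_DISALLOWED_REGIONS", "")
    if env_value:
        return [r.strip() for r in env_value.split(",") if r.strip()]
    # 3. finally aws.disallowed_regions from config.yaml
    return list(config_regions or [])
```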

### AWS Credentials Configuration

For details on configuring AWS credentials, refer to the following [Botocore](https://github.com/boto/botocore) [file](https://github.com/boto/botocore/blob/22a19ea7c4c2c4dd7df4ab8c32733cba0c7597a4/botocore/data/partitions.json).

@@ -1,119 +0,0 @@
---
title: 'Finding Groups'
description: 'Organize and triage security findings by check to reduce noise and prioritize remediation effectively.'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.23.0" />

Finding Groups transforms security findings triage by grouping them by check instead of displaying a flat list. This dramatically reduces noise and enables faster, more effective prioritization.

## Triage Challenges with Flat Finding Lists

A real cloud environment produces thousands of findings per scan. A flat list makes it impossible to triage effectively:

- **Signal buried in noise**: the same misconfiguration repeated across 200 resources shows up as 200 rows, burying the signal in repetitive data
- **Prioritization guesswork**: without grouping, understanding which issues affect the most resources requires manual counting and correlation
- **Tedious muting**: muting a false positive globally requires manually acting on each individual finding across the list
- **Lost context**: when investigating a single resource, related findings are scattered across the same flat list, making it hard to see the full picture

## How Finding Groups Addresses These Challenges

Finding Groups addresses these challenges by intelligently grouping findings by check.

### Grouped View at a Glance

Each row represents a single check title with key information immediately visible:

- **Severity** indicator for quick risk assessment
- **Impacted providers** showing which cloud platforms are affected
- **X of Y impacted resources** counter displaying how many resources fail this check

For example, `Vercel project has the Web Application Firewall enabled` across every affected project collapses to a single row — not one per project. Sort or filter by severity, provider, or status at the group level to triage top-down instead of drowning in per-resource rows.

![]()

### Expanding Groups for Details

Expand any group inline to see the failing resources with detailed information:

| Column | Description |
|--------|-------------|
| **UID** | Unique identifier for the resource |
| **Service** | The cloud service the resource belongs to |
| **Region** | Geographic region where the resource is deployed |
| **Severity** | Risk level of the finding |
| **Provider** | Cloud provider (AWS, Azure, GCP, Kubernetes, etc.) |
| **Last Seen** | When the finding was last detected |
| **Failing For** | Duration the resource has been in a failing state |

![]()

### Resource Detail Drawer

Select any resource to open the detail drawer with full finding context:

- **Risk**: the security risk associated with this finding
- **Description**: detailed explanation of what was detected
- **Status Extended**: additional status information and context
- **Remediation**: step-by-step guidance to resolve the issue
- **View in Prowler Hub**: direct link to explore the check in Prowler Hub
- **Analyze This Finding With Lighthouse AI**: one-click AI-powered analysis for deeper insights

![]()

### Bulk Actions

Bulk-mute an entire group instead of chasing duplicates across the list. This is especially useful for:

- Known false positives that appear across many resources
- Findings in development or test environments
- Accepted risks that have been documented and approved

<Warning>
Muting findings does not resolve underlying security issues. Review each finding carefully before muting to ensure it represents an acceptable risk or has been properly addressed.
</Warning>

## Other Findings for This Resource

Inside the resource detail drawer, the **Other Findings For This Resource** tab lists every finding that hits the same resource — passing, failing, and muted — alongside the one currently being reviewed.

![]()

### Why This Matters

When reviewing "WAF not enabled" on a Vercel project, the tab immediately shows:

- Skew protection status
- Rate limiting configuration
- IP blocking settings
- Custom firewall rules
- Password protection findings

All for that same project, without navigating back to the main list and filtering by resource UID.

### Complete Context Within the Drawer

Pair the Other Findings tab with:

- **Scans tab**: scan history for this resource
- **Events tab**: changes and events over time

This provides full context without leaving the drawer.

## Best Practices

1. **Start with high severity groups**: focus on critical and high severity groups first for maximum impact.
2. **Use filters strategically**: filter by provider or status at the group level to narrow the triage scope.
3. **Leverage bulk mute**: when a finding represents a confirmed false positive, mute the entire group at once.
4. **Check related findings**: review the Other Findings tab to understand the full security posture of a resource.
5. **Track failure duration**: use the "Failing For" column to prioritize long-standing issues that may indicate systemic problems.

## Getting Started

1. Navigate to the **Findings** section in Prowler Cloud/App.
2. Toggle to the **Grouped View** to see findings organized by check.
3. Select any group row to expand and see affected resources.
4. Select a resource to open the detail drawer with full context.
5. Use the **Other Findings For This Resource** tab to see all findings for that resource.
@@ -25,7 +25,8 @@ Behind the scenes, Lighthouse AI works as follows:
Lighthouse AI supports multiple LLM providers including OpenAI, Amazon Bedrock, and OpenAI-compatible services. For configuration details, see [Using Multiple LLM Providers with Lighthouse](/user-guide/tutorials/prowler-app-lighthouse-multi-llm).
</Note>

![]()
<img className="block dark:hidden" src="/images/lighthouse-architecture-light.png" alt="Prowler Lighthouse Architecture" />
<img className="hidden dark:block" src="/images/lighthouse-architecture-dark.png" alt="Prowler Lighthouse Architecture" />


<Note>

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.3.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand.

[[package]]
name = "about-time"
@@ -1671,6 +1671,18 @@ files = [
[package.dependencies]
pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}

[[package]]
name = "cfgv"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
    {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]

[[package]]
name = "cfn-lint"
version = "1.38.0"
@@ -2169,6 +2181,18 @@ files = [
graph = ["objgraph (>=1.7.2)"]
profile = ["gprof2dot (>=2022.7.29)"]

[[package]]
name = "distlib"
version = "0.4.0"
description = "Distribution utilities"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
    {file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"},
    {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"},
]

[[package]]
name = "distro"
version = "1.9.0"
@@ -2825,6 +2849,21 @@ files = [
    {file = "iamdata-0.1.202507281.tar.gz", hash = "sha256:4050870068ca2fb044d03c46229bc8dbafb4f99db2f50c77297aafd437154ddd"},
]

[[package]]
name = "identify"
version = "2.6.12"
description = "File identification library for Python"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
    {file = "identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"},
    {file = "identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"},
]

[package.extras]
license = ["ukkonen"]

[[package]]
name = "idna"
version = "3.10"
@@ -3962,6 +4001,18 @@ plot = ["matplotlib"]
tgrep = ["pyparsing"]
twitter = ["twython"]

[[package]]
name = "nodeenv"
version = "1.9.1"
description = "Node.js virtual environment builder"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["dev"]
files = [
    {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
    {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
]

[[package]]
name = "numpy"
version = "2.0.2"
@@ -4393,32 +4444,24 @@ files = [
]

[[package]]
name = "prek"
version = "0.3.9"
description = "A Git hook manager written in Rust, designed as a drop-in alternative to pre-commit."
name = "pre-commit"
version = "4.2.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["dev"]
files = [
    {file = "prek-0.3.9-py3-none-linux_armv6l.whl", hash = "sha256:3ed793d51bfaa27bddb64d525d7acb77a7c8644f549412d82252e3eb0b88aad8"},
    {file = "prek-0.3.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:399c58400c0bd0b82a93a3c09dc1bfd88d8d0cfb242d414d2ed247187b06ead1"},
    {file = "prek-0.3.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e2ea1ffb124e92f081b8e2ca5b5a623a733efb3be0c5b1f4b7ffe2ee17d1f20c"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:aaf639f95b7301639298311d8d44aad0d0b4864e9736083ad3c71ce9765d37ab"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff104863b187fa443ea8451ca55d51e2c6e94f99f00d88784b5c3c4c623f1ebe"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:039ecaf87c63a3e67cca645ebd5bc5eb6aafa6c9d929e9a27b2921e7849d7ef9"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3bde2a3d045705095983c7f78ba04f72a7565fe1c2b4e85f5628502a254754ff"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28a0960a21543563e2c8e19aaad176cc8423a87aac3c914d0f313030d7a9244a"},
    {file = "prek-0.3.9-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:0dfb5d5171d7523271909246ee306b4dc3d5b63752e7dd7c7e8a8908fc9490d1"},
    {file = "prek-0.3.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:82b791bd36c1430c84d3ae7220a85152babc7eaf00f70adcb961bd594e756ba3"},
    {file = "prek-0.3.9-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:6eac6d2f736b041118f053a1487abed468a70dd85a8688eaf87bb42d3dcecf20"},
    {file = "prek-0.3.9-py3-none-musllinux_1_1_i686.whl", hash = "sha256:5517e46e761367a3759b3168eabc120840ffbca9dfbc53187167298a98f87dc4"},
    {file = "prek-0.3.9-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:92024778cf78683ca32687bb249ab6a7d5c33887b5ee1d1a9f6d0c14228f4cf3"},
    {file = "prek-0.3.9-py3-none-win32.whl", hash = "sha256:7f89c55e5f480f5d073769e319924ad69d4bf9f98c5cb46a83082e26e634c958"},
    {file = "prek-0.3.9-py3-none-win_amd64.whl", hash = "sha256:7722f3372eaa83b147e70a43cb7b9fe2128c13d0c78d8a1cdbf2a8ec2ee071eb"},
    {file = "prek-0.3.9-py3-none-win_arm64.whl", hash = "sha256:0bced6278d6cc8a4b46048979e36bc9da034611dc8facd77ab123177b833a929"},
    {file = "prek-0.3.9.tar.gz", hash = "sha256:f82b92d81f42f1f90a47f5fbbf492373e25ef1f790080215b2722dd6da66510e"},
    {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"},
    {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"},
]

[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"

[[package]]
name = "propcache"
version = "0.3.2"
@@ -6240,6 +6283,27 @@ files = [
    {file = "uuid6-2024.7.10.tar.gz", hash = "sha256:2d29d7f63f593caaeea0e0d0dd0ad8129c9c663b29e19bdf882e864bedf18fb0"},
]

[[package]]
name = "virtualenv"
version = "20.32.0"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "virtualenv-20.32.0-py3-none-any.whl", hash = "sha256:2c310aecb62e5aa1b06103ed7c2977b81e042695de2697d01017ff0f1034af56"},
    {file = "virtualenv-20.32.0.tar.gz", hash = "sha256:886bf75cadfdc964674e6e33eb74d787dff31ca314ceace03ca5810620f4ecf0"},
]

[package.dependencies]
distlib = ">=0.3.7,<1"
filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"

[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""]

[[package]]
name = "vulture"
version = "2.14"
@@ -6681,4 +6745,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.13"
content-hash = "786921163bb46716defae1d9de1df001af2abf17edd3061165638707bcd28ce4"
content-hash = "4050d3a95f5bc5448576ca0361fd899b35aa04de28d379cdfd3c2b0db67848ad"

@@ -81,7 +81,7 @@ class {check_name}(Check):

## TECH STACK

Python 3.10+ | Poetry 2.3+ | pytest | moto (AWS mocking) | Pre-commit hooks (black, flake8, pylint, bandit)
Python 3.10+ | Poetry 2+ | pytest | moto (AWS mocking) | Pre-commit hooks (black, flake8, pylint, bandit)

---


@@ -2,46 +2,6 @@

All notable changes to the **Prowler SDK** are documented in this file.

## [5.24.1] (Prowler UNRELEASED)

### 🐞 Fixed

- Cloudflare account-scoped API tokens failing connection test in the App with `CloudflareUserTokenRequiredError` [(#10723)](https://github.com/prowler-cloud/prowler/pull/10723)
- `prowler image --registry` failing with `ImageNoImagesProvidedError` due to registry arguments not being forwarded to `ImageProvider` in `init_global_provider` [(#10470)](https://github.com/prowler-cloud/prowler/pull/10470)
- Google Workspace Calendar checks false FAIL on unconfigured settings with secure Google defaults [(#10726)](https://github.com/prowler-cloud/prowler/pull/10726)

---

## [5.24.0] (Prowler v5.24.0)

### 🚀 Added

- `entra_conditional_access_policy_directory_sync_account_excluded` check for M365 provider [(#10620)](https://github.com/prowler-cloud/prowler/pull/10620)
- `intune_device_compliance_policy_unassigned_devices_not_compliant_by_default` check for M365 provider [(#10599)](https://github.com/prowler-cloud/prowler/pull/10599)
- `entra_conditional_access_policy_all_apps_all_users` check for M365 provider [(#10619)](https://github.com/prowler-cloud/prowler/pull/10619)
- `bedrock_full_access_policy_attached` check for AWS provider [(#10577)](https://github.com/prowler-cloud/prowler/pull/10577)
- `iam_role_access_not_stale_to_bedrock` and `iam_user_access_not_stale_to_bedrock` checks for AWS provider [(#10536)](https://github.com/prowler-cloud/prowler/pull/10536)
- `iam_policy_no_wildcard_marketplace_subscribe` and `iam_inline_policy_no_wildcard_marketplace_subscribe` checks for AWS provider [(#10525)](https://github.com/prowler-cloud/prowler/pull/10525)
- `bedrock_vpc_endpoints_configured` check for AWS provider [(#10591)](https://github.com/prowler-cloud/prowler/pull/10591)
- `exchange_organization_delicensing_resiliency_enabled` check for M365 provider [(#10608)](https://github.com/prowler-cloud/prowler/pull/10608)
- `entra_conditional_access_policy_mfa_enforced_for_guest_users` check for M365 provider [(#10616)](https://github.com/prowler-cloud/prowler/pull/10616)
- `entra_conditional_access_policy_corporate_device_sign_in_frequency_enforced` check for M365 provider [(#10618)](https://github.com/prowler-cloud/prowler/pull/10618)
- `entra_conditional_access_policy_block_unknown_device_platforms` check for M365 provider [(#10615)](https://github.com/prowler-cloud/prowler/pull/10615)
- `--excluded-region` CLI flag, `PROWLER_AWS_DISALLOWED_REGIONS` environment variable, and `aws.disallowed_regions` config entry to skip specific AWS regions during scans [(#10688)](https://github.com/prowler-cloud/prowler/pull/10688)

### 🔄 Changed

- Bump Poetry to `2.3.4` and consolidate SDK workflows onto the `setup-python-poetry` composite action with opt-in lockfile regeneration [(#10681)](https://github.com/prowler-cloud/prowler/pull/10681)
- Normalize Conditional Access platform values in Entra models and simplify platform-based checks [(#10635)](https://github.com/prowler-cloud/prowler/pull/10635)

### 🐞 Fixed

- `prowler image --registry-list` crashes with `AttributeError` because `ImageProvider.__init__` returns early before registering the global provider [(#10691)](https://github.com/prowler-cloud/prowler/pull/10691)
- Vercel firewall config handling for team-scoped projects and current API response shapes [(#10695)](https://github.com/prowler-cloud/prowler/pull/10695)
- Google Workspace Drive checks false FAIL on unconfigured settings with secure Google defaults [(#10727)](https://github.com/prowler-cloud/prowler/pull/10727)

---

## [5.23.0] (Prowler v5.23.0)

### 🚀 Added
@@ -89,6 +49,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Oracle Cloud `kms_key_rotation_enabled` now checks current key version age to avoid false positives on vaults without auto-rotation support [(#10450)](https://github.com/prowler-cloud/prowler/pull/10450)
- OCI filestorage, blockstorage, KMS, and compute services now honor `--region` for scanning outside the tenancy home region [(#10472)](https://github.com/prowler-cloud/prowler/pull/10472)
- OCI provider now supports multi-region filtering via `--region` [(#10473)](https://github.com/prowler-cloud/prowler/pull/10473)
- `prowler image --registry` failing with `ImageNoImagesProvidedError` due to registry arguments not being forwarded to `ImageProvider` in `init_global_provider` [(#10470)](https://github.com/prowler-cloud/prowler/pull/10470)
- OCI multi-region support for identity client configuration in blockstorage, identity, and filestorage services [(#10520)](https://github.com/prowler-cloud/prowler/pull/10520)
- Google Workspace Calendar checks now filter for customer-level policies only, skipping OU and group overrides that could produce incorrect audit results [(#10658)](https://github.com/prowler-cloud/prowler/pull/10658)

@@ -803,7 +764,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- S3 `test_connection` uses AWS S3 API `HeadBucket` instead of `GetBucketLocation` [(#8456)](https://github.com/prowler-cloud/prowler/pull/8456)
- Add more validations to Azure Storage models when some values are None to avoid serialization issues [(#8325)](https://github.com/prowler-cloud/prowler/pull/8325)
- `sns_topics_not_publicly_accessible` false positive with `aws:SourceArn` conditions [(#8326)](https://github.com/prowler-cloud/prowler/issues/8326)
- Remove typo from description req 1.2.3 - Prowler ThreatScore M365 [(#8384)](https://github.com/prowler-cloud/prowler/pull/8384)
- Remove typo from description req 1.2.3 - Prowler ThreatScore m365 [(#8384)](https://github.com/prowler-cloud/prowler/pull/8384)
- Way of counting FAILED/PASS reqs from `kisa_isms_p_2023_aws` table [(#8382)](https://github.com/prowler-cloud/prowler/pull/8382)
- Use default tenant domain instead of first domain in list for Azure and M365 providers [(#8402)](https://github.com/prowler-cloud/prowler/pull/8402)
- Avoid multiple module error calls in M365 provider [(#8353)](https://github.com/prowler-cloud/prowler/pull/8353)
@@ -844,7 +805,7 @@ All notable changes to the **Prowler SDK** are documented in this file.

- Title & description wording for `iam_user_accesskey_unused` check for AWS provider [(#8233)](https://github.com/prowler-cloud/prowler/pull/8233)
- Add GitHub provider to lateral panel in documentation and change -h environment variable output [(#8246)](https://github.com/prowler-cloud/prowler/pull/8246)
- Show `M365_identity_type` and `M365_identity_id` in cloud reports [(#8247)](https://github.com/prowler-cloud/prowler/pull/8247)
- Show `m365_identity_type` and `m365_identity_id` in cloud reports [(#8247)](https://github.com/prowler-cloud/prowler/pull/8247)
- Ensure `is_service_role` only returns `True` for service roles [(#8274)](https://github.com/prowler-cloud/prowler/pull/8274)
- Update DynamoDB check metadata to fix broken link [(#8273)](https://github.com/prowler-cloud/prowler/pull/8273)
- Show correct count of findings in Dashboard Security Posture page [(#8270)](https://github.com/prowler-cloud/prowler/pull/8270)
@@ -966,9 +927,9 @@ All notable changes to the **Prowler SDK** are documented in this file.

### Fixed

- `M365_powershell test_credentials` to use sanitized credentials [(#7761)](https://github.com/prowler-cloud/prowler/pull/7761)
- `m365_powershell test_credentials` to use sanitized credentials [(#7761)](https://github.com/prowler-cloud/prowler/pull/7761)
- `admincenter_users_admins_reduced_license_footprint` check logic to pass when admin user has no license [(#7779)](https://github.com/prowler-cloud/prowler/pull/7779)
- `M365_powershell` to close the PowerShell sessions in msgraph services [(#7816)](https://github.com/prowler-cloud/prowler/pull/7816)
- `m365_powershell` to close the PowerShell sessions in msgraph services [(#7816)](https://github.com/prowler-cloud/prowler/pull/7816)
- `defender_ensure_notify_alerts_severity_is_high` check to accept high or lower severity [(#7862)](https://github.com/prowler-cloud/prowler/pull/7862)
- Replace `Directory.Read.All` permission with `Domain.Read.All` which is more restrictive [(#7888)](https://github.com/prowler-cloud/prowler/pull/7888)
- Split calls to list Azure Functions attributes [(#7778)](https://github.com/prowler-cloud/prowler/pull/7778)
@@ -1042,7 +1003,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- New check `teams_meeting_chat_anonymous_users_disabled` [(#7579)](https://github.com/prowler-cloud/prowler/pull/7579)
- Prowler Threat Score Compliance Framework [(#7603)](https://github.com/prowler-cloud/prowler/pull/7603)
- Documentation for M365 provider [(#7622)](https://github.com/prowler-cloud/prowler/pull/7622)
- Support for M365 provider in Prowler Dashboard [(#7633)](https://github.com/prowler-cloud/prowler/pull/7633)
- Support for m365 provider in Prowler Dashboard [(#7633)](https://github.com/prowler-cloud/prowler/pull/7633)
- New check for Modern Authentication enabled for Exchange Online in M365 [(#7636)](https://github.com/prowler-cloud/prowler/pull/7636)
- New check `sharepoint_onedrive_sync_restricted_unmanaged_devices` [(#7589)](https://github.com/prowler-cloud/prowler/pull/7589)
- New check for Additional Storage restricted for Exchange in M365 [(#7638)](https://github.com/prowler-cloud/prowler/pull/7638)
@@ -1052,7 +1013,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- New check for MailTips full enabled for Exchange in M365 [(#7637)](https://github.com/prowler-cloud/prowler/pull/7637)
- New check for Comprehensive Attachments Filter Applied for Defender in M365 [(#7661)](https://github.com/prowler-cloud/prowler/pull/7661)
- Modified check `exchange_mailbox_properties_auditing_enabled` to make it configurable [(#7662)](https://github.com/prowler-cloud/prowler/pull/7662)
- snapshots to M365 documentation [(#7673)](https://github.com/prowler-cloud/prowler/pull/7673)
- snapshots to m365 documentation [(#7673)](https://github.com/prowler-cloud/prowler/pull/7673)
- support for static credentials for sending findings to Amazon S3 and AWS Security Hub [(#7322)](https://github.com/prowler-cloud/prowler/pull/7322)
- Prowler ThreatScore for M365 provider [(#7692)](https://github.com/prowler-cloud/prowler/pull/7692)
- Microsoft User and User Credential auth to reports [(#7681)](https://github.com/prowler-cloud/prowler/pull/7681)

@@ -69,11 +69,11 @@ from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_github import GithubCIS
from prowler.lib.outputs.compliance.cis.cis_googleworkspace import GoogleWorkspaceCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
from prowler.lib.outputs.compliance.cis.cis_oraclecloud import OracleCloudCIS
from prowler.lib.outputs.compliance.cisa_scuba.cisa_scuba_googleworkspace import (
    GoogleWorkspaceCISASCuBA,
)
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
from prowler.lib.outputs.compliance.cis.cis_oraclecloud import OracleCloudCIS
from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.compliance.csa.csa_alibabacloud import AlibabaCloudCSA
from prowler.lib.outputs.compliance.csa.csa_aws import AWSCSA
@@ -293,10 +293,6 @@ def prowler():
    if not args.only_logs:
        global_provider.print_credentials()

    # --registry-list: listing already printed during provider init, exit
    if getattr(global_provider, "_listing_only", False):
        sys.exit()

    # Skip service and check loading for external-tool providers
    if provider not in EXTERNAL_TOOL_PROVIDERS:
        # Import custom checks from folder
@@ -1315,12 +1311,8 @@ def prowler():
            global_provider.identity.audited_regions,
        )
        if not global_provider.identity.audited_regions
        else set(global_provider.identity.audited_regions)
        else global_provider.identity.audited_regions
    )
    if global_provider._enabled_regions is not None:
        security_hub_regions = security_hub_regions.intersection(
            global_provider._enabled_regions
        )

    security_hub = SecurityHub(
        aws_account_id=global_provider.identity.account,
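For context on the hunk above: limiting Security Hub delivery to regions that are both audited and enabled reduces to a set intersection. A tiny illustration with made-up region sets:

```python
security_hub_regions = {"eu-west-1", "us-east-1", "me-south-1"}
enabled_regions = {"eu-west-1", "us-east-1"}

# Only keep regions that are both audited and enabled.
security_hub_regions = security_hub_regions.intersection(enabled_regions)
assert security_hub_regions == {"eu-west-1", "us-east-1"}
```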