Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-05-13 15:50:55 +00:00)

Compare commits: 272 commits
@@ -0,0 +1,23 @@
# Prowler worktree automation for worktrunk (wt CLI).
# Runs automatically on `wt switch --create`.

# Block 1: setup + copy gitignored env files (.envrc, ui/.env.local)
# from the primary worktree — patterns selected via .worktreeinclude.
[[pre-start]]
skills = "./skills/setup.sh --claude"
python = "poetry env use python3.12"
envs = "wt step copy-ignored"

# Block 2: install Python deps (requires `poetry env use` from block 1).
[[pre-start]]
deps = "poetry install --with dev"

# Block 3: reminder — last visible output before `wt switch` returns.
# Hooks can't mutate the parent shell, so venv activation is manual.
[[pre-start]]
reminder = "echo '>> Reminder: activate the venv in this shell with: eval $(poetry env activate)'"

# Background: pnpm install runs while you start working.
# Tail logs via `wt config state logs`.
[post-start]
ui = "cd ui && pnpm install"
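Taken together, the hooks above make a fresh worktree usable after one command. A minimal session sketch, using only the commands quoted in the config comments (the branch name is hypothetical):

```bash
# [[pre-start]] blocks run in order while the worktree is created;
# [post-start] (the pnpm install) keeps running in the background.
wt switch --create feat/new-check

# Hooks can't mutate the parent shell, so activate the venv by hand:
eval $(poetry env activate)

# Tail the background pnpm install output if needed:
wt config state logs
```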
@@ -145,7 +145,7 @@ SENTRY_RELEASE=local
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}

#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.16.0
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.27.0

# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"

+12 -11

@@ -1,14 +1,15 @@
# SDK
/* @prowler-cloud/sdk
/prowler/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/tests/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/dashboard/ @prowler-cloud/sdk
/docs/ @prowler-cloud/sdk
/examples/ @prowler-cloud/sdk
/util/ @prowler-cloud/sdk
/contrib/ @prowler-cloud/sdk
/permissions/ @prowler-cloud/sdk
/codecov.yml @prowler-cloud/sdk @prowler-cloud/api
/* @prowler-cloud/detection-remediation
/prowler/ @prowler-cloud/detection-remediation
/prowler/compliance/ @prowler-cloud/compliance
/tests/ @prowler-cloud/detection-remediation
/dashboard/ @prowler-cloud/detection-remediation
/docs/ @prowler-cloud/detection-remediation
/examples/ @prowler-cloud/detection-remediation
/util/ @prowler-cloud/detection-remediation
/contrib/ @prowler-cloud/detection-remediation
/permissions/ @prowler-cloud/detection-remediation
/codecov.yml @prowler-cloud/detection-remediation @prowler-cloud/api

# API
/api/ @prowler-cloud/api

@@ -17,7 +18,7 @@
/ui/ @prowler-cloud/ui

# AI
/mcp_server/ @prowler-cloud/ai
/mcp_server/ @prowler-cloud/detection-remediation

# Platform
/.github/ @prowler-cloud/platform

@@ -0,0 +1,15 @@
# These are supported funding model platforms

github: [prowler-cloud]
# patreon: # Replace with a single Patreon username
# open_collective: # Replace with a single Open Collective username
# ko_fi: # Replace with a single Ko-fi username
# tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
# community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
# liberapay: # Replace with a single Liberapay username
# issuehunt: # Replace with a single IssueHunt username
# lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
# polar: # Replace with a single Polar username
# buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
# thanks_dev: # Replace with a single thanks.dev username
# custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

@@ -0,0 +1,143 @@
name: "🔎 New Check Request"
description: Request a new Prowler security check
title: "[New Check]: "
labels: ["feature-request", "status/needs-triage"]

body:
  - type: checkboxes
    id: search
    attributes:
      label: Existing check search
      description: Confirm this check does not already exist before opening a new request.
      options:
        - label: I have searched existing issues, Prowler Hub, and the public roadmap, and this check does not already exist.
          required: true

  - type: markdown
    attributes:
      value: |
        Use this form to describe the security condition that Prowler should evaluate.

        The most useful inputs for [Prowler Studio](https://github.com/prowler-cloud/prowler-studio) are:
        - What should be detected
        - What PASS and FAIL mean
        - Vendor docs, API references, SDK methods, CLI commands, or reference code

  - type: dropdown
    id: provider
    attributes:
      label: Provider
      description: Cloud or platform this check targets.
      options:
        - AWS
        - Azure
        - GCP
        - Kubernetes
        - GitHub
        - Microsoft 365
        - OCI
        - Alibaba Cloud
        - Cloudflare
        - MongoDB Atlas
        - Google Workspace
        - OpenStack
        - Vercel
        - NHN
        - Other / New provider
    validations:
      required: true

  - type: input
    id: other_provider_name
    attributes:
      label: New provider name
      description: Only fill this if you selected "Other / New provider" above.
      placeholder: "NewProviderName"
    validations:
      required: false

  - type: input
    id: service_name
    attributes:
      label: Service or product area
      description: Optional. Main service, product, or feature to audit.
      placeholder: "s3, bedrock, entra, repository, apiserver"
    validations:
      required: false

  - type: input
    id: suggested_check_name
    attributes:
      label: Suggested check name
      description: Optional. Use `snake_case` following `<service>_<resource>_<best_practice>`, with lowercase letters and underscores only.
      placeholder: "bedrock_guardrail_sensitive_information_filter_enabled"
    validations:
      required: false

  - type: textarea
    id: context
    attributes:
      label: Context and goal
      description: Describe the security problem, why it matters, and what this new check should help detect.
      placeholder: |-
        - Security condition to validate:
        - Why it matters:
        - Resource, feature, or configuration involved:
    validations:
      required: true

  - type: textarea
    id: expected_behavior
    attributes:
      label: Expected behavior
      description: Explain what the check should evaluate and what PASS, FAIL, or MANUAL should mean.
      placeholder: |-
        - Resource or scope to evaluate:
        - PASS when:
        - FAIL when:
        - MANUAL when (if applicable):
        - Exclusions, thresholds, or edge cases:
    validations:
      required: true

  - type: textarea
    id: references
    attributes:
      label: References
      description: Add vendor docs, API references, SDK methods, CLI commands, endpoint docs, sample payloads, or similar reference material.
      placeholder: |-
        - Product or service documentation:
        - API or SDK reference:
        - CLI command or endpoint documentation:
        - Sample payload or response:
        - Security advisory or benchmark:
    validations:
      required: true

  - type: dropdown
    id: severity
    attributes:
      label: Suggested severity
      description: Your best estimate. Reviewers will confirm during triage.
      options:
        - Critical
        - High
        - Medium
        - Low
        - Informational
        - Not sure
    validations:
      required: true

  - type: textarea
    id: implementation_notes
    attributes:
      label: Additional implementation notes
      description: Optional. Add permissions, unsupported regions, config knobs, product limitations, or anything else that may affect implementation.
      placeholder: |-
        - Required permissions or scopes:
        - Region, tenant, or subscription limitations:
        - Configurable behavior or thresholds:
        - Other constraints:
    validations:
      required: false

@@ -13,11 +13,19 @@ inputs:
  poetry-version:
    description: 'Poetry version to install'
    required: false
    default: '2.1.1'
    default: '2.3.4'
  install-dependencies:
    description: 'Install Python dependencies with Poetry'
    required: false
    default: 'true'
  update-lock:
    description: 'Run `poetry lock` during setup. Only enable when a prior step mutates pyproject.toml (e.g. API `@master` VCS rewrite). Default: false.'
    required: false
    default: 'false'
  enable-cache:
    description: 'Whether to enable Poetry dependency caching via actions/setup-python'
    required: false
    default: 'true'

runs:
  using: 'composite'

@@ -60,21 +68,8 @@ runs:
        echo "Updated resolved_reference:"
        grep -A2 -B2 "resolved_reference" poetry.lock

  - name: Update SDK resolved_reference to latest commit (prowler repo on push)
    if: github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository == 'prowler-cloud/prowler'
    shell: bash
    working-directory: ${{ inputs.working-directory }}
    run: |
      LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
      echo "Latest commit hash: $LATEST_COMMIT"
      sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
        s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
      }' poetry.lock
      echo "Updated resolved_reference:"
      grep -A2 -B2 "resolved_reference" poetry.lock

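The `sed` call above relies on a GNU sed address range: the substitution applies only between the line matching the prowler `url` in poetry.lock and the next `resolved_reference` line, so other git dependencies keep their pinned hashes. A standalone sketch of the same pattern, on a hypothetical two-entry lock snippet:

```bash
# Only the first entry falls inside the /url .../,/resolved_reference/ range,
# so only its hash is rewritten.
printf '%s\n' \
  'url = "https://github.com/prowler-cloud/prowler.git"' \
  'resolved_reference = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"' \
  'url = "https://github.com/example/other.git"' \
  'resolved_reference = "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"' |
sed '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
  s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "NEW_COMMIT_SHA"/
}'
```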
  - name: Update poetry.lock (prowler repo only)
    if: github.repository == 'prowler-cloud/prowler'
    if: github.repository == 'prowler-cloud/prowler' && inputs.update-lock == 'true'
    shell: bash
    working-directory: ${{ inputs.working-directory }}
    run: poetry lock

@@ -83,8 +78,10 @@
    uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
    with:
      python-version: ${{ inputs.python-version }}
      cache: 'poetry'
      cache-dependency-path: ${{ inputs.working-directory }}/poetry.lock
      # Disable cache when callers skip dependency install: Poetry 2.3.4 creates
      # the venv in a path setup-python can't hash, breaking the post-step save-cache.
      cache: ${{ inputs.enable-cache == 'true' && 'poetry' || '' }}
      cache-dependency-path: ${{ inputs.enable-cache == 'true' && format('{0}/poetry.lock', inputs.working-directory) || '' }}

  - name: Install Python dependencies
    if: inputs.install-dependencies == 'true'

@@ -66,6 +66,18 @@ updates:
    cooldown:
      default-days: 7

  - package-ecosystem: "pre-commit"
    directory: "/"
    schedule:
      interval: "monthly"
    open-pull-requests-limit: 25
    target-branch: master
    labels:
      - "dependencies"
      - "pre-commit"
    cooldown:
      default-days: 7

  # Dependabot Updates are temporary disabled - 2025/04/15
  # v4.6
  # - package-ecosystem: "pip"

@@ -13,6 +13,8 @@ env:
  PROWLER_VERSION: ${{ github.event.release.tag_name }}
  BASE_BRANCH: master

permissions: {}

jobs:
  detect-release-type:
    runs-on: ubuntu-latest

@@ -17,6 +17,8 @@ concurrency:
env:
  API_WORKING_DIR: ./api

permissions: {}

jobs:
  api-code-quality:
    runs-on: ubuntu-latest

@@ -67,6 +69,7 @@ jobs:
        with:
          python-version: ${{ matrix.python-version }}
          working-directory: ./api
          update-lock: 'true'

      - name: Poetry check
        if: steps.check-changes.outputs.any_changed == 'true'

@@ -24,6 +24,8 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions: {}

jobs:
  api-analyze:
    name: CodeQL Security Analysis

@@ -33,6 +33,8 @@ env:
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api

permissions: {}

jobs:
  setup:
    if: github.repository == 'prowler-cloud/prowler'

@@ -156,7 +158,7 @@ jobs:
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-${{ matrix.arch }}
          cache-from: type=gha,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }},scope=${{ matrix.arch }}

  # Create and push multi-architecture manifest
  create-manifest:

@@ -5,10 +5,16 @@ on:
    branches:
      - 'master'
      - 'v5.*'
    paths:
      - 'api/**'
      - '.github/workflows/api-container-checks.yml'
  pull_request:
    branches:
      - 'master'
      - 'v5.*'
    paths:
      - 'api/**'
      - '.github/workflows/api-container-checks.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}

@@ -18,6 +24,8 @@ env:
  API_WORKING_DIR: ./api
  IMAGE_NAME: prowler-api

permissions: {}

jobs:
  api-dockerfile-lint:
    if: github.repository == 'prowler-cloud/prowler'

@@ -55,16 +63,7 @@ jobs:

  api-container-build-and-scan:
    if: github.repository == 'prowler-cloud/prowler'
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        include:
          - platform: linux/amd64
            runner: ubuntu-latest
            arch: amd64
          - platform: linux/arm64
            runner: ubuntu-24.04-arm
            arch: arm64
    runs-on: ubuntu-latest
    timeout-minutes: 30
    permissions:
      contents: read

@@ -117,23 +116,22 @@
        if: steps.check-changes.outputs.any_changed == 'true'
        uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

      - name: Build container for ${{ matrix.arch }}
      - name: Build container
        if: steps.check-changes.outputs.any_changed == 'true'
        uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
        with:
          context: ${{ env.API_WORKING_DIR }}
          push: false
          load: true
          platforms: ${{ matrix.platform }}
          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
          cache-from: type=gha,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
          cache-from: type=gha
          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }}

      - name: Scan container with Trivy for ${{ matrix.arch }}
      - name: Scan container with Trivy
        if: steps.check-changes.outputs.any_changed == 'true'
        uses: ./.github/actions/trivy-scan
        with:
          image-name: ${{ env.IMAGE_NAME }}
          image-tag: ${{ github.sha }}-${{ matrix.arch }}
          image-tag: ${{ github.sha }}
          fail-on-critical: 'false'
          severity: 'CRITICAL'

@@ -5,10 +5,20 @@ on:
    branches:
      - "master"
      - "v5.*"
    paths:
      - 'api/**'
      - '.github/workflows/api-tests.yml'
      - '.github/workflows/api-security.yml'
      - '.github/actions/setup-python-poetry/**'
  pull_request:
    branches:
      - "master"
      - "v5.*"
    paths:
      - 'api/**'
      - '.github/workflows/api-tests.yml'
      - '.github/workflows/api-security.yml'
      - '.github/actions/setup-python-poetry/**'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}

@@ -17,6 +27,8 @@ concurrency:
env:
  API_WORKING_DIR: ./api

permissions: {}

jobs:
  api-security-scans:
    runs-on: ubuntu-latest

@@ -58,6 +70,7 @@ jobs:
          files: |
            api/**
            .github/workflows/api-security.yml
            .safety-policy.yml
          files_ignore: |
            api/docs/**
            api/README.md

@@ -70,6 +83,7 @@ jobs:
        with:
          python-version: ${{ matrix.python-version }}
          working-directory: ./api
          update-lock: 'true'

      - name: Bandit
        if: steps.check-changes.outputs.any_changed == 'true'

@@ -77,9 +91,8 @@

      - name: Safety
        if: steps.check-changes.outputs.any_changed == 'true'
        run: poetry run safety check --ignore 79023,79027,86217
        # TODO: 79023 & 79027 knack ReDoS until `azure-cli-core` (via `cartography`) allows `knack` >=0.13.0
        # TODO: 86217 because `alibabacloud-tea-openapi == 0.4.3` don't let us upgrade `cryptography >= 46.0.0`
        # Accepted CVEs, severity threshold, and ignore expirations live in ../.safety-policy.yml
        run: poetry run safety check --policy-file ../.safety-policy.yml

      - name: Vulture
        if: steps.check-changes.outputs.any_changed == 'true'

@@ -30,6 +30,8 @@ env:
  VALKEY_DB: 0
  API_WORKING_DIR: ./api

permissions: {}

jobs:
  api-tests:
    runs-on: ubuntu-latest

@@ -116,6 +118,7 @@ jobs:
        with:
          python-version: ${{ matrix.python-version }}
          working-directory: ./api
          update-lock: 'true'

      - name: Run tests with pytest
        if: steps.check-changes.outputs.any_changed == 'true'

@@ -17,6 +17,8 @@ env:
  BACKPORT_LABEL_PREFIX: backport-to-
  BACKPORT_LABEL_IGNORE: was-backported

permissions: {}

jobs:
  backport:
    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))

@@ -33,6 +35,7 @@ jobs:
          egress-policy: block
          allowed-endpoints: >
            api.github.com:443
            github.com:443

      - name: Check labels
        id: label_check

@@ -21,6 +21,8 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions: {}

jobs:
  zizmor:
    if: github.repository == 'prowler-cloud/prowler'

@@ -9,6 +9,8 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.event.issue.number }}
  cancel-in-progress: false

permissions: {}

jobs:
  update-labels:
    if: contains(github.event.issue.labels.*.name, 'status/awaiting-response')

@@ -4,8 +4,6 @@ on:
  pull_request:
    branches:
      - 'master'
      - 'v3'
      - 'v4.*'
      - 'v5.*'
    types:
      - 'opened'

@@ -16,6 +14,8 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
  cancel-in-progress: true

permissions: {}

jobs:
  conventional-commit-check:
    runs-on: ubuntu-latest

@@ -13,6 +13,8 @@ env:
  BACKPORT_LABEL_PREFIX: backport-to-
  BACKPORT_LABEL_COLOR: B60205

permissions: {}

jobs:
  create-label:
    runs-on: ubuntu-latest

@@ -41,14 +43,11 @@ jobs:

          echo "Processing release tag: $RELEASE_TAG"

          # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
          VERSION_ONLY="${RELEASE_TAG#v}"

          # Check if it's a minor version (X.Y.0)
          if [[ "$VERSION_ONLY" =~ ^([0-9]+)\.([0-9]+)\.0$ ]]; then
            echo "Release $RELEASE_TAG (version $VERSION_ONLY) is a minor version. Proceeding to create backport label."

            # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
            MAJOR="${BASH_REMATCH[1]}"
            MINOR="${BASH_REMATCH[2]}"
            TWO_DIGIT_VERSION="${MAJOR}.${MINOR}"

@@ -60,7 +59,6 @@ jobs:
            echo "Label name: $LABEL_NAME"
            echo "Label description: $LABEL_DESC"

            # Check if label already exists
            if gh label list --repo ${{ github.repository }} --limit 1000 | grep -q "^${LABEL_NAME}[[:space:]]"; then
              echo "Label '$LABEL_NAME' already exists."
            else

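The minor-version test above leans on bash's `BASH_REMATCH` array, which exposes the capture groups of the last successful `=~` match. A quick illustration with a hypothetical tag:

```bash
RELEASE_TAG="v5.6.0"             # hypothetical release tag
VERSION_ONLY="${RELEASE_TAG#v}"  # strip the leading v -> 5.6.0
if [[ "$VERSION_ONLY" =~ ^([0-9]+)\.([0-9]+)\.0$ ]]; then
  # BASH_REMATCH[0] holds the whole match; [1] and [2] the capture groups.
  echo "Minor release: ${BASH_REMATCH[1]}.${BASH_REMATCH[2]}"  # -> Minor release: 5.6
fi
```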
@@ -12,72 +12,12 @@ concurrency:
env:
  PROWLER_VERSION: ${{ github.event.release.tag_name }}
  BASE_BRANCH: master
  DOCS_FILE: docs/getting-started/installation/prowler-app.mdx

permissions: {}

jobs:
  detect-release-type:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    permissions:
      contents: read
    outputs:
      is_minor: ${{ steps.detect.outputs.is_minor }}
      is_patch: ${{ steps.detect.outputs.is_patch }}
      major_version: ${{ steps.detect.outputs.major_version }}
      minor_version: ${{ steps.detect.outputs.minor_version }}
      patch_version: ${{ steps.detect.outputs.patch_version }}
      current_docs_version: ${{ steps.get_docs_version.outputs.current_docs_version }}
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
        with:
          egress-policy: audit

      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false

      - name: Get current documentation version
        id: get_docs_version
        run: |
          CURRENT_DOCS_VERSION=$(grep -oP 'PROWLER_UI_VERSION="\K[^"]+' docs/getting-started/installation/prowler-app.mdx)
          echo "current_docs_version=${CURRENT_DOCS_VERSION}" >> "${GITHUB_OUTPUT}"
          echo "Current documentation version: $CURRENT_DOCS_VERSION"

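The `\K` in that grep pattern is a PCRE escape (hence `-P`, GNU grep) that discards everything matched before it, so `-o` prints only the quoted version string rather than the whole `PROWLER_UI_VERSION="..."` assignment. For example:

```bash
# Illustrative input line in the shape the docs file uses:
echo 'PROWLER_UI_VERSION="5.27.0"' | grep -oP 'PROWLER_UI_VERSION="\K[^"]+'
# -> 5.27.0
```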
      - name: Detect release type and parse version
        id: detect
        run: |
          if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
            MAJOR_VERSION=${BASH_REMATCH[1]}
            MINOR_VERSION=${BASH_REMATCH[2]}
            PATCH_VERSION=${BASH_REMATCH[3]}

            echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
            echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
            echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"

            if (( MAJOR_VERSION != 5 )); then
              echo "::error::Releasing another Prowler major version, aborting..."
              exit 1
            fi

            if (( PATCH_VERSION == 0 )); then
              echo "is_minor=true" >> "${GITHUB_OUTPUT}"
              echo "is_patch=false" >> "${GITHUB_OUTPUT}"
              echo "✓ Minor release detected: $PROWLER_VERSION"
            else
              echo "is_minor=false" >> "${GITHUB_OUTPUT}"
              echo "is_patch=true" >> "${GITHUB_OUTPUT}"
              echo "✓ Patch release detected: $PROWLER_VERSION"
            fi
          else
            echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
            exit 1
          fi

  bump-minor-version:
    needs: detect-release-type
    if: needs.detect-release-type.outputs.is_minor == 'true'
  bump-version:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:

@@ -89,185 +29,60 @@ jobs:
        with:
          egress-policy: audit

      - name: Checkout repository
      - name: Validate release version
        run: |
          if [[ ! $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
            echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
            exit 1
          fi
          if (( ${BASH_REMATCH[1]} != 5 )); then
            echo "::error::Releasing another Prowler major version, aborting..."
            exit 1
          fi

      - name: Checkout master branch
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          ref: ${{ env.BASE_BRANCH }}
          persist-credentials: false

      - name: Calculate next minor version
      - name: Read current docs version on master
        id: docs_version
        run: |
          MAJOR_VERSION=${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MAJOR_VERSION}
          MINOR_VERSION=${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MINOR_VERSION}
          CURRENT_DOCS_VERSION="${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_CURRENT_DOCS_VERSION}"

          NEXT_MINOR_VERSION=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).0
          CURRENT_DOCS_VERSION=$(grep -oP 'PROWLER_UI_VERSION="\K[^"]+' "${DOCS_FILE}")
          echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
          echo "NEXT_MINOR_VERSION=${NEXT_MINOR_VERSION}" >> "${GITHUB_ENV}"
          echo "Current docs version on master: $CURRENT_DOCS_VERSION"
          echo "Target release version: $PROWLER_VERSION"

          echo "Current documentation version: $CURRENT_DOCS_VERSION"
          echo "Current release version: $PROWLER_VERSION"
          echo "Next minor version: $NEXT_MINOR_VERSION"
        env:
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MAJOR_VERSION: ${{ needs.detect-release-type.outputs.major_version }}
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MINOR_VERSION: ${{ needs.detect-release-type.outputs.minor_version }}
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_CURRENT_DOCS_VERSION: ${{ needs.detect-release-type.outputs.current_docs_version }}
          # Skip if master is already at or ahead of the release version
          # (re-run, or patch shipped against an older minor line)
          HIGHEST=$(printf '%s\n%s\n' "${CURRENT_DOCS_VERSION}" "${PROWLER_VERSION}" | sort -V | tail -n1)
          if [[ "${CURRENT_DOCS_VERSION}" == "${PROWLER_VERSION}" || "${HIGHEST}" != "${PROWLER_VERSION}" ]]; then
            echo "skip=true" >> "${GITHUB_OUTPUT}"
            echo "Skipping bump: current ($CURRENT_DOCS_VERSION) >= release ($PROWLER_VERSION)"
          else
            echo "skip=false" >> "${GITHUB_OUTPUT}"
          fi

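The skip guard above compares versions with `sort -V` (GNU version sort): the bump proceeds only when the release tag sorts strictly above the version already in the docs. Two worked cases with hypothetical version pairs:

```bash
# Docs at 5.26.3, releasing 5.27.0: the release sorts last, so bump.
printf '%s\n%s\n' "5.26.3" "5.27.0" | sort -V | tail -n1   # -> 5.27.0

# Docs already at 5.27.0, re-releasing 5.26.3: the release does not sort last, so skip.
printf '%s\n%s\n' "5.27.0" "5.26.3" | sort -V | tail -n1   # -> 5.27.0
```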
      - name: Bump versions in documentation for master
      - name: Bump versions in documentation
        if: steps.docs_version.outputs.skip == 'false'
        run: |
          set -e

          # Update prowler-app.mdx with current release version
          sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
          sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx

          sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" "${DOCS_FILE}"
          sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" "${DOCS_FILE}"
          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for documentation update to master
        if: steps.docs_version.outputs.skip == 'false'
        uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: master
          commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
          title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### Files Updated
            - `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
            - All `*.mdx` files with `<VersionBadge>` components

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

      - name: Checkout version branch
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
          persist-credentials: false

      - name: Calculate first patch version
        run: |
          MAJOR_VERSION=${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MAJOR_VERSION}
          MINOR_VERSION=${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MINOR_VERSION}
          CURRENT_DOCS_VERSION="${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_CURRENT_DOCS_VERSION}"

          FIRST_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.1
          VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}

          echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
          echo "FIRST_PATCH_VERSION=${FIRST_PATCH_VERSION}" >> "${GITHUB_ENV}"
          echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

          echo "First patch version: $FIRST_PATCH_VERSION"
          echo "Version branch: $VERSION_BRANCH"
        env:
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MAJOR_VERSION: ${{ needs.detect-release-type.outputs.major_version }}
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MINOR_VERSION: ${{ needs.detect-release-type.outputs.minor_version }}
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_CURRENT_DOCS_VERSION: ${{ needs.detect-release-type.outputs.current_docs_version }}

      - name: Bump versions in documentation for version branch
        run: |
          set -e

          # Update prowler-app.mdx with current release version
          sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
          sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for documentation update to version branch
        uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}-branch
          title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### Files Updated
            - `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

  bump-patch-version:
    needs: detect-release-type
    if: needs.detect-release-type.outputs.is_patch == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
        with:
          egress-policy: audit

      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false

      - name: Calculate next patch version
        run: |
          MAJOR_VERSION=${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MAJOR_VERSION}
          MINOR_VERSION=${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MINOR_VERSION}
          PATCH_VERSION=${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_PATCH_VERSION}
          CURRENT_DOCS_VERSION="${NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_CURRENT_DOCS_VERSION}"

          NEXT_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.$((PATCH_VERSION + 1))
          VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}

          echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
          echo "NEXT_PATCH_VERSION=${NEXT_PATCH_VERSION}" >> "${GITHUB_ENV}"
          echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

          echo "Current documentation version: $CURRENT_DOCS_VERSION"
          echo "Current release version: $PROWLER_VERSION"
          echo "Next patch version: $NEXT_PATCH_VERSION"
          echo "Target branch: $VERSION_BRANCH"
        env:
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MAJOR_VERSION: ${{ needs.detect-release-type.outputs.major_version }}
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_MINOR_VERSION: ${{ needs.detect-release-type.outputs.minor_version }}
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_PATCH_VERSION: ${{ needs.detect-release-type.outputs.patch_version }}
          NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_CURRENT_DOCS_VERSION: ${{ needs.detect-release-type.outputs.current_docs_version }}

      - name: Bump versions in documentation for patch version
        run: |
          set -e

          # Update prowler-app.mdx with current release version
          sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
          sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for documentation update to version branch
        uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
          title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          base: ${{ env.BASE_BRANCH }}
          commit-message: 'chore(docs): Bump version to v${{ env.PROWLER_VERSION }}'
          branch: docs-version-bump-to-v${{ env.PROWLER_VERSION }}
          title: 'chore(docs): Bump version to v${{ env.PROWLER_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

@@ -14,6 +14,8 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions: {}

jobs:
  scan-secrets:
    runs-on: ubuntu-latest

@@ -25,19 +27,23 @@ jobs:
      - name: Harden Runner
        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
        with:
          egress-policy: block
          allowed-endpoints: >
            github.com:443
            ghcr.io:443
            pkg-containers.githubusercontent.com:443
          # We can't block as Trufflehog needs to verify secrets against vendors
          egress-policy: audit
          # allowed-endpoints: >
          #   github.com:443
          #   ghcr.io:443
          #   pkg-containers.githubusercontent.com:443

      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          fetch-depth: 0
          # PRs only need the diff range; push to master/release walks the new range from event.before.
          # 50 is enough headroom for the longest realistic PR/push chain without paying for a full clone.
          fetch-depth: 50
          persist-credentials: false

      - name: Scan for secrets with TruffleHog
      - name: Scan diff for secrets with TruffleHog
        # Action auto-injects --since-commit/--branch from event payload; passing them in extra_args produces duplicate flags.
        uses: trufflesecurity/trufflehog@ef6e76c3c4023279497fab4721ffa071a722fd05 # v3.92.4
        with:
          extra_args: '--results=verified,unknown'
          extra_args: --results=verified,unknown

@@ -21,6 +21,8 @@ concurrency:
env:
  CHART_PATH: contrib/k8s/helm/prowler-app

permissions: {}

jobs:
  helm-lint:
    if: github.repository == 'prowler-cloud/prowler'

@@ -13,6 +13,8 @@ concurrency:
env:
  CHART_PATH: contrib/k8s/helm/prowler-app

permissions: {}

jobs:
  release-helm-chart:
    if: github.repository == 'prowler-cloud/prowler'

@@ -9,6 +9,8 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.event.issue.number }}
  cancel-in-progress: false

permissions: {}

jobs:
  lock:
    if: |

@@ -15,6 +15,8 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
  cancel-in-progress: true

permissions: {}

jobs:
  labeler:
    runs-on: ubuntu-latest

@@ -60,7 +62,7 @@ jobs:
            "Alan-TheGentleman"
            "alejandrobailo"
            "amitsharm"
            "andoniaf"
            # "andoniaf"
            "cesararroba"
            "danibarranqueroo"
            "HugoPBrito"

@@ -32,6 +32,8 @@ env:
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-mcp

permissions: {}

jobs:
  setup:
    if: github.repository == 'prowler-cloud/prowler'

@@ -150,7 +152,7 @@ jobs:
            org.opencontainers.image.created=${{ github.event_name == 'release' && github.event.release.published_at || github.event.head_commit.timestamp }}
            ${{ github.event_name == 'release' && format('org.opencontainers.image.version={0}', env.RELEASE_TAG) || '' }}
          cache-from: type=gha,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }},scope=${{ matrix.arch }}

  # Create and push multi-architecture manifest
  create-manifest:

@@ -5,10 +5,16 @@ on:
    branches:
      - 'master'
      - 'v5.*'
    paths:
      - 'mcp_server/**'
      - '.github/workflows/mcp-container-checks.yml'
  pull_request:
    branches:
      - 'master'
      - 'v5.*'
    paths:
      - 'mcp_server/**'
      - '.github/workflows/mcp-container-checks.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}

@@ -18,6 +24,8 @@ env:
  MCP_WORKING_DIR: ./mcp_server
  IMAGE_NAME: prowler-mcp

permissions: {}

jobs:
  mcp-dockerfile-lint:
    if: github.repository == 'prowler-cloud/prowler'

@@ -54,16 +62,7 @@ jobs:

  mcp-container-build-and-scan:
    if: github.repository == 'prowler-cloud/prowler'
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        include:
          - platform: linux/amd64
            runner: ubuntu-latest
            arch: amd64
          - platform: linux/arm64
            runner: ubuntu-24.04-arm
            arch: arm64
    runs-on: ubuntu-latest
    timeout-minutes: 30
    permissions:
      contents: read

@@ -87,6 +86,9 @@ jobs:
            api.github.com:443
            mirror.gcr.io:443
            check.trivy.dev:443
            get.trivy.dev:443
            release-assets.githubusercontent.com:443
            objects.githubusercontent.com:443

      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

@@ -107,23 +109,22 @@ jobs:
        if: steps.check-changes.outputs.any_changed == 'true'
        uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

      - name: Build MCP container for ${{ matrix.arch }}
      - name: Build MCP container
        if: steps.check-changes.outputs.any_changed == 'true'
        uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
        with:
          context: ${{ env.MCP_WORKING_DIR }}
          push: false
          load: true
          platforms: ${{ matrix.platform }}
          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
          cache-from: type=gha,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
          cache-from: type=gha
          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }}

      - name: Scan MCP container with Trivy for ${{ matrix.arch }}
      - name: Scan MCP container with Trivy
        if: steps.check-changes.outputs.any_changed == 'true'
        uses: ./.github/actions/trivy-scan
        with:
          image-name: ${{ env.IMAGE_NAME }}
          image-tag: ${{ github.sha }}-${{ matrix.arch }}
          image-tag: ${{ github.sha }}
          fail-on-critical: 'false'
          severity: 'CRITICAL'

@@ -14,6 +14,8 @@ env:
  PYTHON_VERSION: "3.12"
  WORKING_DIRECTORY: ./mcp_server

permissions: {}

jobs:
  validate-release:
    if: github.repository == 'prowler-cloud/prowler'

@@ -84,11 +86,32 @@ jobs:
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      # The MCP server version (mcp_server/pyproject.toml) is decoupled from the Prowler release
      # version: it only changes when MCP code changes. mcp-bump-version.yml normally keeps it in
      # sync with mcp_server/CHANGELOG.md, but this publish workflow still runs on every release.
      # Pre-flight PyPI check covers the legitimate "no MCP changes for this release" case (and any
      # workflow_dispatch re-runs) without failing with HTTP 400 (version exists).
      - name: Check if prowler-mcp version already exists on PyPI
        id: pypi-check
        working-directory: ${{ env.WORKING_DIRECTORY }}
        run: |
          MCP_VERSION=$(grep '^version' pyproject.toml | head -1 | sed -E 's/^version[[:space:]]*=[[:space:]]*"([^"]+)".*/\1/')
          echo "mcp_version=${MCP_VERSION}" >> "$GITHUB_OUTPUT"
          if curl -fsS "https://pypi.org/pypi/prowler-mcp/${MCP_VERSION}/json" >/dev/null 2>&1; then
            echo "skip=true" >> "$GITHUB_OUTPUT"
            echo "::notice title=Skipping prowler-mcp publish::Version ${MCP_VERSION} already exists on PyPI; bump mcp_server/pyproject.toml to publish a new release."
          else
            echo "skip=false" >> "$GITHUB_OUTPUT"
            echo "::notice title=Publishing prowler-mcp::Version ${MCP_VERSION} not on PyPI yet; proceeding."
          fi

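The pre-flight check works because PyPI's JSON API answers HTTP 200 for a published version and 404 otherwise, and `curl -f` turns a non-2xx response into a non-zero exit status. A standalone sketch (the version number is hypothetical; the real one is parsed from pyproject.toml):

```bash
MCP_VERSION="0.6.0"
if curl -fsS "https://pypi.org/pypi/prowler-mcp/${MCP_VERSION}/json" >/dev/null 2>&1; then
  echo "Version ${MCP_VERSION} already on PyPI: skip publish"
else
  echo "Version ${MCP_VERSION} not on PyPI: build and publish"
fi
```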
- name: Build prowler-mcp package
|
||||
if: steps.pypi-check.outputs.skip != 'true'
|
||||
working-directory: ${{ env.WORKING_DIRECTORY }}
|
||||
run: uv build
|
||||
|
||||
- name: Publish prowler-mcp package to PyPI
|
||||
if: steps.pypi-check.outputs.skip != 'true'
|
||||
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
|
||||
with:
|
||||
packages-dir: ${{ env.WORKING_DIRECTORY }}/dist/
|
||||
|
||||
@@ -0,0 +1,98 @@
|
||||
name: 'Nightly: ARM64 Container Builds'
|
||||
|
||||
# Mitigation for amd64-only PR container-checks: build amd64+arm64 nightly against
|
||||
# master to keep arm-specific Dockerfile regressions caught quickly. Build only —
|
||||
# no push, no Trivy (weekly checks already cover that).
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 4 * * *'
|
||||
workflow_dispatch: {}
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
build-arm64:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 60
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- component: sdk
|
||||
context: .
|
||||
dockerfile: ./Dockerfile
|
||||
image_name: prowler
|
||||
- component: api
|
||||
context: ./api
|
||||
dockerfile: ./api/Dockerfile
|
||||
image_name: prowler-api
|
||||
- component: ui
|
||||
context: ./ui
|
||||
dockerfile: ./ui/Dockerfile
|
||||
image_name: prowler-ui
|
||||
target: prod
|
||||
build_args: |
|
||||
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX
|
||||
- component: mcp
|
||||
context: ./mcp_server
|
||||
dockerfile: ./mcp_server/Dockerfile
|
||||
image_name: prowler-mcp
|
||||
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build ${{ matrix.component }} container (linux/arm64)
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: ${{ matrix.context }}
|
||||
file: ${{ matrix.dockerfile }}
|
||||
target: ${{ matrix.target }}
|
||||
push: false
|
||||
load: false
|
||||
platforms: linux/arm64
|
||||
tags: ${{ matrix.image_name }}:nightly-arm64
|
||||
build-args: ${{ matrix.build_args }}
|
||||
cache-from: type=gha,scope=arm64
|
||||
cache-to: type=gha,mode=min,scope=arm64
|
||||
|
||||
notify-failure:
|
||||
needs: build-arm64
|
||||
if: failure() && github.event_name == 'schedule'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Harden the runner (Audit all outbound calls)
|
||||
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Notify Slack on failure
|
||||
uses: slackapi/slack-github-action@91efab103c0de0a537f72a35f6b8cda0ee76bf0a # v2.1.1
|
||||
with:
|
||||
method: chat.postMessage
|
||||
token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload: |
|
||||
channel: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
text: ":rotating_light: Nightly arm64 container build failed for prowler — <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|view run>"
|
||||
errors: true
|
||||
@@ -16,6 +16,8 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
check-changelog:
|
||||
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
|
||||
@@ -39,10 +41,15 @@ jobs:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-depth: 1
|
||||
# zizmor: ignore[artipacked]
|
||||
persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch
|
||||
|
||||
- name: Fetch PR base ref for tj-actions/changed-files
|
||||
env:
|
||||
BASE_REF: ${{ github.event.pull_request.base.ref }}
|
||||
run: git fetch --depth=1 origin "${BASE_REF}"
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
|
||||
@@ -16,9 +16,17 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
check-compliance-mapping:
|
||||
if: contains(github.event.pull_request.labels.*.name, 'no-compliance-check') == false
|
||||
if: >-
|
||||
github.event.pull_request.state == 'open' &&
|
||||
contains(github.event.pull_request.labels.*.name, 'no-compliance-check') == false &&
|
||||
(
|
||||
(github.event.action != 'labeled' && github.event.action != 'unlabeled')
|
||||
|| github.event.label.name == 'no-compliance-check'
|
||||
)
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
@@ -37,10 +45,15 @@ jobs:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-depth: 1
|
||||
# zizmor: ignore[artipacked]
|
||||
persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch
|
||||
|
||||
- name: Fetch PR base ref for tj-actions/changed-files
|
||||
env:
|
||||
BASE_REF: ${{ github.event.pull_request.base.ref }}
|
||||
run: git fetch --depth=1 origin "${BASE_REF}"
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
|
||||
@@ -15,6 +15,8 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
check-conflicts:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -34,8 +36,14 @@ jobs:
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
fetch-depth: 1
|
||||
# zizmor: ignore[artipacked]
|
||||
persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch
|
||||
|
||||
- name: Fetch PR base ref for tj-actions/changed-files
|
||||
env:
|
||||
BASE_REF: ${{ github.event.pull_request.base.ref }}
|
||||
run: git fetch --depth=1 origin "${BASE_REF}"
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
|
||||
@@ -12,6 +12,8 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
trigger-cloud-pull-request:
|
||||
if: |
|
||||
|
||||
@@ -17,6 +17,8 @@ concurrency:
|
||||
env:
|
||||
PROWLER_VERSION: ${{ inputs.prowler_version }}
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
prepare-release:
|
||||
if: github.event_name == 'workflow_dispatch' && github.repository == 'prowler-cloud/prowler'
|
||||
@@ -38,15 +40,12 @@ jobs:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
python3 -m pip install --user poetry==2.1.1
|
||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||
install-dependencies: 'false'
|
||||
enable-cache: 'false'
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
|
||||
@@ -13,6 +13,8 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master

permissions: {}

jobs:
detect-release-type:
runs-on: ubuntu-latest
@@ -111,9 +113,9 @@ jobs:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: master
commit-message: 'chore(release): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
branch: version-bump-to-v${{ env.NEXT_MINOR_VERSION }}
title: 'chore(release): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
commit-message: 'chore(sdk): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
branch: sdk-version-bump-to-v${{ env.NEXT_MINOR_VERSION }}
title: 'chore(sdk): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
@@ -163,9 +165,9 @@ jobs:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'chore(release): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
branch: version-bump-to-v${{ env.FIRST_PATCH_VERSION }}
title: 'chore(release): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
commit-message: 'chore(sdk): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
branch: sdk-version-bump-to-v${{ env.FIRST_PATCH_VERSION }}
title: 'chore(sdk): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
@@ -231,9 +233,9 @@ jobs:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'chore(release): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
branch: version-bump-to-v${{ env.NEXT_PATCH_VERSION }}
title: 'chore(release): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
commit-message: 'chore(sdk): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
branch: sdk-version-bump-to-v${{ env.NEXT_PATCH_VERSION }}
title: 'chore(sdk): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description

@@ -5,11 +5,16 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'tests/providers/**/*_test.py'
- '.github/workflows/sdk-check-duplicate-test-names.yml'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
check-duplicate-test-names:
if: github.repository == 'prowler-cloud/prowler'

@@ -14,6 +14,8 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
sdk-code-quality:
if: github.repository == 'prowler-cloud/prowler'
@@ -69,22 +71,11 @@ jobs:
contrib/**
**/AGENTS.md

- name: Install Poetry
- name: Setup Python with Poetry
if: steps.check-changes.outputs.any_changed == 'true'
run: pipx install poetry==2.1.1

- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'

- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: |
poetry install --no-root
poetry run pip list

- name: Check Poetry lock file
if: steps.check-changes.outputs.any_changed == 'true'

@@ -30,6 +30,8 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
sdk-analyze:
if: github.repository == 'prowler-cloud/prowler'

@@ -3,9 +3,7 @@ name: 'SDK: Container Build and Push'
on:
push:
branches:
- 'v3' # For v3-latest
- 'v4.6' # For v4-latest
- 'master' # For latest
- 'master'
paths-ignore:
- '.github/**'
- '!.github/workflows/sdk-container-build-push.yml'
@@ -47,6 +45,8 @@ env:
# AWS configuration (for ECR)
AWS_REGION: us-east-1

permissions: {}

jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
@@ -54,7 +54,6 @@ jobs:
timeout-minutes: 5
outputs:
prowler_version: ${{ steps.get-prowler-version.outputs.prowler_version }}
prowler_version_major: ${{ steps.get-prowler-version.outputs.prowler_version_major }}
latest_tag: ${{ steps.get-prowler-version.outputs.latest_tag }}
stable_tag: ${{ steps.get-prowler-version.outputs.stable_tag }}
permissions:
@@ -74,15 +73,15 @@ jobs:
with:
persist-credentials: false

- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'
enable-cache: 'false'

- name: Install Poetry
run: |
pipx install poetry==2.1.1
pipx inject poetry poetry-bumpversion
- name: Inject poetry-bumpversion plugin
run: pipx inject poetry poetry-bumpversion

- name: Get Prowler version and set tags
id: get-prowler-version
@@ -90,32 +89,13 @@ jobs:
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
echo "prowler_version=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

# Extract major version
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
echo "prowler_version_major=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_OUTPUT}"

# Set version-specific tags
case ${PROWLER_VERSION_MAJOR} in
3)
echo "latest_tag=v3-latest" >> "${GITHUB_OUTPUT}"
echo "stable_tag=v3-stable" >> "${GITHUB_OUTPUT}"
echo "✓ Prowler v3 detected - tags: v3-latest, v3-stable"
;;
4)
echo "latest_tag=v4-latest" >> "${GITHUB_OUTPUT}"
echo "stable_tag=v4-stable" >> "${GITHUB_OUTPUT}"
echo "✓ Prowler v4 detected - tags: v4-latest, v4-stable"
;;
5)
echo "latest_tag=latest" >> "${GITHUB_OUTPUT}"
echo "stable_tag=stable" >> "${GITHUB_OUTPUT}"
echo "✓ Prowler v5 detected - tags: latest, stable"
;;
*)
echo "::error::Unsupported Prowler major version: ${PROWLER_VERSION_MAJOR}"
exit 1
;;
esac
if [[ "${PROWLER_VERSION_MAJOR}" != "5" ]]; then
echo "::error::Unsupported Prowler major version: ${PROWLER_VERSION_MAJOR}"
exit 1
fi
echo "latest_tag=latest" >> "${GITHUB_OUTPUT}"
echo "stable_tag=stable" >> "${GITHUB_OUTPUT}"

notify-release-started:
if: github.repository == 'prowler-cloud/prowler' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
@@ -226,7 +206,7 @@ jobs:
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }},scope=${{ matrix.arch }}

# Create and push multi-architecture manifest
create-manifest:
@@ -384,39 +364,3 @@ jobs:
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
step-outcome: ${{ steps.outcome.outputs.outcome }}
update-ts: ${{ needs.notify-release-started.outputs.message-ts }}

dispatch-v3-deployment:
needs: [setup, container-build-push]
if: always() && needs.setup.outputs.prowler_version_major == '3' && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read

steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit

- name: Calculate short SHA
id: short-sha
run: echo "short_sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT

- name: Dispatch v3 deployment (latest)
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}
event-type: dispatch
client-payload: '{"version":"v3-latest","tag":"${{ steps.short-sha.outputs.short_sha }}"}'

- name: Dispatch v3 deployment (release)
if: github.event_name == 'release'
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}
event-type: dispatch
client-payload: '{"version":"release","tag":"${{ needs.setup.outputs.prowler_version }}"}'

@@ -5,10 +5,22 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'Dockerfile*'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/sdk-container-checks.yml'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'Dockerfile*'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/sdk-container-checks.yml'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -17,6 +29,8 @@ concurrency:
env:
IMAGE_NAME: prowler

permissions: {}

jobs:
sdk-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -54,16 +68,7 @@ jobs:

sdk-container-build-and-scan:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
arch: amd64
- platform: linux/arm64
runner: ubuntu-24.04-arm
arch: arm64
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
@@ -85,6 +90,7 @@ jobs:
check.trivy.dev:443
debian.map.fastlydns.net:80
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443
pypi.org:443
files.pythonhosted.org:443
www.powershellgallery.com:443
@@ -129,23 +135,22 @@ jobs:
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

- name: Build SDK container for ${{ matrix.arch }}
- name: Build SDK container
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
context: .
push: false
load: true
platforms: ${{ matrix.platform }}
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }}

- name: Scan SDK container with Trivy for ${{ matrix.arch }}
- name: Scan SDK container with Trivy
if: steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}-${{ matrix.arch }}
image-tag: ${{ github.sha }}
fail-on-critical: 'false'
severity: 'CRITICAL'

@@ -13,6 +13,8 @@ env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: '3.12'

permissions: {}

jobs:
validate-release:
if: github.repository == 'prowler-cloud/prowler'
@@ -73,13 +75,12 @@ jobs:
with:
persist-credentials: false

- name: Install Poetry
run: pipx install poetry==2.1.1

- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'
enable-cache: 'false'

- name: Build Prowler package
run: poetry build
@@ -111,13 +112,12 @@ jobs:
with:
persist-credentials: false

- name: Install Poetry
run: pipx install poetry==2.1.1

- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'
enable-cache: 'false'

- name: Install toml package
run: pip install toml

@@ -13,6 +13,8 @@ env:
PYTHON_VERSION: '3.12'
AWS_REGION: 'us-east-1'

permissions: {}

jobs:
refresh-aws-regions:
if: github.repository == 'prowler-cloud/prowler'

@@ -12,6 +12,8 @@ concurrency:
env:
PYTHON_VERSION: '3.12'

permissions: {}

jobs:
refresh-oci-regions:
if: github.repository == 'prowler-cloud/prowler'

@@ -5,15 +5,33 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/sdk-tests.yml'
- '.github/workflows/sdk-security.yml'
- '.github/actions/setup-python-poetry/**'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/sdk-tests.yml'
- '.github/workflows/sdk-security.yml'
- '.github/actions/setup-python-poetry/**'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
sdk-security-scans:
if: github.repository == 'prowler-cloud/prowler'
@@ -69,20 +87,11 @@ jobs:
contrib/**
**/AGENTS.md

- name: Install Poetry
- name: Setup Python with Poetry
if: steps.check-changes.outputs.any_changed == 'true'
run: pipx install poetry==2.1.1

- name: Set up Python 3.12
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: ./.github/actions/setup-python-poetry
with:
python-version: '3.12'
cache: 'poetry'

- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: poetry install --no-root

- name: Security scan with Bandit
if: steps.check-changes.outputs.any_changed == 'true'
@@ -90,7 +99,8 @@ jobs:

- name: Security scan with Safety
if: steps.check-changes.outputs.any_changed == 'true'
run: poetry run safety check -r pyproject.toml
# Accepted CVEs, severity threshold, and ignore expirations live in .safety-policy.yml
run: poetry run safety check -r pyproject.toml --policy-file .safety-policy.yml

- name: Dead code detection with Vulture
if: steps.check-changes.outputs.any_changed == 'true'

@@ -14,6 +14,8 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
sdk-tests:
if: github.repository == 'prowler-cloud/prowler'
@@ -90,20 +92,11 @@ jobs:
contrib/**
**/AGENTS.md

- name: Install Poetry
- name: Setup Python with Poetry
if: steps.check-changes.outputs.any_changed == 'true'
run: pipx install poetry==2.1.1

- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'

- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: poetry install --no-root

# AWS Provider
- name: Check if AWS files changed
@@ -216,11 +209,11 @@ jobs:
echo "AWS service_paths='${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}'"

if [ "${STEPS_AWS_SERVICES_OUTPUTS_RUN_ALL}" = "true" ]; then
poetry run pytest -p no:randomly -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
elif [ -z "${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}" ]; then
echo "No AWS service paths detected; skipping AWS tests."
else
poetry run pytest -p no:randomly -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml ${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml ${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}
fi
env:
STEPS_AWS_SERVICES_OUTPUTS_RUN_ALL: ${{ steps.aws-services.outputs.run_all }}

@@ -31,6 +31,8 @@ on:
description: "Whether there are UI E2E tests to run"
value: ${{ jobs.analyze.outputs.has-ui-e2e }}

permissions: {}

jobs:
analyze:
runs-on: ubuntu-latest

@@ -13,6 +13,8 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master

permissions: {}

jobs:
detect-release-type:
runs-on: ubuntu-latest

@@ -26,6 +26,8 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
ui-analyze:
if: github.repository == 'prowler-cloud/prowler'

@@ -35,6 +35,8 @@ env:
# Build args
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1

permissions: {}

jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
@@ -149,7 +151,7 @@ jobs:
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }},scope=${{ matrix.arch }}

# Create and push multi-architecture manifest
create-manifest:

@@ -5,10 +5,16 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'ui/**'
- '.github/workflows/ui-container-checks.yml'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'ui/**'
- '.github/workflows/ui-container-checks.yml'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -18,6 +24,8 @@ env:
UI_WORKING_DIR: ./ui
IMAGE_NAME: prowler-ui

permissions: {}

jobs:
ui-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -55,16 +63,7 @@ jobs:

ui-container-build-and-scan:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
arch: amd64
- platform: linux/arm64
runner: ubuntu-24.04-arm
arch: arm64
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
@@ -89,6 +88,8 @@ jobs:
mirror.gcr.io:443
check.trivy.dev:443
get.trivy.dev:443
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443

- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -110,7 +111,7 @@ jobs:
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

- name: Build UI container for ${{ matrix.arch }}
- name: Build UI container
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
@@ -118,18 +119,17 @@ jobs:
target: prod
push: false
load: true
platforms: ${{ matrix.platform }}
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }}
build-args: |
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX

- name: Scan UI container with Trivy for ${{ matrix.arch }}
- name: Scan UI container with Trivy
if: steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}-${{ matrix.arch }}
image-tag: ${{ github.sha }}
fail-on-critical: 'false'
severity: 'CRITICAL'

@@ -15,6 +15,12 @@ on:
- 'ui/**'
- 'api/**' # API changes can affect UI E2E

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
# First, analyze which tests need to run
impact-analysis:

@@ -264,7 +270,7 @@ jobs:
with:
name: playwright-report
path: ui/playwright-report/
retention-days: 30
retention-days: 7

- name: Cleanup services
if: always()

@@ -1,14 +1,14 @@
name: 'UI: Tests'
name: "UI: Tests"

on:
push:
branches:
- 'master'
- 'v5.*'
- "master"
- "v5.*"
pull_request:
branches:
- 'master'
- 'v5.*'
- "master"
- "v5.*"

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -16,7 +16,9 @@ concurrency:

env:
UI_WORKING_DIR: ./ui
NODE_VERSION: '24.13.0'
NODE_VERSION: "24.13.0"

permissions: {}

jobs:
ui-tests:
@@ -40,6 +42,9 @@ jobs:
fonts.gstatic.com:443
api.github.com:443
release-assets.githubusercontent.com:443
cdn.playwright.dev:443
objects.githubusercontent.com:443
playwright.download.prss.microsoft.com:443

- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -131,7 +136,7 @@ jobs:
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed == 'true'
run: |
echo "Critical paths changed - running ALL unit tests"
pnpm run test:run
pnpm run test:unit

- name: Run unit tests (related to changes only)
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed != 'true' && steps.changed-source.outputs.all_changed_files != ''
@@ -140,7 +145,7 @@ jobs:
echo "${STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES}"
# Convert space-separated to vitest related format (remove ui/ prefix for relative paths)
CHANGED_FILES=$(echo "${STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES}" | tr ' ' '\n' | sed 's|^ui/||' | tr '\n' ' ')
pnpm exec vitest related $CHANGED_FILES --run
pnpm exec vitest related $CHANGED_FILES --run --project unit
env:
STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES: ${{ steps.changed-source.outputs.all_changed_files }}

@@ -148,7 +153,25 @@ jobs:
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed != 'true' && steps.changed-source.outputs.all_changed_files == ''
run: |
echo "Only test files changed - running ALL unit tests"
pnpm run test:run
pnpm run test:unit

- name: Cache Playwright browsers
if: steps.check-changes.outputs.any_changed == 'true'
id: playwright-cache
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-chromium-${{ hashFiles('ui/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-playwright-chromium-

- name: Install Playwright Chromium browser
if: steps.check-changes.outputs.any_changed == 'true' && steps.playwright-cache.outputs.cache-hit != 'true'
run: pnpm exec playwright install chromium

- name: Run browser tests
if: steps.check-changes.outputs.any_changed == 'true'
run: pnpm run test:browser

- name: Build application
if: steps.check-changes.outputs.any_changed == 'true'

@@ -8,6 +8,7 @@ rules:
- docs-bump-version.yml
- issue-triage.lock.yml
- mcp-container-build-push.yml
- nightly-arm64-container-builds.yml
- pr-merged.yml
- prepare-release.yml
- sdk-bump-version.yml

@@ -84,6 +84,7 @@ continue.json
.continuerc.json

# AI Coding Assistants - OpenCode
.opencode/
opencode.json

# AI Coding Assistants - GitHub Copilot
@@ -150,6 +151,8 @@ node_modules

# Persistent data
_data/
/openspec/
/.gitmodules

# AI Instructions (generated by skills/setup.sh from AGENTS.md)
CLAUDE.md

+96 -45
@@ -1,148 +1,199 @@
# Priority tiers (lower = runs first, same priority = concurrent):
# P0 — fast file fixers
# P10 — validators and guards
# P20 — auto-formatters
# P30 — linters
# P40 — security scanners
# P50 — dependency validation

default_install_hook_types: [pre-commit]

repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
## GENERAL (prek built-in — no external repo needed)
- repo: builtin
hooks:
- id: check-merge-conflict
priority: 10
- id: check-yaml
args: ["--unsafe"]
exclude: prowler/config/llm_config.yaml
args: ["--allow-multiple-documents"]
exclude: (prowler/config/llm_config.yaml|contrib/)
priority: 10
- id: check-json
priority: 10
- id: end-of-file-fixer
priority: 0
- id: trailing-whitespace
priority: 0
- id: no-commit-to-branch
priority: 10
- id: pretty-format-json
args: ["--autofix", --no-sort-keys, --no-ensure-ascii]
priority: 10

## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.13.0
rev: v2.16.0
hooks:
- id: pretty-format-toml
args: [--autofix]
files: pyproject.toml
priority: 20

## GITHUB ACTIONS
- repo: https://github.com/zizmorcore/zizmor-pre-commit
rev: v1.6.0
rev: v1.24.1
hooks:
- id: zizmor
files: ^\.github/
# zizmor only audits workflows, composite actions and dependabot
# config; broader paths trip exit 3 ("no audit was performed").
files: ^\.github/(workflows|actions)/.+\.ya?ml$|^\.github/dependabot\.ya?ml$
priority: 30

## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.10.0
rev: v0.11.0
hooks:
- id: shellcheck
exclude: contrib
priority: 30

## PYTHON
## PYTHON — SDK (prowler/, tests/, dashboard/, util/, scripts/)
- repo: https://github.com/myint/autoflake
rev: v2.3.1
rev: v2.3.3
hooks:
- id: autoflake
exclude: ^skills/
args:
[
"--in-place",
"--remove-all-unused-imports",
"--remove-unused-variable",
]
name: "SDK - autoflake"
files: { glob: ["{prowler,tests,dashboard,util,scripts}/**/*.py"] }
args: ["--in-place", "--remove-all-unused-imports", "--remove-unused-variable"]
priority: 20

- repo: https://github.com/pycqa/isort
rev: 5.13.2
rev: 8.0.1
hooks:
- id: isort
exclude: ^skills/
name: "SDK - isort"
files: { glob: ["{prowler,tests,dashboard,util,scripts}/**/*.py"] }
args: ["--profile", "black"]
priority: 20

- repo: https://github.com/psf/black
rev: 24.4.2
rev: 26.3.1
hooks:
- id: black
exclude: ^skills/
name: "SDK - black"
files: { glob: ["{prowler,tests,dashboard,util,scripts}/**/*.py"] }
priority: 20

- repo: https://github.com/pycqa/flake8
rev: 7.0.0
rev: 7.3.0
hooks:
- id: flake8
exclude: (contrib|^skills/)
name: "SDK - flake8"
files: { glob: ["{prowler,tests,dashboard,util,scripts}/**/*.py"] }
args: ["--ignore=E266,W503,E203,E501,W605"]
priority: 30

## PYTHON — API + MCP Server (ruff)
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.15.11
hooks:
- id: ruff
name: "API + MCP - ruff check"
files: { glob: ["{api,mcp_server}/**/*.py"] }
args: ["--fix"]
priority: 30
- id: ruff-format
name: "API + MCP - ruff format"
files: { glob: ["{api,mcp_server}/**/*.py"] }
priority: 20

## PYTHON — Poetry
- repo: https://github.com/python-poetry/poetry
rev: 2.1.1
rev: 2.3.4
hooks:
- id: poetry-check
name: API - poetry-check
args: ["--directory=./api"]
files: { glob: ["api/{pyproject.toml,poetry.lock}"] }
pass_filenames: false
priority: 50

- id: poetry-lock
name: API - poetry-lock
args: ["--directory=./api"]
files: { glob: ["api/{pyproject.toml,poetry.lock}"] }
pass_filenames: false
priority: 50

- id: poetry-check
name: SDK - poetry-check
args: ["--directory=./"]
files: { glob: ["{pyproject.toml,poetry.lock}"] }
pass_filenames: false
priority: 50

- id: poetry-lock
name: SDK - poetry-lock
args: ["--directory=./"]
files: { glob: ["{pyproject.toml,poetry.lock}"] }
pass_filenames: false
priority: 50

## CONTAINERS
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta
rev: v2.14.0
hooks:
- id: hadolint
args: ["--ignore=DL3013"]
priority: 30

## LOCAL HOOKS
- repo: local
hooks:
- id: pylint
name: pylint
entry: bash -c 'pylint --disable=W,C,R,E -j 0 -rn -sn prowler/'
name: "SDK - pylint"
entry: pylint --disable=W,C,R,E -j 0 -rn -sn
language: system
files: '.*\.py'
types: [python]
files: { glob: ["{prowler,tests,dashboard,util,scripts}/**/*.py"] }
priority: 30

- id: trufflehog
name: TruffleHog
description: Detect secrets in your data.
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
entry: bash -c 'trufflehog --no-update git file://. --since-commit HEAD --only-verified --fail'
# For running trufflehog in docker, use the following entry instead:
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
pass_filenames: false
stages: ["pre-commit", "pre-push"]
priority: 40

- id: bandit
name: bandit
description: "Bandit is a tool for finding common security issues in Python code"
entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/,./skills/' -r .'
entry: bandit -q -lll
language: system
types: [python]
files: '.*\.py'
exclude: { glob: ["{contrib,skills}/**", "**/.venv/**", "**/*_test.py"] }
priority: 40

- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
# TODO: 79023 & 79027 knack ReDoS until `azure-cli-core` (via `cartography`) allows `knack` >=0.13.0
# TODO: 86217 because `alibabacloud-tea-openapi == 0.4.3` don't let us upgrade `cryptography >= 46.0.0`
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353,77744,77745,79023,79027,86217'
# Accepted CVEs, severity threshold, and ignore expirations live in .safety-policy.yml
entry: safety check --policy-file .safety-policy.yml
language: system
pass_filenames: false
files: { glob: ["**/pyproject.toml", "**/poetry.lock", "**/requirements*.txt", ".safety-policy.yml"] }
priority: 40

- id: vulture
name: vulture
description: "Vulture finds unused code in Python programs."
entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/,skills/" --min-confidence 100 .'
entry: vulture --min-confidence 100
language: system
types: [python]
files: '.*\.py'

- id: ui-checks
name: UI - Husky Pre-commit
description: "Run UI pre-commit checks (Claude Code validation + healthcheck)"
entry: bash -c 'cd ui && .husky/pre-commit'
language: system
files: '^ui/.*\.(ts|tsx|js|jsx|json|css)$'
pass_filenames: false
verbose: true
priority: 40

+1 -1
@@ -13,7 +13,7 @@ build:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- python -m pip install poetry
- python -m pip install poetry==2.3.4
post_install:
# Install dependencies with 'docs' dependency group
# https://python-poetry.org/docs/managing-dependencies/#dependency-groups

@@ -0,0 +1,58 @@
# Safety policy for `safety check` (Safety CLI 3.x, v2 schema).
# Applied in: .pre-commit-config.yaml, .github/workflows/api-security.yml,
# .github/workflows/sdk-security.yml via `--policy-file`.
#
# Validate: poetry run safety validate policy_file --path .safety-policy.yml

security:
# Scan unpinned requirements too. Prowler pins via poetry.lock, so this is
# defensive against accidental unpinned entries.
ignore-unpinned-requirements: False

# CVSS severity filter. 7 = report only HIGH (7.0–8.9) and CRITICAL (9.0–10.0).
# Reference: 9=CRITICAL only, 7=CRITICAL+HIGH, 4=CRITICAL+HIGH+MEDIUM.
ignore-cvss-severity-below: 7

# Unknown severity is unrated, not safe. Keep False so unrated CVEs still fail
# the build and get a human eye. Flip to True only if noise is unmanageable.
ignore-cvss-unknown-severity: False

# Fail the build when a non-ignored vulnerability is found.
continue-on-vulnerability-error: False

# Explicit accepted vulnerabilities. Each entry MUST have a reason and an
# expiry. Expired entries fail the scan, forcing re-audit.
ignore-vulnerabilities:
77744:
reason: "Botocore requires urllib3 1.X. Remove once upgraded to urllib3 2.X."
expires: '2026-10-22'
77745:
reason: "Botocore requires urllib3 1.X. Remove once upgraded to urllib3 2.X."
expires: '2026-10-22'
79023:
reason: "knack ReDoS; blocked until azure-cli-core (via cartography) allows knack >=0.13.0."
expires: '2026-10-22'
79027:
reason: "knack ReDoS; blocked until azure-cli-core (via cartography) allows knack >=0.13.0."
expires: '2026-10-22'
86217:
reason: "alibabacloud-tea-openapi==0.4.3 blocks upgrade to cryptography >=46.0.0."
expires: '2026-10-22'
71600:
reason: "CVE-2024-1135 false positive. Fixed in gunicorn 22.0.0; project uses 23.0.0."
expires: '2026-10-22'
70612:
reason: "TBD - audit required. Reason not documented in prior --ignore list."
expires: '2026-07-22'
66963:
reason: "TBD - audit required. Reason not documented in prior --ignore list."
expires: '2026-07-22'
74429:
reason: "TBD - audit required. Reason not documented in prior --ignore list."
expires: '2026-07-22'
76352:
reason: "TBD - audit required. Reason not documented in prior --ignore list."
expires: '2026-07-22'
76353:
reason: "TBD - audit required. Reason not documented in prior --ignore list."
expires: '2026-07-22'

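As a quick sketch of how this policy file is exercised locally, both commands below come from the diff itself (the validate command from the policy header, the check invocation from the updated pre-commit hook):

```console
# Validate the policy file after editing it
poetry run safety validate policy_file --path .safety-policy.yml

# Run the same scan the pre-commit hook and CI workflows use
poetry run safety check --policy-file .safety-policy.yml
```
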
@@ -0,0 +1,2 @@
.envrc
ui/.env.local

@@ -15,7 +15,7 @@ Use these skills for detailed patterns on-demand:
|-------|-------------|-----|
| `typescript` | Const types, flat interfaces, utility types | [SKILL.md](skills/typescript/SKILL.md) |
| `react-19` | No useMemo/useCallback, React Compiler | [SKILL.md](skills/react-19/SKILL.md) |
| `nextjs-15` | App Router, Server Actions, streaming | [SKILL.md](skills/nextjs-15/SKILL.md) |
| `nextjs-16` | App Router, Server Actions, proxy.ts, streaming | [SKILL.md](skills/nextjs-16/SKILL.md) |
| `tailwind-4` | cn() utility, no var() in className | [SKILL.md](skills/tailwind-4/SKILL.md) |
| `playwright` | Page Object Model, MCP workflow, selectors | [SKILL.md](skills/playwright/SKILL.md) |
| `pytest` | Fixtures, mocking, markers, parametrize | [SKILL.md](skills/pytest/SKILL.md) |
@@ -60,11 +60,14 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|--------|-------|
| Add changelog entry for a PR or feature | `prowler-changelog` |
| Adding DRF pagination or permissions | `django-drf` |
| Adding a compliance output formatter (per-provider class + table dispatcher) | `prowler-compliance` |
| Adding indexes or constraints to database tables | `django-migration-psql` |
| Adding new providers | `prowler-provider` |
| Adding privilege escalation detection queries | `prowler-attack-paths-query` |
| Adding services to existing providers | `prowler-provider` |
| After creating/modifying a skill | `skill-sync` |
| App Router / Server Actions | `nextjs-15` |
| App Router / Server Actions | `nextjs-16` |
| Auditing check-to-requirement mappings as a cloud auditor | `prowler-compliance` |
| Building AI chat features | `ai-sdk-5` |
| Committing changes | `prowler-commit` |
| Configuring MCP servers in agentic workflows | `gh-aw` |
@@ -78,6 +81,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
| Creating a git commit | `prowler-commit` |
| Creating new checks | `prowler-sdk-check` |
| Creating new skills | `skill-creator` |
| Creating or reviewing Django migrations | `django-migration-psql` |
| Creating/modifying Prowler UI components | `prowler-ui` |
| Creating/modifying models, views, serializers | `prowler-api` |
| Creating/updating compliance frameworks | `prowler-compliance` |
@@ -85,6 +89,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
| Debugging gh-aw compilation errors | `gh-aw` |
| Fill .github/pull_request_template.md (Context/Description/Steps to review/Checklist) | `prowler-pr` |
| Fixing bug | `tdd` |
| Fixing compliance JSON bugs (duplicate IDs, empty Section, stale refs) | `prowler-compliance` |
| General Prowler development questions | `prowler` |
| Implementing JSON:API endpoints | `django-drf` |
| Implementing feature | `tdd` |
@@ -102,6 +107,8 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
| Review changelog format and conventions | `prowler-changelog` |
| Reviewing JSON:API compliance | `jsonapi` |
| Reviewing compliance framework PRs | `prowler-compliance-review` |
| Running makemigrations or pgmakemigrations | `django-migration-psql` |
| Syncing compliance framework with upstream catalog | `prowler-compliance` |
| Testing RLS tenant isolation | `prowler-test-api` |
| Testing hooks or utilities | `vitest` |
| Troubleshoot why a skill is missing from AGENTS.md auto-invoke | `skill-sync` |
@@ -129,6 +136,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
| Writing React components | `react-19` |
| Writing TypeScript types/interfaces | `typescript` |
| Writing Vitest tests | `vitest` |
| Writing data backfill or data migration | `django-migration-psql` |
| Writing documentation | `prowler-docs` |
| Writing unit tests for UI | `vitest` |

@@ -140,9 +148,9 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,

| Component | Location | Tech Stack |
|-----------|----------|------------|
| SDK | `prowler/` | Python 3.10+, Poetry |
| SDK | `prowler/` | Python 3.10+, Poetry 2.3+ |
| API | `api/` | Django 5.1, DRF, Celery |
| UI | `ui/` | Next.js 15, React 19, Tailwind 4 |
| UI | `ui/` | Next.js 16, React 19, Tailwind 4 |
| MCP Server | `mcp_server/` | FastMCP, Python 3.12+ |
| Dashboard | `dashboard/` | Dash, Plotly |

@@ -153,12 +161,12 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,
```bash
# Setup
poetry install --with dev
poetry run pre-commit install
poetry run prek install

# Code quality
poetry run make lint
poetry run make format
poetry run pre-commit run --all-files
poetry run prek run --all-files
```

---

+29 -6
@@ -1,11 +1,34 @@
# Do you want to learn on how to...

- Contribute with your code or fixes to Prowler
- Create a new check for a provider
- Create a new security compliance framework
- Add a custom output format
- Add a new integration
- Contribute with documentation
- [Contribute with your code or fixes to Prowler](https://docs.prowler.com/developer-guide/introduction)
- [Create a new provider](https://docs.prowler.com/developer-guide/provider)
- [Create a new service](https://docs.prowler.com/developer-guide/services)
- [Create a new check for a provider](https://docs.prowler.com/developer-guide/checks)
- [Create a new security compliance framework](https://docs.prowler.com/developer-guide/security-compliance-framework)
- [Add a custom output format](https://docs.prowler.com/developer-guide/outputs)
- [Add a new integration](https://docs.prowler.com/developer-guide/integrations)
- [Contribute with documentation](https://docs.prowler.com/developer-guide/documentation)
- [Write unit tests](https://docs.prowler.com/developer-guide/unit-testing)
- [Write integration tests](https://docs.prowler.com/developer-guide/integration-testing)
- [Write end-to-end tests](https://docs.prowler.com/developer-guide/end2end-testing)
- [Debug Prowler](https://docs.prowler.com/developer-guide/debugging)
- [Configure checks](https://docs.prowler.com/developer-guide/configurable-checks)
- [Rename checks](https://docs.prowler.com/developer-guide/renaming-checks)
- [Follow the check metadata guidelines](https://docs.prowler.com/developer-guide/check-metadata-guidelines)
- [Extend the MCP server](https://docs.prowler.com/developer-guide/mcp-server)
- [Extend Lighthouse AI](https://docs.prowler.com/developer-guide/lighthouse-architecture)
- [Add AI skills](https://docs.prowler.com/developer-guide/ai-skills)

Provider-specific developer notes:

- [AWS](https://docs.prowler.com/developer-guide/aws-details)
- [Azure](https://docs.prowler.com/developer-guide/azure-details)
- [Google Cloud](https://docs.prowler.com/developer-guide/gcp-details)
- [Alibaba Cloud](https://docs.prowler.com/developer-guide/alibabacloud-details)
- [Kubernetes](https://docs.prowler.com/developer-guide/kubernetes-details)
- [Microsoft 365](https://docs.prowler.com/developer-guide/m365-details)
- [GitHub](https://docs.prowler.com/developer-guide/github-details)
- [LLM](https://docs.prowler.com/developer-guide/llm-details)

Want some swag as appreciation for your contribution?

+21 -2
@@ -6,9 +6,12 @@ LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
ARG POWERSHELL_VERSION=7.5.0
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}

ARG TRIVY_VERSION=0.69.2
ARG TRIVY_VERSION=0.70.0
ENV TRIVY_VERSION=${TRIVY_VERSION}

ARG ZIZMOR_VERSION=1.24.1
ENV ZIZMOR_VERSION=${ZIZMOR_VERSION}

# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
@@ -48,6 +51,22 @@ RUN ARCH=$(uname -m) && \
mkdir -p /tmp/.cache/trivy && \
chmod 777 /tmp/.cache/trivy

# Install zizmor for GitHub Actions workflow scanning
RUN ARCH=$(uname -m) && \
if [ "$ARCH" = "x86_64" ]; then \
ZIZMOR_ARCH="x86_64-unknown-linux-gnu" ; \
elif [ "$ARCH" = "aarch64" ]; then \
ZIZMOR_ARCH="aarch64-unknown-linux-gnu" ; \
else \
echo "Unsupported architecture for zizmor: $ARCH" && exit 1 ; \
fi && \
wget --progress=dot:giga "https://github.com/zizmorcore/zizmor/releases/download/v${ZIZMOR_VERSION}/zizmor-${ZIZMOR_ARCH}.tar.gz" -O /tmp/zizmor.tar.gz && \
mkdir -p /tmp/zizmor-extract && \
tar zxf /tmp/zizmor.tar.gz -C /tmp/zizmor-extract && \
mv /tmp/zizmor-extract/zizmor /usr/local/bin/zizmor && \
chmod +x /usr/local/bin/zizmor && \
rm -rf /tmp/zizmor.tar.gz /tmp/zizmor-extract

# Add prowler user
RUN addgroup --gid 1000 prowler && \
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
@@ -68,7 +87,7 @@ ENV HOME='/home/prowler'
ENV PATH="${HOME}/.local/bin:${PATH}"
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry
pip install --no-cache-dir poetry==2.3.4

RUN poetry install --compile && \
rm -rf ~/.cache/pip

@@ -104,22 +104,22 @@ Every AWS provider scan will enqueue an Attack Paths ingestion job automatically

| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) | Support | Interface |
|---|---|---|---|---|---|---|
| AWS | 572 | 83 | 41 | 17 | Official | UI, API, CLI |
| Azure | 165 | 20 | 18 | 13 | Official | UI, API, CLI |
| GCP | 100 | 13 | 15 | 11 | Official | UI, API, CLI |
| Kubernetes | 83 | 7 | 7 | 9 | Official | UI, API, CLI |
| GitHub | 21 | 2 | 1 | 2 | Official | UI, API, CLI |
| M365 | 89 | 9 | 4 | 5 | Official | UI, API, CLI |
| OCI | 48 | 13 | 3 | 10 | Official | UI, API, CLI |
| Alibaba Cloud | 61 | 9 | 3 | 9 | Official | UI, API, CLI |
| Cloudflare | 29 | 2 | 0 | 5 | Official | UI, API, CLI |
| AWS | 595 | 84 | 43 | 17 | Official | UI, API, CLI |
| Azure | 167 | 22 | 19 | 16 | Official | UI, API, CLI |
| GCP | 102 | 18 | 17 | 12 | Official | UI, API, CLI |
| Kubernetes | 83 | 7 | 7 | 11 | Official | UI, API, CLI |
| GitHub | 24 | 3 | 1 | 5 | Official | UI, API, CLI |
| M365 | 101 | 10 | 4 | 10 | Official | UI, API, CLI |
| OCI | 51 | 14 | 4 | 10 | Official | UI, API, CLI |
| Alibaba Cloud | 61 | 9 | 4 | 9 | Official | UI, API, CLI |
| Cloudflare | 29 | 3 | 0 | 5 | Official | UI, API, CLI |
| IaC | [See `trivy` docs.](https://trivy.dev/latest/docs/coverage/iac/) | N/A | N/A | N/A | Official | UI, API, CLI |
| MongoDB Atlas | 10 | 3 | 0 | 8 | Official | UI, API, CLI |
| LLM | [See `promptfoo` docs.](https://www.promptfoo.dev/docs/red-team/plugins/) | N/A | N/A | N/A | Official | CLI |
| Image | N/A | N/A | N/A | N/A | Official | CLI, API |
| Google Workspace | 1 | 1 | 0 | 1 | Official | CLI |
| OpenStack | 27 | 4 | 0 | 8 | Official | UI, API, CLI |
| Vercel | 30 | 6 | 0 | 5 | Official | CLI |
| Google Workspace | 25 | 4 | 2 | 4 | Official | CLI |
| OpenStack | 34 | 5 | 0 | 9 | Official | UI, API, CLI |
| Vercel | 26 | 6 | 0 | 5 | Official | CLI |
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |

> [!Note]

@@ -246,14 +246,7 @@ Some pre-commit hooks require tools installed on your system:

1. **Install [TruffleHog](https://github.com/trufflesecurity/trufflehog#install)** (secret scanning) — see the [official installation options](https://github.com/trufflesecurity/trufflehog#install).

2. **Install [Safety](https://github.com/pyupio/safety)** (dependency vulnerability checking):

```console
# Requires a Python environment (e.g. via pyenv)
pip install safety
```

3. **Install [Hadolint](https://github.com/hadolint/hadolint#install)** (Dockerfile linting) — see the [official installation options](https://github.com/hadolint/hadolint#install).
2. **Install [Hadolint](https://github.com/hadolint/hadolint#install)** (Dockerfile linting) — see the [official installation options](https://github.com/hadolint/hadolint#install).

## Prowler CLI
### Pip package
@@ -307,6 +300,36 @@ python prowler-cli.py -v
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.

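A minimal sketch of activating the environment on Poetry v2.0.0 and later, assuming the `poetry env activate` subcommand described in the linked guide (exact output varies by shell):

```console
# Prints the shell-specific activation command for the project's virtualenv
poetry env activate
# Evaluate it to activate the environment in the current shell
eval "$(poetry env activate)"
```
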
# 🛡️ GitHub Action

The official **Prowler GitHub Action** runs Prowler scans in your GitHub workflows using the official [`prowlercloud/prowler`](https://hub.docker.com/r/prowlercloud/prowler) Docker image. Scans run on any [supported provider](https://docs.prowler.com/user-guide/providers/), with optional [`--push-to-cloud`](https://docs.prowler.com/user-guide/tutorials/prowler-app-import-findings) to send findings to Prowler Cloud and optional SARIF upload so findings show up in the repo's **Security → Code scanning** tab and as inline PR annotations.

```yaml
name: Prowler IaC Scan
on:
  pull_request:

permissions:
  contents: read
  security-events: write
  actions: read

jobs:
  prowler:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: prowler-cloud/prowler@5.25
        with:
          provider: iac
          output-formats: sarif json-ocsf
          upload-sarif: true
          flags: --severity critical high
```

Full configuration, per-provider authentication, and SARIF examples: [Prowler GitHub Action tutorial](docs/user-guide/tutorials/prowler-app-github-action.mdx). Marketplace listing: [Prowler Security Scan](https://github.com/marketplace/actions/prowler-security-scan).

# ✏️ High level architecture

## Prowler App

+307
@@ -0,0 +1,307 @@
name: Prowler Security Scan
description: Run Prowler cloud security scanner using the official Docker image
branding:
  icon: cloud
  color: green

inputs:
  provider:
    description: Cloud provider to scan (e.g. aws, azure, gcp, github, kubernetes, iac). See https://docs.prowler.com for supported providers.
    required: true
  image-tag:
    description: >
      Docker image tag for prowlercloud/prowler.
      Default is "stable" (latest release). Available tags:
      "stable" (latest release), "latest" (master branch, not stable),
      "<x.y.z>" (pinned release version).
      See all tags at https://hub.docker.com/r/prowlercloud/prowler/tags
    required: false
    default: stable
  output-formats:
    description: Output format(s) for scan results (e.g. "json-ocsf", "sarif json-ocsf")
    required: false
    default: json-ocsf
  push-to-cloud:
    description: Push scan findings to Prowler Cloud. Requires the PROWLER_CLOUD_API_KEY environment variable. See https://docs.prowler.com/user-guide/tutorials/prowler-app-import-findings#using-the-cli
    required: false
    default: "false"
  flags:
    description: 'Additional CLI flags passed to the Prowler scan (e.g. "--severity critical high --compliance cis_aws"). Values containing spaces can be quoted, e.g. "--resource-tag ''Environment=My Server''".'
    required: false
    default: ""
  extra-env:
    description: >
      Space-, newline-, or comma-separated list of host environment variable NAMES to forward to the Prowler container
      (e.g. "AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN" for AWS,
      "GITHUB_PERSONAL_ACCESS_TOKEN" for GitHub, "CLOUDFLARE_API_TOKEN" for Cloudflare).
      List names only; set the values via `env:` at the workflow or job level (typically from `secrets.*`).
      See the README for per-provider examples.
    required: false
    default: ""
  upload-sarif:
    description: 'Upload SARIF results to GitHub Code Scanning (requires "sarif" in output-formats and both `security-events: write` and `actions: read` permissions)'
    required: false
    default: "false"
  sarif-file:
    description: Path to the SARIF file to upload (auto-detected from output/ if not set)
    required: false
    default: ""
  sarif-category:
    description: Category for the SARIF upload (used to distinguish multiple analyses)
    required: false
    default: prowler
  fail-on-findings:
    description: Fail the workflow step when Prowler detects findings (exit code 3). By default the action tolerates findings and succeeds.
    required: false
    default: "false"

runs:
  using: composite
  steps:
    - name: Validate inputs
      shell: bash
      env:
        INPUT_IMAGE_TAG: ${{ inputs.image-tag }}
        INPUT_UPLOAD_SARIF: ${{ inputs.upload-sarif }}
        INPUT_OUTPUT_FORMATS: ${{ inputs.output-formats }}
      run: |
        # Validate image tag format (alphanumeric, dots, hyphens, underscores only)
        if [[ ! "$INPUT_IMAGE_TAG" =~ ^[a-zA-Z0-9._-]+$ ]]; then
          echo "::error::Invalid image-tag '${INPUT_IMAGE_TAG}'. Must contain only alphanumeric characters, dots, hyphens, and underscores."
          exit 1
        fi

        # Warn if upload-sarif is enabled but sarif not in output-formats
        if [ "$INPUT_UPLOAD_SARIF" = "true" ]; then
          if [[ ! "$INPUT_OUTPUT_FORMATS" =~ (^|[[:space:]])sarif($|[[:space:]]) ]]; then
            echo "::warning::upload-sarif is enabled but 'sarif' is not included in output-formats ('${INPUT_OUTPUT_FORMATS}'). SARIF upload will fail unless you add 'sarif' to output-formats."
          fi
        fi

    - name: Run Prowler scan
      shell: bash
      env:
        INPUT_PROVIDER: ${{ inputs.provider }}
        INPUT_IMAGE_TAG: ${{ inputs.image-tag }}
        INPUT_OUTPUT_FORMATS: ${{ inputs.output-formats }}
        INPUT_PUSH_TO_CLOUD: ${{ inputs.push-to-cloud }}
        INPUT_FLAGS: ${{ inputs.flags }}
        INPUT_EXTRA_ENV: ${{ inputs.extra-env }}
        INPUT_FAIL_ON_FINDINGS: ${{ inputs.fail-on-findings }}
      run: |
        set -e

        # Parse space-separated inputs with shlex so values with spaces can be quoted
        # (e.g. `--resource-tag 'Environment=My Server'`).
        mapfile -t OUTPUT_FORMATS < <(python3 -c 'import shlex, os; [print(t) for t in shlex.split(os.environ.get("INPUT_OUTPUT_FORMATS", ""))]')
        mapfile -t EXTRA_FLAGS < <(python3 -c 'import shlex, os; [print(t) for t in shlex.split(os.environ.get("INPUT_FLAGS", ""))]')
        mapfile -t EXTRA_ENV_NAMES < <(python3 -c 'import shlex, os; [print(t) for t in shlex.split(os.environ.get("INPUT_EXTRA_ENV", "").replace(",", " "))]')

        env_args=()
        for var in "${EXTRA_ENV_NAMES[@]}"; do
          [ -z "$var" ] && continue
          if [[ ! "$var" =~ ^[A-Za-z_][A-Za-z0-9_]*$ ]]; then
            echo "::error::Invalid env var name '${var}' in extra-env. Names must match ^[A-Za-z_][A-Za-z0-9_]*$."
            exit 1
          fi
          env_args+=("-e" "$var")
        done

        push_args=()
        if [ "$INPUT_PUSH_TO_CLOUD" = "true" ]; then
          push_args=("--push-to-cloud")
          env_args+=("-e" "PROWLER_CLOUD_API_KEY")
        fi
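
        # Presumably needed so the non-root user inside the container can write
        # scan results into the bind-mounted output directory.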
        mkdir -p "$GITHUB_WORKSPACE/output"
        chmod 777 "$GITHUB_WORKSPACE/output"

        set +e
        docker run --rm \
          "${env_args[@]}" \
          -v "$GITHUB_WORKSPACE:/home/prowler/workspace" \
          -v "$GITHUB_WORKSPACE/output:/home/prowler/workspace/output" \
          -w /home/prowler/workspace \
          "prowlercloud/prowler:${INPUT_IMAGE_TAG}" \
          "$INPUT_PROVIDER" \
          --output-formats "${OUTPUT_FORMATS[@]}" \
          "${push_args[@]}" \
          "${EXTRA_FLAGS[@]}"
        exit_code=$?
        set -e

        # Exit code 3 = findings detected
        if [ "$exit_code" -eq 3 ] && [ "$INPUT_FAIL_ON_FINDINGS" != "true" ]; then
          echo "::notice::Prowler detected findings (exit code 3). Set fail-on-findings to 'true' to fail the workflow on findings."
          exit 0
        fi
        exit $exit_code

    - name: Upload scan results
      if: always()
      uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
      with:
        name: prowler-${{ inputs.provider }}
        path: output/
        retention-days: 30
        if-no-files-found: warn

    - name: Find SARIF file
      if: always() && inputs.upload-sarif == 'true'
      id: find-sarif
      shell: bash
      env:
        INPUT_SARIF_FILE: ${{ inputs.sarif-file }}
      run: |
        if [ -n "$INPUT_SARIF_FILE" ]; then
          echo "sarif_path=$INPUT_SARIF_FILE" >> "$GITHUB_OUTPUT"
        else
          sarif_file=$(find output/ -name '*.sarif' -type f | head -1)
          if [ -z "$sarif_file" ]; then
            echo "::warning::No .sarif file found in output/. Ensure 'sarif' is included in output-formats."
            echo "sarif_path=" >> "$GITHUB_OUTPUT"
          else
            echo "sarif_path=$sarif_file" >> "$GITHUB_OUTPUT"
          fi
        fi

    - name: Upload SARIF to GitHub Code Scanning
      if: always() && inputs.upload-sarif == 'true' && steps.find-sarif.outputs.sarif_path != ''
      uses: github/codeql-action/upload-sarif@d4b3ca9fa7f69d38bfcd667bdc45bc373d16277e # v4
      with:
        sarif_file: ${{ steps.find-sarif.outputs.sarif_path }}
        category: ${{ inputs.sarif-category }}

    - name: Write scan summary
      if: always()
      shell: bash
      env:
        INPUT_PROVIDER: ${{ inputs.provider }}
        INPUT_UPLOAD_SARIF: ${{ inputs.upload-sarif }}
        INPUT_PUSH_TO_CLOUD: ${{ inputs.push-to-cloud }}
        RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
        REPO_URL: ${{ github.server_url }}/${{ github.repository }}
        BRANCH: ${{ github.head_ref || github.ref_name }}
        GH_TOKEN: ${{ github.token }}
      run: |
        set +e

        # Build a link to the scan step in the workflow logs. Requires `actions: read`
        # on the caller's GITHUB_TOKEN; silently skips the link if unavailable.
        scan_step_url=""
        if [ -n "${GH_TOKEN:-}" ] && command -v gh >/dev/null 2>&1; then
          job_info=$(gh api \
            "repos/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}/attempts/${GITHUB_RUN_ATTEMPT:-1}/jobs" \
            --jq ".jobs[] | select(.runner_name == \"${RUNNER_NAME:-}\")" 2>/dev/null)
          if [ -n "$job_info" ]; then
            job_id=$(jq -r '.id // empty' <<<"$job_info")
            step_number=$(jq -r '[.steps[]? | select((.name // "") | test("Run Prowler scan"; "i")) | .number] | first // empty' <<<"$job_info")
            if [ -z "$step_number" ]; then
              step_number=$(jq -r '[.steps[]? | select(.status == "in_progress") | .number] | first // empty' <<<"$job_info")
            fi
            if [ -n "$job_id" ] && [ -n "$step_number" ]; then
              scan_step_url="${REPO_URL}/actions/runs/${GITHUB_RUN_ID}/job/${job_id}#step:${step_number}:1"
            elif [ -n "$job_id" ]; then
              scan_step_url="${REPO_URL}/actions/runs/${GITHUB_RUN_ID}/job/${job_id}"
            fi
          fi
        fi

        # Map provider code to a properly-cased display name.
        case "$INPUT_PROVIDER" in
          alibabacloud) provider_name="Alibaba Cloud" ;;
          aws) provider_name="AWS" ;;
          azure) provider_name="Azure" ;;
          cloudflare) provider_name="Cloudflare" ;;
          gcp) provider_name="GCP" ;;
          github) provider_name="GitHub" ;;
          googleworkspace) provider_name="Google Workspace" ;;
          iac) provider_name="IaC" ;;
          image) provider_name="Container Image" ;;
          kubernetes) provider_name="Kubernetes" ;;
          llm) provider_name="LLM" ;;
          m365) provider_name="Microsoft 365" ;;
          mongodbatlas) provider_name="MongoDB Atlas" ;;
          nhn) provider_name="NHN" ;;
          openstack) provider_name="OpenStack" ;;
          oraclecloud) provider_name="Oracle Cloud" ;;
          vercel) provider_name="Vercel" ;;
          *) provider_name="${INPUT_PROVIDER^}" ;;
        esac

        ocsf_file=$(find output/ -name '*.ocsf.json' -type f 2>/dev/null | head -1)

        {
          echo "## Prowler ${provider_name} Scan Summary"
          echo ""

          counts=""
          if [ -n "$ocsf_file" ] && [ -s "$ocsf_file" ]; then
            counts=$(jq -r '[
              length,
              ([.[] | select(.status_code == "FAIL")] | length),
              ([.[] | select(.status_code == "PASS")] | length),
              ([.[] | select(.status_code == "MUTED")] | length),
              ([.[] | select(.status_code == "FAIL" and .severity == "Critical")] | length),
              ([.[] | select(.status_code == "FAIL" and .severity == "High")] | length),
              ([.[] | select(.status_code == "FAIL" and .severity == "Medium")] | length),
              ([.[] | select(.status_code == "FAIL" and .severity == "Low")] | length),
              ([.[] | select(.status_code == "FAIL" and .severity == "Informational")] | length)
            ] | @tsv' "$ocsf_file" 2>/dev/null)
          fi

          if [ -n "$counts" ]; then
            read -r total fail pass muted critical high medium low info <<<"$counts"

            line="**${fail:-0} failing** · ${pass:-0} passing"
            [ "${muted:-0}" -gt 0 ] && line="${line} · ${muted} muted"
            echo "${line} — ${total:-0} checks total"
            echo ""
            echo "| Severity | Failing |"
            echo "|----------|---------|"
            echo "| ‼️ Critical | ${critical:-0} |"
            echo "| 🔴 High | ${high:-0} |"
            echo "| 🟠 Medium | ${medium:-0} |"
            echo "| 🔵 Low | ${low:-0} |"
            echo "| ⚪ Informational | ${info:-0} |"
            echo ""
          else
            echo "_No findings report was produced. Check the scan logs above._"
            echo ""
          fi

          if [ -n "$scan_step_url" ]; then
            echo "**Scan logs:** [view in workflow run](${scan_step_url})"
            echo ""
          fi

          echo "**Get the full report:** [\`prowler-${INPUT_PROVIDER}\` artifact](${RUN_URL}#artifacts)"

          if [ "$INPUT_UPLOAD_SARIF" = "true" ] && [ -n "$BRANCH" ]; then
            encoded_branch=$(jq -nr --arg b "$BRANCH" '$b|@uri')
            echo ""
            echo "**See results in GitHub Code Security:** [open alerts on \`${BRANCH}\`](${REPO_URL}/security/code-scanning?query=is%3Aopen+branch%3A${encoded_branch})"
          fi

          if [ "$INPUT_PUSH_TO_CLOUD" != "true" ]; then
            echo ""
            echo "---"
            echo ""
            echo "### Scale ${provider_name} security with Prowler Cloud ☁️"
            echo ""
            echo "Send this scan's findings to **[Prowler Cloud](https://cloud.prowler.com)** and get:"
            echo ""
            echo "- **Unified findings** across every cloud, SaaS provider (M365, Google Workspace, GitHub, MongoDB Atlas), IaC repo, Kubernetes cluster, and container image"
            echo "- **Posture over time** with alerts and notifications"
            echo "- **Prowler Lighthouse AI**: an agentic assistant that triages findings, explains root causes, and helps with remediation"
            echo "- **50+ compliance frameworks** mapped automatically"
            echo "- **Enterprise-ready platform**: SOC 2 Type 2, SSO/SAML, AWS Security Hub, S3 and Jira integrations"
            echo ""
            echo "**Get started in 3 steps:**"
            echo "1. Create an account at [cloud.prowler.com](https://cloud.prowler.com)"
            echo "2. Generate a Prowler Cloud API key ([docs](https://docs.prowler.com/user-guide/tutorials/prowler-app-import-findings#using-the-cli))"
            echo "3. Add \`PROWLER_CLOUD_API_KEY\` to your GitHub secrets and set \`push-to-cloud: true\` on this action"
            echo ""
            echo "See [prowler.com/pricing](https://prowler.com/pricing) for plan details."
          fi
        } >> "$GITHUB_STEP_SUMMARY"

+159
-6
@@ -2,23 +2,176 @@

All notable changes to the **Prowler API** are documented in this file.

## [1.24.0] (Prowler UNRELEASED)
## [1.28.0] (Prowler UNRELEASED)

### 🚀 Added

- Pin all unpinned dependencies to exact versions to prevent supply chain attacks and ensure reproducible builds [(#10469)](https://github.com/prowler-cloud/prowler/pull/10469)
- Filter RBAC role lookup by `tenant_id` to prevent cross-tenant privilege leak [(#10491)](https://github.com/prowler-cloud/prowler/pull/10491)
- `VALKEY_SCHEME`, `VALKEY_USERNAME`, and `VALKEY_PASSWORD` environment variables to configure Celery broker TLS/auth connection details for Valkey/ElastiCache [(#10420)](https://github.com/prowler-cloud/prowler/pull/10420)
- `Vercel` provider support [(#10190)](https://github.com/prowler-cloud/prowler/pull/10190)
- Finding groups list and latest endpoints support `sort=delta`, ordering by `new_count` then `changed_count` so groups with the most new findings rank highest [(#10606)](https://github.com/prowler-cloud/prowler/pull/10606)
- GIN index on `findings(categories, resource_services, resource_regions, resource_types)` to speed up `/api/v1/finding-groups` array filters [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)

### 🔄 Changed

- Remove orphaned `gin_resources_search_idx` declaration from `Resource.Meta.indexes` (DB index dropped in `0072_drop_unused_indexes`) [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)
- PDF compliance reports cap detail tables at 100 failed findings per check (configurable via `DJANGO_PDF_MAX_FINDINGS_PER_CHECK`) to bound worker memory on large scans [(#11160)](https://github.com/prowler-cloud/prowler/pull/11160)

---

## [1.27.2] (Prowler UNRELEASED)

### 🐞 Fixed

- Attack Paths: BEDROCK-001 and BEDROCK-002 now target roles trusting `bedrock-agentcore.amazonaws.com` instead of `bedrock.amazonaws.com`, eliminating false positives against regular Bedrock service roles (Agents, Knowledge Bases, model invocation) [(#11141)](https://github.com/prowler-cloud/prowler/pull/11141)

---

## [1.27.1] (Prowler v5.26.1)

### 🐞 Fixed

- `POST /api/v1/scans` was intermittently failing with `Scan matching query does not exist` in the `scan-perform` worker; the Celery task is now published via `transaction.on_commit` so the worker cannot read the Scan before the dispatch-wide transaction commits [(#11122)](https://github.com/prowler-cloud/prowler/pull/11122)

---

## [1.27.0] (Prowler v5.26.0)

### 🚀 Added

- `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929)

### 🔄 Changed

- ASD Essential Eight (AWS) compliance framework support [(#10982)](https://github.com/prowler-cloud/prowler/pull/10982)

### 🔐 Security

- `trivy` binary from 0.69.2 to 0.70.0 and `cryptography` from 46.0.6 to 46.0.7 (transitive via prowler SDK) in the API image for CVE-2026-33186 and CVE-2026-39892 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)

---

## [1.26.1] (Prowler v5.25.1)

### 🐞 Fixed

- Attack Paths: AWS scans no longer fail when enabled regions cannot be retrieved, and scans stuck in `scheduled` state are now cleaned up after the stale threshold [(#10917)](https://github.com/prowler-cloud/prowler/pull/10917)
- Scan report and compliance downloads now redirect to a presigned S3 URL instead of streaming through the API worker, preventing gunicorn timeouts on large files [(#10927)](https://github.com/prowler-cloud/prowler/pull/10927)

---

## [1.26.0] (Prowler v5.25.0)

### 🚀 Added

- CIS Benchmark PDF report generation for scans, exposing the latest CIS version per provider via `GET /scans/{id}/cis/{name}/` [(#10650)](https://github.com/prowler-cloud/prowler/pull/10650)
- `/overviews/resource-groups` (resource inventory), `/overviews/categories` and `/overviews/attack-surfaces` now reflect newly-muted findings without waiting for the next scan. The post-mute `reaggregate-all-finding-group-summaries` task now also dispatches `aggregate_scan_resource_group_summaries_task`, `aggregate_scan_category_summaries_task` and `aggregate_attack_surface_task` per latest scan of every `(provider, day)` pair, rebuilding `ScanGroupSummary`, `ScanCategorySummary` and `AttackSurfaceOverview` alongside the tables already covered in #10827 [(#10843)](https://github.com/prowler-cloud/prowler/pull/10843)
- Install zizmor v1.24.1 in API Docker image for GitHub Actions workflow scanning [(#10607)](https://github.com/prowler-cloud/prowler/pull/10607)

### 🔄 Changed

- Allow tenant owners to expel users from their organizations [(#10787)](https://github.com/prowler-cloud/prowler/pull/10787)
- `aggregate_findings`, `aggregate_attack_surface`, `aggregate_scan_resource_group_summaries` and `aggregate_scan_category_summaries` now upsert via `bulk_create(update_conflicts=True, ...)` instead of the prior `ignore_conflicts=True` / plain INSERT / `already backfilled` short-circuit. Re-runs triggered by the post-mute reaggregation pipeline no longer trip the `unique_*_per_scan` constraints nor silently drop updates, and are race-safe under concurrent writers (e.g. scan completion overlapping with a fresh mute rule) [(#10843)](https://github.com/prowler-cloud/prowler/pull/10843)
- Rename the scan-category and scan-resource-group summary aggregators from `backfill_*` to `aggregate_*` [(#10843)](https://github.com/prowler-cloud/prowler/pull/10843)

### 🐞 Fixed

- `generate_outputs_task` crashing with `KeyError` for compliance frameworks listed by `get_compliance_frameworks` but not loadable by `Compliance.get_bulk` [(#10903)](https://github.com/prowler-cloud/prowler/pull/10903)

---

## [1.25.4] (Prowler v5.24.4)

### 🚀 Added

- `DJANGO_SENTRY_TRACES_SAMPLE_RATE` env var (default `0.02`) enables Sentry performance tracing for the API [(#10873)](https://github.com/prowler-cloud/prowler/pull/10873)

### 🔄 Changed

- Attack Paths: Neo4j driver `connection_acquisition_timeout` is now configurable via `NEO4J_CONN_ACQUISITION_TIMEOUT` (default lowered from 120 s to 15 s) [(#10873)](https://github.com/prowler-cloud/prowler/pull/10873)

### 🐞 Fixed

- `/tmp/prowler_api_output` saturation in compliance report workers: the final `rmtree` in `generate_compliance_reports` now only waits on frameworks actually generated for the provider (so unsupported frameworks no longer leave a placeholder `results` entry that blocks cleanup), output directories are created lazily per enabled framework, and both `generate_compliance_reports` and `generate_outputs_task` run an opportunistic stale cleanup at task start with a 48h age threshold, a per-host `fcntl` throttle, a 50-deletions-per-run cap, and guards that protect EXECUTING scans and scans whose `output_location` still points to a local path (metadata lookups routed through the admin DB so RLS does not hide those rows) [(#10874)](https://github.com/prowler-cloud/prowler/pull/10874)

---

## [1.25.3] (Prowler v5.24.3)

### 🚀 Added

- `/overviews/findings`, `/overviews/findings-severity` and `/overviews/services` now reflect newly-muted findings without waiting for the next scan. The post-mute `reaggregate-all-finding-group-summaries` task was extended to re-run the same per-scan pipeline that scan completion runs (`ScanSummary`, `DailySeveritySummary`, `FindingGroupDailySummary`) on the latest scan of every `(provider, day)` pair, keeping the pre-aggregated tables in sync with `Finding.muted` updates [(#10827)](https://github.com/prowler-cloud/prowler/pull/10827)

### 🐞 Fixed

- Finding groups aggregated `status` now treats muted findings as resolved: a group is `FAIL` only while at least one non-muted FAIL remains, otherwise it is `PASS` (including fully-muted groups). The `filter[status]` filter and the `sort=status` ordering share the same semantics, keeping `status` consistent with `fail_count` and the orthogonal `muted` flag [(#10825)](https://github.com/prowler-cloud/prowler/pull/10825)
- `aggregate_findings` is now idempotent: it deletes the scan's existing `ScanSummary` rows before `bulk_create`, so re-runs (such as the post-mute reaggregation pipeline) no longer violate the `unique_scan_summary` constraint and no longer abort the downstream `DailySeveritySummary` / `FindingGroupDailySummary` recomputation for the affected scan [(#10827)](https://github.com/prowler-cloud/prowler/pull/10827)
- Attack Paths: Findings on AWS were silently dropped during the Neo4j merge for resources whose Cartography node is keyed by a short identifier (e.g. EC2 instances) rather than the full ARN [(#10839)](https://github.com/prowler-cloud/prowler/pull/10839)

---

## [1.25.2] (Prowler v5.24.2)

### 🔄 Changed

- Finding groups `/resources` endpoints now materialize the filtered finding IDs into a Python list before filtering `ResourceFindingMapping`, so PostgreSQL switches from a Merge Semi Join that read hundreds of thousands of RFM index entries to a Nested Loop Index Scan over `finding_id`. The `has_mappings.exists()` pre-check is removed, and a request-scoped cache deduplicates the finding-id round-trip across the helpers that build different RFM querysets [(#10816)](https://github.com/prowler-cloud/prowler/pull/10816)

### 🐞 Fixed

- `/finding-groups/latest/<check_id>/resources` now selects the latest completed scan per provider by `-completed_at` (then `-inserted_at`) instead of `-inserted_at`, matching the `/finding-groups/latest` summary path and the daily-summary upsert so overlapping scans no longer produce diverging `delta`/`new_count` between the two endpoints [(#10802)](https://github.com/prowler-cloud/prowler/pull/10802)

---

## [1.25.1] (Prowler v5.24.1)

### 🔄 Changed

- Attack Paths: Restore `SYNC_BATCH_SIZE` and `FINDINGS_BATCH_SIZE` defaults to 1000, upgrade Cartography to 0.135.0, enable Celery queue priority for cleanup task, rewrite Finding insertion, remove AWS graph cleanup and add timing logs [(#10729)](https://github.com/prowler-cloud/prowler/pull/10729)

### 🐞 Fixed

- Finding group resources endpoints now include findings without associated resources (orphaned IaC findings) as simulated resource rows, and return one row per finding when multiple findings share a resource [(#10708)](https://github.com/prowler-cloud/prowler/pull/10708)
- Attack Paths: Missing `tenant_id` filter while getting related findings after scan completes [(#10722)](https://github.com/prowler-cloud/prowler/pull/10722)
- Finding group counters `pass_count`, `fail_count` and `manual_count` now exclude muted findings [(#10753)](https://github.com/prowler-cloud/prowler/pull/10753)
- Silent data loss in `ResourceFindingMapping` bulk insert that left findings orphaned when `INSERT ... ON CONFLICT DO NOTHING` dropped rows without raising; added explicit `unique_fields` [(#10724)](https://github.com/prowler-cloud/prowler/pull/10724)
- `DELETE /tenants/{tenant_pk}/memberships/{id}` now deletes the expelled user's account when the removed membership was their last one, and blacklists every outstanding refresh token for that user so their existing sessions can no longer mint new access tokens [(#10787)](https://github.com/prowler-cloud/prowler/pull/10787)

---

## [1.25.0] (Prowler v5.24.0)

### 🔄 Changed

- Bump Poetry to `2.3.4` in Dockerfile and pre-commit hooks. Regenerate `api/poetry.lock` [(#10681)](https://github.com/prowler-cloud/prowler/pull/10681)
- Attack Paths: Remove dead `cleanup_findings` no-op and its supporting `prowler_finding_lastupdated` index [(#10684)](https://github.com/prowler-cloud/prowler/pull/10684)

### 🐞 Fixed

- Worker-beat race condition on cold start: replaced `sleep 15` with API service healthcheck dependency (Docker Compose) and init containers (Helm), aligned Gunicorn default port to `8080` [(#10603)](https://github.com/prowler-cloud/prowler/pull/10603)
- API container startup crash on Linux due to root-owned bind-mount preventing JWT key generation [(#10646)](https://github.com/prowler-cloud/prowler/pull/10646)

### 🔐 Security

- `pytest` from 8.2.2 to 9.0.3 to fix CVE-2025-71176 [(#10678)](https://github.com/prowler-cloud/prowler/pull/10678)

---

## [1.24.0] (Prowler v5.23.0)

### 🚀 Added

- RBAC role lookup filtered by `tenant_id` to prevent cross-tenant privilege leak [(#10491)](https://github.com/prowler-cloud/prowler/pull/10491)
- `VALKEY_SCHEME`, `VALKEY_USERNAME`, and `VALKEY_PASSWORD` environment variables to configure Celery broker TLS/auth connection details for Valkey/ElastiCache [(#10420)](https://github.com/prowler-cloud/prowler/pull/10420)
- `Vercel` provider support [(#10190)](https://github.com/prowler-cloud/prowler/pull/10190)
- Finding groups list and latest endpoints support `sort=delta`, ordering by `new_count` then `changed_count` so groups with the most new findings rank highest [(#10606)](https://github.com/prowler-cloud/prowler/pull/10606)
- Finding group resources endpoints (`/finding-groups/{check_id}/resources` and `/finding-groups/latest/{check_id}/resources`) now expose `finding_id` per row, pointing to the most recent matching Finding for each resource. UUIDv7 ordering guarantees `Max(finding__id)` resolves to the latest snapshot [(#10630)](https://github.com/prowler-cloud/prowler/pull/10630)
- Handle CIS and CISA SCuBA compliance frameworks for Google Workspace [(#10629)](https://github.com/prowler-cloud/prowler/pull/10629)
- Sort support for all finding group counter fields: `pass_muted_count`, `fail_muted_count`, `manual_muted_count`, and all `new_*`/`changed_*` status-mute breakdown counters [(#10655)](https://github.com/prowler-cloud/prowler/pull/10655)

### 🔄 Changed

- Finding groups list/latest/resources now expose `status` ∈ `{FAIL, PASS, MANUAL}` and `muted: bool` as orthogonal fields. The aggregated `status` reflects the underlying check outcome regardless of mute state, and `muted=true` signals that every finding in the group/resource is muted. New `manual_count` is exposed alongside `pass_count`/`fail_count`, plus `pass_muted_count`/`fail_muted_count`/`manual_muted_count` siblings so clients can isolate the muted half of each status. The `new_*`/`changed_*` deltas are now broken down by status and mute state via 12 new counters (`new_fail_count`, `new_fail_muted_count`, `new_pass_count`, `new_pass_muted_count`, `new_manual_count`, `new_manual_muted_count` and the matching `changed_*` set). New `filter[muted]=true|false` and `sort=status` (FAIL > PASS > MANUAL) / `sort=muted` are supported. `filter[status]=MUTED` is no longer accepted [(#10630)](https://github.com/prowler-cloud/prowler/pull/10630)
- Attack Paths: Periodic cleanup of stale scans with dead-worker detection via Celery inspect, marking orphaned `EXECUTING` scans as `FAILED` and recovering `graph_data_ready` [(#10387)](https://github.com/prowler-cloud/prowler/pull/10387)
- Attack Paths: Replace `_provider_id` property with `_Provider_{uuid}` label for provider isolation, add regex-based label injection for custom queries [(#10402)](https://github.com/prowler-cloud/prowler/pull/10402)

### 🐞 Fixed

- `reaggregate_all_finding_group_summaries_task` now refreshes finding group daily summaries for every `(provider, day)` combination instead of only the latest scan per provider, matching the unbounded scope of `mute_historical_findings_task`. Mute rule operations no longer leave older daily summaries drifting from the underlying muted findings [(#10630)](https://github.com/prowler-cloud/prowler/pull/10630)
- Finding groups list/latest now apply computed status/severity filters and finding-level prefilters (delta, region, service, category, resource group, scan, resource type), plus `check_title` support for sort/filter consistency [(#10428)](https://github.com/prowler-cloud/prowler/pull/10428)
- Populate compliance data inside `check_metadata` for findings, which was always returned as `null` [(#10449)](https://github.com/prowler-cloud/prowler/pull/10449)
- 403 error for admin users listing tenants due to roles query not using the admin database connection [(#10460)](https://github.com/prowler-cloud/prowler/pull/10460)

+22
-2
@@ -5,9 +5,12 @@ LABEL maintainer="https://github.com/prowler-cloud/api"
ARG POWERSHELL_VERSION=7.5.0
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}

ARG TRIVY_VERSION=0.69.2
ARG TRIVY_VERSION=0.70.0
ENV TRIVY_VERSION=${TRIVY_VERSION}

ARG ZIZMOR_VERSION=1.24.1
ENV ZIZMOR_VERSION=${ZIZMOR_VERSION}

# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
    wget \
@@ -22,6 +25,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
    libtool \
    libxslt1-dev \
    python3-dev \
    git \
    && rm -rf /var/lib/apt/lists/*

# Install PowerShell
@@ -57,6 +61,22 @@ RUN ARCH=$(uname -m) && \
    mkdir -p /tmp/.cache/trivy && \
    chmod 777 /tmp/.cache/trivy

# Install zizmor for GitHub Actions workflow scanning
RUN ARCH=$(uname -m) && \
    if [ "$ARCH" = "x86_64" ]; then \
        ZIZMOR_ARCH="x86_64-unknown-linux-gnu" ; \
    elif [ "$ARCH" = "aarch64" ]; then \
        ZIZMOR_ARCH="aarch64-unknown-linux-gnu" ; \
    else \
        echo "Unsupported architecture for zizmor: $ARCH" && exit 1 ; \
    fi && \
    wget --progress=dot:giga "https://github.com/zizmorcore/zizmor/releases/download/v${ZIZMOR_VERSION}/zizmor-${ZIZMOR_ARCH}.tar.gz" -O /tmp/zizmor.tar.gz && \
    mkdir -p /tmp/zizmor-extract && \
    tar zxf /tmp/zizmor.tar.gz -C /tmp/zizmor-extract && \
    mv /tmp/zizmor-extract/zizmor /usr/local/bin/zizmor && \
    chmod +x /usr/local/bin/zizmor && \
    rm -rf /tmp/zizmor.tar.gz /tmp/zizmor-extract

# Add prowler user
RUN addgroup --gid 1000 prowler && \
    adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
@@ -71,7 +91,7 @@ RUN mkdir -p /tmp/prowler_api_output
COPY pyproject.toml ./

RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir poetry
    pip install --no-cache-dir poetry==2.3.4

ENV PATH="/home/prowler/.local/bin:$PATH"

@@ -56,7 +56,6 @@ start_worker() {

start_worker_beat() {
    echo "Starting the worker-beat..."
    sleep 15
    poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
}
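
The `sleep 15` removed here is superseded, per the changelog entry for [#10603](https://github.com/prowler-cloud/prowler/pull/10603), by an API-service healthcheck dependency in Docker Compose (and init containers in Helm). A minimal Compose sketch of that pattern, with illustrative service names and probe:

```yaml
services:
  api:
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/"]  # illustrative probe
      interval: 5s
      retries: 10
  worker-beat:
    depends_on:
      api:
        condition: service_healthy  # start beat only once the API reports healthy
```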

Generated
+202
-150
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.3.4 and should not be changed by hand.

[[package]]
name = "about-time"
@@ -682,21 +682,21 @@ requests = ">=2.21.0,<3.0.0"

[[package]]
name = "alibabacloud-tea-openapi"
version = "0.4.1"
version = "0.4.4"
description = "Alibaba Cloud openapi SDK Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "alibabacloud_tea_openapi-0.4.1-py3-none-any.whl", hash = "sha256:e46bfa3ca34086d2c357d217a0b7284ecbd4b3bab5c88e075e73aec637b0e4a0"},
    {file = "alibabacloud_tea_openapi-0.4.1.tar.gz", hash = "sha256:2384b090870fdb089c3c40f3fb8cf0145b8c7d6c14abbac521f86a01abb5edaf"},
    {file = "alibabacloud_tea_openapi-0.4.4-py3-none-any.whl", hash = "sha256:cea6bc1fe35b0319a8752cb99eb0ecb0dab7ca1a71b99c12970ba0867410995f"},
    {file = "alibabacloud_tea_openapi-0.4.4.tar.gz", hash = "sha256:1b0917bc03cd49417da64945e92731716d53e2eb8707b235f54e45b7473221ce"},
]

[package.dependencies]
alibabacloud-credentials = ">=1.0.2,<2.0.0"
alibabacloud-gateway-spi = ">=0.0.2,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"
cryptography = ">=3.0.0,<45.0.0"
cryptography = {version = ">=3.0.0,<47.0.0", markers = "python_version >= \"3.8\""}
darabonba-core = ">=1.0.3,<2.0.0"

[[package]]
@@ -1526,19 +1526,19 @@ typing-extensions = ">=4.6.0"

[[package]]
name = "azure-mgmt-resource"
version = "23.3.0"
version = "24.0.0"
description = "Microsoft Azure Resource Management Client Library for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "azure_mgmt_resource-23.3.0-py3-none-any.whl", hash = "sha256:ab216ee28e29db6654b989746e0c85a1181f66653929d2cb6e48fba66d9af323"},
    {file = "azure_mgmt_resource-23.3.0.tar.gz", hash = "sha256:fc4f1fd8b6aad23f8af4ed1f913df5f5c92df117449dc354fea6802a2829fea4"},
    {file = "azure_mgmt_resource-24.0.0-py3-none-any.whl", hash = "sha256:27b32cd223e2784269f5a0db3c282042886ee4072d79cedc638438ece7cd0df4"},
    {file = "azure_mgmt_resource-24.0.0.tar.gz", hash = "sha256:cf6b8995fcdd407ac9ff1dd474087129429a1d90dbb1ac77f97c19b96237b265"},
]

[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
azure-mgmt-core = ">=1.5.0"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
@@ -1822,19 +1822,19 @@ crt = ["awscrt (==0.27.6)"]

[[package]]
name = "cartography"
version = "0.132.0"
version = "0.135.0"
description = "Explore assets and their relationships across your technical infrastructure."
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "cartography-0.132.0-py3-none-any.whl", hash = "sha256:c070aa51d0ab4479cb043cae70b35e7df49f2fb5f1fa95ccf10000bbeb952262"},
    {file = "cartography-0.132.0.tar.gz", hash = "sha256:7c6332bc57fd2629d7b83aee7bd95a7b2edb0d51ef746efa0461399e0b66625c"},
    {file = "cartography-0.135.0-py3-none-any.whl", hash = "sha256:c62c32a6917b8f23a8b98fe2b6c7c4a918b50f55918482966c4dae1cf5f538e1"},
    {file = "cartography-0.135.0.tar.gz", hash = "sha256:3f500cd22c3b392d00e8b49f62acc95fd4dcd559ce514aafe2eb8101133c7a49"},
]

[package.dependencies]
adal = ">=1.2.4"
aioboto3 = ">=13.0.0"
aioboto3 = ">=15.0.0"
azure-cli-core = ">=2.26.0"
azure-identity = ">=1.5.0"
azure-keyvault-certificates = ">=4.0.0"
@@ -1852,9 +1852,9 @@ azure-mgmt-keyvault = ">=10.0.0"
azure-mgmt-logic = ">=10.0.0"
azure-mgmt-monitor = ">=3.0.0"
azure-mgmt-network = ">=25.0.0"
azure-mgmt-resource = ">=10.2.0,<25.0.0"
azure-mgmt-resource = ">=24.0.0,<25"
azure-mgmt-security = ">=5.0.0"
azure-mgmt-sql = ">=3.0.1,<4"
azure-mgmt-sql = ">=3.0.1"
azure-mgmt-storage = ">=16.0.0"
azure-mgmt-synapse = ">=2.0.0"
azure-mgmt-web = ">=7.0.0"
@@ -1862,38 +1862,39 @@ azure-synapse-artifacts = ">=0.17.0"
backoff = ">=2.1.2"
boto3 = ">=1.15.1"
botocore = ">=1.18.1"
cloudflare = ">=4.1.0,<5.0.0"
cloudflare = ">=4.1.0"
crowdstrike-falconpy = ">=0.5.1"
cryptography = "*"
dnspython = ">=1.15.0"
duo-client = "*"
google-api-python-client = ">=1.7.8"
cryptography = ">=45.0.0"
dnspython = ">=2.0.0"
duo-client = ">=5.5.0"
google-api-python-client = ">=2.0.0"
google-auth = ">=2.37.0"
google-cloud-asset = ">=1.0.0"
google-cloud-resource-manager = ">=1.14.2"
httpx = ">=0.24.0"
kubernetes = ">=22.6.0"
marshmallow = ">=3.0.0rc7"
msgraph-sdk = "*"
marshmallow = ">=4.0.0"
msgraph-sdk = ">=1.53.0"
msrestazure = ">=0.6.4"
neo4j = ">=6.0.0"
oci = ">=2.71.0"
okta = "<1.0.0"
packageurl-python = "*"
packaging = "*"
packageurl-python = ">=0.17.0"
packaging = ">=26.0.0"
pagerduty = ">=4.0.1"
policyuniverse = ">=1.1.0.0"
PyJWT = {version = ">=2.0.0", extras = ["crypto"]}
python-dateutil = "*"
python-dateutil = ">=2.9.0"
python-digitalocean = ">=1.16.0"
pyyaml = ">=5.3.1"
requests = ">=2.22.0"
scaleway = ">=2.10.0"
slack-sdk = ">=3.37.0"
statsd = "*"
statsd = ">=4.0.0"
typer = ">=0.9.0"
types-aiobotocore-ecr = "*"
xmltodict = "*"
types-aiobotocore-ecr = ">=3.1.0"
workos = ">=5.44.0"
xmltodict = ">=1.0.0"

[[package]]
name = "celery"
@@ -2503,62 +2504,74 @@ dev = ["bandit", "coverage", "flake8", "pydocstyle", "pylint", "pytest", "pytest

[[package]]
name = "cryptography"
version = "44.0.3"
version = "46.0.7"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
groups = ["main", "dev"]
files = [
    {file = "cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88"},
    {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137"},
    {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c"},
    {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76"},
    {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359"},
    {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43"},
    {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01"},
    {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d"},
    {file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904"},
    {file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44"},
    {file = "cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d"},
    {file = "cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d"},
    {file = "cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f"},
    {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759"},
    {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645"},
    {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2"},
    {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54"},
    {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93"},
    {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c"},
    {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f"},
    {file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5"},
    {file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b"},
    {file = "cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028"},
    {file = "cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334"},
    {file = "cryptography-44.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cad399780053fb383dc067475135e41c9fe7d901a97dd5d9c5dfb5611afc0d7d"},
    {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:21a83f6f35b9cc656d71b5de8d519f566df01e660ac2578805ab245ffd8523f8"},
    {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fc3c9babc1e1faefd62704bb46a69f359a9819eb0292e40df3fb6e3574715cd4"},
    {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:e909df4053064a97f1e6565153ff8bb389af12c5c8d29c343308760890560aff"},
    {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:dad80b45c22e05b259e33ddd458e9e2ba099c86ccf4e88db7bbab4b747b18d06"},
    {file = "cryptography-44.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:479d92908277bed6e1a1c69b277734a7771c2b78633c224445b5c60a9f4bc1d9"},
    {file = "cryptography-44.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375"},
    {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647"},
    {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259"},
    {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff"},
    {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5"},
    {file = "cryptography-44.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c"},
    {file = "cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053"},
    {file = "cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb"},
    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b"},
    {file = "cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85"},
    {file = "cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e"},
    {file = "cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457"},
    {file = "cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b"},
    {file = "cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1"},
    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2"},
    {file = "cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e"},
    {file = "cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee"},
    {file = "cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298"},
    {file = "cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb"},
    {file = "cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006"},
    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0"},
    {file = "cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85"},
    {file = "cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e"},
    {file = "cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246"},
    {file = "cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3"},
    {file = "cryptography-46.0.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f"},
    {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15"},
    {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455"},
    {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65"},
    {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968"},
    {file = "cryptography-46.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4"},
    {file = "cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5"},
]

[package.dependencies]
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}

[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""]
pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
nox = ["nox[uv] (>=2024.4.15)"]
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]

[[package]]
@@ -2961,7 +2974,7 @@ files = [
[package.dependencies]
autopep8 = "*"
Django = ">=4.2"
gprof2dot = ">=2017.09.19"
gprof2dot = ">=2017.9.19"
sqlparse = "*"

[[package]]
@@ -3740,19 +3753,19 @@ urllib3 = ["packaging", "urllib3"]

[[package]]
name = "google-auth-httplib2"
version = "0.2.1"
version = "0.2.0"
description = "Google Authentication Library: httplib2 transport"
optional = false
python-versions = ">=3.7"
python-versions = "*"
groups = ["main"]
files = [
    {file = "google_auth_httplib2-0.2.1-py3-none-any.whl", hash = "sha256:1be94c611db91c01f9703e7f62b0a59bbd5587a95571c7b6fade510d648bc08b"},
    {file = "google_auth_httplib2-0.2.1.tar.gz", hash = "sha256:5ef03be3927423c87fb69607b42df23a444e434ddb2555b73b3679793187b7de"},
    {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"},
    {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"},
]

[package.dependencies]
google-auth = ">=1.32.0,<3.0.0"
httplib2 = ">=0.19.0,<1.0.0"
google-auth = "*"
httplib2 = ">=0.19.0"

[[package]]
name = "google-cloud-access-context-manager"
@@ -4569,7 +4582,7 @@ files = [

[package.dependencies]
attrs = ">=22.2.0"
jsonschema-specifications = ">=2023.03.6"
jsonschema-specifications = ">=2023.3.6"
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"

@@ -4777,7 +4790,7 @@ librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""]
mongodb = ["pymongo (==4.15.3)"]
msgpack = ["msgpack (==1.1.2)"]
pyro = ["pyro4 (==4.82)"]
qpid = ["qpid-python (==1.36.0-1)", "qpid-tools (==1.36.0-1)"]
qpid = ["qpid-python (==1.36.0.post1)", "qpid-tools (==1.36.0.post1)"]
redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2,<6.5)"]
slmq = ["softlayer_messaging (>=1.0.3)"]
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
@@ -4798,7 +4811,7 @@ files = [
]

[package.dependencies]
certifi = ">=14.05.14"
certifi = ">=14.5.14"
durationpy = ">=0.7"
google-auth = ">=1.0.1"
oauthlib = ">=3.2.2"
@@ -5181,24 +5194,16 @@ files = [

[[package]]
name = "marshmallow"
version = "3.26.2"
version = "4.3.0"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["main", "dev"]
files = [
    {file = "marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73"},
    {file = "marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57"},
    {file = "marshmallow-4.3.0-py3-none-any.whl", hash = "sha256:46c4fe6984707e3cbd485dfebbf0a59874f58d695aad05c1668d15e8c6e13b46"},
    {file = "marshmallow-4.3.0.tar.gz", hash = "sha256:fb43c53b3fe240b8f6af37223d6ef1636f927ad9bea8ab323afad95dff090880"},
]

[package.dependencies]
packaging = ">=17.0"

[package.extras]
dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
tests = ["pytest", "simplejson"]

[[package]]
name = "matplotlib"
version = "3.10.8"
@@ -5492,14 +5497,14 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]

[[package]]
name = "msgraph-sdk"
version = "1.23.0"
version = "1.55.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "msgraph_sdk-1.23.0-py3-none-any.whl", hash = "sha256:58e0047b4ca59fd82022c02cd73fec0170a3d84f3b76721e3db2a0314df9a58a"},
    {file = "msgraph_sdk-1.23.0.tar.gz", hash = "sha256:6dd1ba9a46f5f0ce8599fd9610133adbd9d1493941438b5d3632fce9e55ed607"},
    {file = "msgraph_sdk-1.55.0-py3-none-any.whl", hash = "sha256:c8e68ebc4b88af5111de312e7fa910a4e76ddf48a4534feadb1fb8a411c48cfc"},
    {file = "msgraph_sdk-1.55.0.tar.gz", hash = "sha256:6df691a31954a050d26b8a678968017e157d940fb377f2a8a4e17a9741b98756"},
]

[package.dependencies]
@@ -5925,23 +5930,24 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]

[[package]]
name = "oci"
version = "2.160.3"
version = "2.169.0"
description = "Oracle Cloud Infrastructure Python SDK"
optional = false
python-versions = "*"
groups = ["main"]
files = [
    {file = "oci-2.160.3-py3-none-any.whl", hash = "sha256:858bff3e697098bdda44833d2476bfb4632126f0182178e7dbde4dbd156d71f0"},
    {file = "oci-2.160.3.tar.gz", hash = "sha256:57514889be3b713a8385d86e3ba8a33cf46e3563c2a7e29a93027fb30b8a2537"},
    {file = "oci-2.169.0-py3-none-any.whl", hash = "sha256:c71bb5143f307791082b3e33cc1545c2490a518cfed85ab1948ef5107c36d30b"},
    {file = "oci-2.169.0.tar.gz", hash = "sha256:f3c5fff00b01783b5325ea7b13bf140053ec1e9f41da20bfb9c8a349ee7662fa"},
]

[package.dependencies]
certifi = "*"
circuitbreaker = {version = ">=1.3.1,<3.0.0", markers = "python_version >= \"3.7\""}
cryptography = ">=3.2.1,<46.0.0"
pyOpenSSL = ">=17.5.0,<25.0.0"
cryptography = ">=3.2.1,<47.0.0"
pyOpenSSL = ">=17.5.0,<27.0.0"
python-dateutil = ">=2.5.3,<3.0.0"
pytz = ">=2016.10"
urllib3 = {version = ">=2.6.3", markers = "python_version >= \"3.10.0\""}

[package.extras]
adk = ["docstring-parser (>=0.16) ; python_version >= \"3.10\" and python_version < \"4\"", "mcp (>=1.6.0) ; python_version >= \"3.10\" and python_version < \"4\"", "pydantic (>=2.10.6) ; python_version >= \"3.10\" and python_version < \"4\"", "rich (>=13.9.4) ; python_version >= \"3.10\" and python_version < \"4\""]
@@ -6445,6 +6451,33 @@ docs = ["sphinx (>=1.7.1)"]
redis = ["redis"]
tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"]

[[package]]
name = "prek"
version = "0.3.9"
description = "A Git hook manager written in Rust, designed as a drop-in alternative to pre-commit."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "prek-0.3.9-py3-none-linux_armv6l.whl", hash = "sha256:3ed793d51bfaa27bddb64d525d7acb77a7c8644f549412d82252e3eb0b88aad8"},
    {file = "prek-0.3.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:399c58400c0bd0b82a93a3c09dc1bfd88d8d0cfb242d414d2ed247187b06ead1"},
    {file = "prek-0.3.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e2ea1ffb124e92f081b8e2ca5b5a623a733efb3be0c5b1f4b7ffe2ee17d1f20c"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:aaf639f95b7301639298311d8d44aad0d0b4864e9736083ad3c71ce9765d37ab"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff104863b187fa443ea8451ca55d51e2c6e94f99f00d88784b5c3c4c623f1ebe"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:039ecaf87c63a3e67cca645ebd5bc5eb6aafa6c9d929e9a27b2921e7849d7ef9"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3bde2a3d045705095983c7f78ba04f72a7565fe1c2b4e85f5628502a254754ff"},
    {file = "prek-0.3.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28a0960a21543563e2c8e19aaad176cc8423a87aac3c914d0f313030d7a9244a"},
    {file = "prek-0.3.9-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:0dfb5d5171d7523271909246ee306b4dc3d5b63752e7dd7c7e8a8908fc9490d1"},
    {file = "prek-0.3.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:82b791bd36c1430c84d3ae7220a85152babc7eaf00f70adcb961bd594e756ba3"},
    {file = "prek-0.3.9-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:6eac6d2f736b041118f053a1487abed468a70dd85a8688eaf87bb42d3dcecf20"},
    {file = "prek-0.3.9-py3-none-musllinux_1_1_i686.whl", hash = "sha256:5517e46e761367a3759b3168eabc120840ffbca9dfbc53187167298a98f87dc4"},
    {file = "prek-0.3.9-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:92024778cf78683ca32687bb249ab6a7d5c33887b5ee1d1a9f6d0c14228f4cf3"},
    {file = "prek-0.3.9-py3-none-win32.whl", hash = "sha256:7f89c55e5f480f5d073769e319924ad69d4bf9f98c5cb46a83082e26e634c958"},
    {file = "prek-0.3.9-py3-none-win_amd64.whl", hash = "sha256:7722f3372eaa83b147e70a43cb7b9fe2128c13d0c78d8a1cdbf2a8ec2ee071eb"},
    {file = "prek-0.3.9-py3-none-win_arm64.whl", hash = "sha256:0bced6278d6cc8a4b46048979e36bc9da034611dc8facd77ab123177b833a929"},
    {file = "prek-0.3.9.tar.gz", hash = "sha256:f82b92d81f42f1f90a47f5fbbf492373e25ef1f790080215b2722dd6da66510e"},
]

[[package]]
name = "prompt-toolkit"
version = "3.0.52"
@@ -6632,7 +6665,7 @@ files = [

[[package]]
name = "prowler"
version = "5.23.0"
version = "5.26.0"
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
optional = false
python-versions = ">=3.10,<3.13"
@@ -6652,7 +6685,7 @@ alibabacloud-rds20140815 = "12.0.0"
alibabacloud_sas20181203 = "6.1.0"
alibabacloud-sls20201230 = "5.9.0"
alibabacloud_sts20150401 = "1.1.6"
alibabacloud_tea_openapi = "0.4.1"
alibabacloud_tea_openapi = "0.4.4"
alibabacloud_vpc20160428 = "6.13.0"
alive-progress = "3.3.0"
awsipranges = "0.3.3"
@@ -6674,7 +6707,7 @@ azure-mgmt-postgresqlflexibleservers = "1.1.0"
azure-mgmt-rdbms = "10.1.0"
azure-mgmt-recoveryservices = "3.1.0"
azure-mgmt-recoveryservicesbackup = "9.2.0"
azure-mgmt-resource = "23.3.0"
azure-mgmt-resource = "24.0.0"
azure-mgmt-search = "9.1.0"
azure-mgmt-security = "7.0.0"
azure-mgmt-sql = "3.0.1"
@@ -6687,29 +6720,29 @@ boto3 = "1.40.61"
botocore = "1.40.61"
cloudflare = "4.3.1"
colorama = "0.4.6"
cryptography = "44.0.3"
cryptography = "46.0.7"
dash = "3.1.1"
dash-bootstrap-components = "2.0.3"
defusedxml = ">=0.7.1"
defusedxml = "0.7.1"
detect-secrets = "1.5.0"
dulwich = "0.23.0"
google-api-python-client = "2.163.0"
google-auth-httplib2 = ">=0.1,<0.3"
google-auth-httplib2 = "0.2.0"
h2 = "4.3.0"
jsonschema = "4.23.0"
kubernetes = "32.0.1"
markdown = "3.10.2"
microsoft-kiota-abstractions = "1.9.2"
msgraph-sdk = "1.23.0"
msgraph-sdk = "1.55.0"
numpy = "2.0.2"
oci = "2.160.3"
oci = "2.169.0"
openstacksdk = "4.2.0"
pandas = "2.2.3"
py-iam-expand = "0.1.0"
py-ocsf-models = "0.8.1"
pydantic = ">=2.0,<3.0"
pydantic = "2.12.5"
pygithub = "2.8.0"
python-dateutil = ">=2.9.0.post0,<3.0.0"
python-dateutil = "2.9.0.post0"
pytz = "2025.1"
schema = "0.7.5"
shodan = "1.31.0"
@@ -6722,7 +6755,7 @@ uuid6 = "2024.7.10"
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
reference = "master"
resolved_reference = "6ac90eb1b58590b6f2f51645dbef17b9231053f4"
resolved_reference = "16798e293da365965120961e6539e3a9756564f9"

[[package]]
name = "psutil"
@@ -6887,14 +6920,14 @@ pydantic = ">=2.12.0,<3.0.0"

[[package]]
name = "pyasn1"
version = "0.6.2"
version = "0.6.3"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf"},
    {file = "pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b"},
    {file = "pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde"},
    {file = "pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf"},
]

[[package]]
@@ -7114,14 +7147,14 @@ urllib3 = ">=1.26.0"

[[package]]
name = "pygments"
version = "2.19.2"
version = "2.20.0"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
    {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
    {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
    {file = "pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176"},
    {file = "pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f"},
]

[package.extras]
@@ -7129,14 +7162,14 @@ windows-terminal = ["colorama (>=0.4.6)"]

[[package]]
name = "pyjwt"
version = "2.11.0"
version = "2.12.1"
description = "JSON Web Token implementation in Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469"},
    {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"},
    {file = "pyjwt-2.12.1-py3-none-any.whl", hash = "sha256:28ca37c070cad8ba8cd9790cd940535d40274d22f80ab87f3ac6a713e6e8454c"},
    {file = "pyjwt-2.12.1.tar.gz", hash = "sha256:c74a7a2adf861c04d002db713dd85f84beb242228e671280bf709d765b03672b"},
]

[package.dependencies]
@@ -7161,7 +7194,7 @@ files = [
]

[package.dependencies]
astroid = ">=3.2.2,<=3.3.0-dev0"
astroid = ">=3.2.2,<=3.3.0.dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
    {version = ">=0.3.7", markers = "python_version >= \"3.12\""},
@@ -7183,7 +7216,7 @@ description = "The MSALRuntime Python Interop Package"
optional = false
python-versions = ">=3.6"
groups = ["main"]
markers = "(platform_system == \"Windows\" or platform_system == \"Darwin\" or platform_system == \"Linux\") and sys_platform == \"win32\""
markers = "sys_platform == \"win32\" and (platform_system == \"Windows\" or platform_system == \"Darwin\" or platform_system == \"Linux\")"
files = [
    {file = "pymsalruntime-0.18.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0c22e2e83faa10de422bbfaacc1bb2887c9025ee8a53f0fc2e4f7db01c4a7b66"},
    {file = "pymsalruntime-0.18.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8ce2944a0f944833d047bb121396091e00287e2b6373716106da86ea99abf379"},
@@ -7261,18 +7294,19 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "

[[package]]
name = "pyopenssl"
version = "24.3.0"
version = "26.0.0"
description = "Python wrapper module around the OpenSSL library"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"},
    {file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"},
    {file = "pyopenssl-26.0.0-py3-none-any.whl", hash = "sha256:df94d28498848b98cc1c0ffb8ef1e71e40210d3b0a8064c9d29571ed2904bf81"},
    {file = "pyopenssl-26.0.0.tar.gz", hash = "sha256:f293934e52936f2e3413b89c6ce36df66a0b34ae1ea3a053b8c5020ff2f513fc"},
]

[package.dependencies]
cryptography = ">=41.0.5,<45"
cryptography = ">=46.0.0,<47"
typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""}

[package.extras]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"]
@@ -7324,24 +7358,25 @@ files = [

[[package]]
name = "pytest"
version = "8.2.2"
version = "9.0.3"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.10"
groups = ["main", "dev"]
files = [
    {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
    {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
    {file = "pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9"},
    {file = "pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c"},
]

[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.5,<2.0"
colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
iniconfig = ">=1.0.1"
packaging = ">=22"
pluggy = ">=1.5,<2"
pygments = ">=2.7.2"

[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]

[[package]]
name = "pytest-celery"
@@ -7877,26 +7912,26 @@ shaping = ["uharfbuzz"]

[[package]]
name = "requests"
version = "2.32.5"
version = "2.33.1"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["main", "dev"]
files = [
    {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"},
    {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"},
    {file = "requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a"},
    {file = "requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517"},
]

[package.dependencies]
certifi = ">=2017.4.17"
certifi = ">=2023.5.7"
charset_normalizer = ">=2,<4"
idna = ">=2.5,<4"
PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""}
urllib3 = ">=1.21.1,<3"
urllib3 = ">=1.26,<3"

[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<8)"]

[[package]]
name = "requests-file"
@@ -8174,10 +8209,10 @@ files = [
]

[package.dependencies]
botocore = ">=1.37.4,<2.0a.0"
botocore = ">=1.37.4,<2.0a0"

[package.extras]
crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"]
crt = ["botocore[crt] (>=1.37.4,<2.0a0)"]

[[package]]
name = "safety"
@@ -8779,6 +8814,23 @@ markupsafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]

[[package]]
name = "workos"
version = "6.0.4"
description = "WorkOS Python Client"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "workos-6.0.4-py3-none-any.whl", hash = "sha256:548668b3702673536f853ba72a7b5bbbc269e467aaf9ac4f477b6e0177df5e21"},
    {file = "workos-6.0.4.tar.gz", hash = "sha256:b0bfe8fd212b8567422c4ea3732eb33608794033eb3a69900c6b04db183c32d6"},
]

[package.dependencies]
cryptography = ">=46.0,<47.0"
httpx = ">=0.28,<1.0"
pyjwt = ">=2.12,<3.0"

[[package]]
name = "wrapt"
version = "1.17.3"
@@ -9372,4 +9424,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "167d4549788b8bc8bb7772b9a81ade1eab73d8f354251a8d6af4901223cc7f67"
content-hash = "a3ab982d11a87d951ff15694d2ca7fd51f1f51a451abb0baa067ccf6966367a8"

@@ -38,7 +38,7 @@ dependencies = [
    "matplotlib (==3.10.8)",
    "reportlab (==4.4.10)",
    "neo4j (==6.1.0)",
    "cartography (==0.132.0)",
    "cartography (==0.135.0)",
    "gevent (==25.9.1)",
    "werkzeug (==3.1.7)",
    "sqlparse (==0.5.5)",
@@ -50,7 +50,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.24.0"
version = "1.28.0"

[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -62,10 +62,10 @@ django-silk = "5.3.2"
docker = "7.1.0"
filelock = "3.20.3"
freezegun = "1.5.1"
marshmallow = "==3.26.2"
mypy = "1.10.1"
prek = "0.3.9"
pylint = "3.2.5"
pytest = "8.2.2"
pytest = "9.0.3"
pytest-cov = "5.0.0"
pytest-django = "4.8.0"
pytest-env = "1.1.3"

@@ -52,7 +52,7 @@ class ApiConfig(AppConfig):
            "check_and_fix_socialaccount_sites_migration",
        ]

        # Skip Neo4j initialization during tests, some Django commands, and Celery
        # Skip eager Neo4j init for tests, some Django commands, and Celery (prefork pool: driver must stay lazy, no post_fork hook)
        if getattr(settings, "TESTING", False) or (
            len(sys.argv) > 1
            and (
@@ -64,7 +64,7 @@ class ApiConfig(AppConfig):
            )
        ):
            logger.info(
                "Skipping Neo4j initialization because tests, some Django commands or Celery"
                "Skipping eager Neo4j init: tests, some Django commands, or Celery prefork pool (driver stays lazy)"
            )

        else:

@@ -28,6 +28,7 @@ READ_QUERY_TIMEOUT_SECONDS = env.int(
    "ATTACK_PATHS_READ_QUERY_TIMEOUT_SECONDS", default=30
)
MAX_CUSTOM_QUERY_NODES = env.int("ATTACK_PATHS_MAX_CUSTOM_QUERY_NODES", default=250)
CONN_ACQUISITION_TIMEOUT = env.int("NEO4J_CONN_ACQUISITION_TIMEOUT", default=15)
READ_EXCEPTION_CODES = [
    "Neo.ClientError.Statement.AccessMode",
    "Neo.ClientError.Procedure.ProcedureNotFound",
@@ -62,7 +63,7 @@ def init_driver() -> neo4j.Driver:
        auth=(config["USER"], config["PASSWORD"]),
        keep_alive=True,
        max_connection_lifetime=7200,
        connection_acquisition_timeout=120,
        connection_acquisition_timeout=CONN_ACQUISITION_TIMEOUT,
        max_connection_pool_size=50,
    )
    _driver.verify_connectivity()

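Note on the hunk above: the tests later in this diff describe the driver module as a lazily created singleton with double-checked locking and an atexit cleanup hook. A minimal standalone sketch of that pattern (the lock name `_driver_lock` is an assumption; `_driver`, `init_driver`, and `close_driver` mirror the identifiers the tests exercise, and the connection details are placeholders):

import atexit
import threading

import neo4j

_driver: neo4j.Driver | None = None
_driver_lock = threading.Lock()


def init_driver() -> neo4j.Driver:
    global _driver
    if _driver is None:  # fast path, checked without the lock
        with _driver_lock:
            if _driver is None:  # re-check under the lock
                _driver = neo4j.GraphDatabase.driver(
                    "bolt://localhost:7687",
                    auth=("neo4j", "password"),
                    connection_acquisition_timeout=15,
                )
                atexit.register(close_driver)  # registered exactly once per process
    return _driver


def close_driver() -> None:
    global _driver
    if _driver is not None:
        try:
            _driver.close()
        finally:
            _driver = None  # cleared even if close() raises
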
@@ -484,8 +484,8 @@ AWS_BEDROCK_PRIVESC_PASSROLE_CODE_INTERPRETER = AttackPathsQueryDefinition(
        OR action = '*'
    )

    // Find roles that trust Bedrock service (can be passed to Bedrock)
    MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
    // Find roles that trust the Bedrock AgentCore service (can be passed to a code interpreter)
    MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
    WHERE any(resource IN stmt_passrole.resource WHERE
        resource = '*'
        OR target_role.arn CONTAINS resource
@@ -536,8 +536,8 @@ AWS_BEDROCK_PRIVESC_INVOKE_CODE_INTERPRETER = AttackPathsQueryDefinition(
        OR action = '*'
    )

    // Find roles that trust Bedrock service (already attached to existing code interpreters)
    MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
    // Find roles that trust the Bedrock AgentCore service (already attached to existing code interpreters)
    MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})

    WITH collect(path_principal) + collect(path_target) AS paths
    UNWIND paths AS p

@@ -1,7 +1,6 @@
from collections.abc import Iterable, Mapping

from api.models import Provider
from prowler.config.config import get_available_compliance_frameworks
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata

@@ -95,12 +94,12 @@ PROWLER_CHECKS = LazyChecksMapping()


def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[str]:
    """
    Retrieve and cache the list of available compliance frameworks for a specific cloud provider.
    """List compliance frameworks the API can load for `provider_type`.

    This function lazily loads and caches the available compliance frameworks (e.g., CIS, MITRE, ISO)
    for each provider type (AWS, Azure, GCP, etc.) on first access. Subsequent calls for the same
    provider will return the cached result.
    The list is sourced from `Compliance.get_bulk` so that the names
    returned here are guaranteed to be loadable by the bulk loader. This
    prevents downstream key mismatches (e.g. CSV report generation iterating
    framework names and looking them up in the bulk dict).

    Args:
        provider_type (Provider.ProviderChoices): The cloud provider type for which to retrieve
@@ -112,8 +111,8 @@ def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[s
    """
    global AVAILABLE_COMPLIANCE_FRAMEWORKS
    if provider_type not in AVAILABLE_COMPLIANCE_FRAMEWORKS:
        AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = (
            get_available_compliance_frameworks(provider_type)
        AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = list(
            Compliance.get_bulk(provider_type).keys()
        )

    return AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type]

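Why sourcing the listing from `Compliance.get_bulk` matters: downstream consumers index the bulk dict by the listed names. A hypothetical sketch of that consumer pattern (the loop body is illustrative, not the actual report code):

def iter_frameworks(provider_type):
    # Every listed name is a key of the bulk dict by construction, so this
    # lookup can no longer raise KeyError (e.g. for 'csa_ccm_4.0').
    bulk = Compliance.get_bulk(provider_type)
    for framework in get_compliance_frameworks(provider_type):
        yield framework, bulk[framework]
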
@@ -330,6 +330,7 @@ class MembershipFilter(FilterSet):
        model = Membership
        fields = {
            "tenant": ["exact"],
            "user": ["exact"],
            "role": ["exact"],
            "date_joined": ["date", "gte", "lte"],
        }
@@ -1115,13 +1116,14 @@ class FindingGroupAggregatedComputedFilter(FilterSet):
    STATUS_CHOICES = (
        ("FAIL", "Fail"),
        ("PASS", "Pass"),
        ("MUTED", "Muted"),
        ("MANUAL", "Manual"),
    )

    status = ChoiceFilter(method="filter_status", choices=STATUS_CHOICES)
    status__in = CharInFilter(method="filter_status_in", lookup_expr="in")
    severity = ChoiceFilter(method="filter_severity", choices=SeverityChoices)
    severity__in = CharInFilter(method="filter_severity_in", lookup_expr="in")
    muted = BooleanFilter(field_name="muted")
    include_muted = BooleanFilter(method="filter_include_muted")

    def filter_status(self, queryset, name, value):
@@ -1198,7 +1200,7 @@ class FindingGroupAggregatedComputedFilter(FilterSet):
        if value is True:
            return queryset
        # include_muted=false: exclude fully-muted groups
        return queryset.exclude(fail_count=0, pass_count=0, muted_count__gt=0)
        return queryset.exclude(muted=True)


class ProviderSecretFilter(FilterSet):

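The `include_muted=false` branch above switches from inferring "fully muted" out of the count columns to the precomputed `muted` flag. Roughly equivalent queryset spellings, side by side (`qs` stands in for the aggregated queryset being filtered):

# Old: a group was hidden when it had no PASS/FAIL findings but >= 1 muted one.
visible = qs.exclude(fail_count=0, pass_count=0, muted_count__gt=0)
# New: the aggregator already computed whether every finding in the group is muted.
visible = qs.exclude(muted=True)
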
@@ -0,0 +1,95 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0087_vercel_provider"),
    ]

    operations = [
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="manual_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="pass_muted_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="fail_muted_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="manual_muted_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="muted",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="new_fail_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="new_fail_muted_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="new_pass_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="new_pass_muted_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="new_manual_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="new_manual_muted_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="changed_fail_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="changed_fail_muted_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="changed_pass_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="changed_pass_muted_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="changed_manual_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="findinggroupdailysummary",
            name="changed_manual_muted_count",
            field=models.IntegerField(default=0),
        ),
    ]
@@ -0,0 +1,31 @@
from django.db import migrations
from tasks.tasks import backfill_finding_group_summaries_task

from api.db_router import MainRouter
from api.rls import Tenant


def trigger_backfill_task(apps, schema_editor):
    """
    Re-dispatch the finding-group backfill task for every tenant so the new
    `manual_count` and `muted` columns added in 0088 get populated from the
    last 10 days of completed scans.

    The aggregator (`aggregate_finding_group_summaries`) recomputes every
    column on each call, so it back-populates the new fields without touching
    the existing ones beyond a normal upsert.
    """
    tenant_ids = Tenant.objects.using(MainRouter.admin_db).values_list("id", flat=True)

    for tenant_id in tenant_ids:
        backfill_finding_group_summaries_task.delay(tenant_id=str(tenant_id), days=10)


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0088_finding_group_status_muted_fields"),
    ]

    operations = [
        migrations.RunPython(trigger_backfill_task, migrations.RunPython.noop),
    ]
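The safety argument in the docstring above is the classic recompute-and-upsert shape. A hedged sketch of what the aggregator presumably does per (provider, check, day) row; the function, field, and parameter names here are illustrative, not the actual aggregator code:

def upsert_summary(tenant_id, provider_id, check_id, day, computed: dict) -> None:
    # Recomputing every column means a re-run only rewrites identical values
    # for the old columns while filling in the newly added ones.
    FindingGroupDailySummary.objects.update_or_create(
        tenant_id=tenant_id,
        provider_id=provider_id,
        check_id=check_id,
        date=day,
        defaults=computed,
    )
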
@@ -0,0 +1,23 @@
from django.db import migrations

TASK_NAME = "attack-paths-cleanup-stale-scans"


def set_cleanup_priority(apps, schema_editor):
    PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
    PeriodicTask.objects.filter(name=TASK_NAME).update(priority=0)


def unset_cleanup_priority(apps, schema_editor):
    PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
    PeriodicTask.objects.filter(name=TASK_NAME).update(priority=None)


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0089_backfill_finding_group_status_muted"),
    ]

    operations = [
        migrations.RunPython(set_cleanup_priority, unset_cleanup_priority),
    ]
@@ -0,0 +1,31 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0090_attack_paths_cleanup_priority"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_arrays_idx",
                columns="categories, resource_services, resource_regions, resource_types",
                method="GIN",
                all_partitions=True,
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_arrays_idx",
            ),
        )
    ]
@@ -0,0 +1,73 @@
import django.contrib.postgres.indexes
from django.db import migrations

INDEX_NAME = "gin_find_arrays_idx"
PARENT_TABLE = "findings"


def create_parent_and_attach(apps, schema_editor):
    with schema_editor.connection.cursor() as cursor:
        # Idempotent: the parent index may already exist if it was created
        # manually on an environment before this migration ran.
        cursor.execute(
            f"CREATE INDEX IF NOT EXISTS {INDEX_NAME} ON ONLY {PARENT_TABLE} "
            f"USING gin (categories, resource_services, resource_regions, resource_types)"
        )
        cursor.execute(
            "SELECT inhrelid::regclass::text "
            "FROM pg_inherits "
            "WHERE inhparent = %s::regclass",
            [PARENT_TABLE],
        )
        for (partition,) in cursor.fetchall():
            child_idx = f"{partition.replace('.', '_')}_{INDEX_NAME}"
            # ALTER INDEX ... ATTACH PARTITION has no IF NOT ATTACHED clause,
            # so check pg_inherits first to keep the migration re-runnable.
            cursor.execute(
                """
                SELECT 1
                FROM pg_inherits i
                JOIN pg_class p ON p.oid = i.inhparent
                JOIN pg_class c ON c.oid = i.inhrelid
                WHERE p.relname = %s AND c.relname = %s
                """,
                [INDEX_NAME, child_idx],
            )
            if cursor.fetchone() is None:
                cursor.execute(f"ALTER INDEX {INDEX_NAME} ATTACH PARTITION {child_idx}")


def drop_parent_index(apps, schema_editor):
    with schema_editor.connection.cursor() as cursor:
        cursor.execute(f"DROP INDEX IF EXISTS {INDEX_NAME}")


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0091_findings_arrays_gin_index_partitions"),
    ]

    operations = [
        migrations.SeparateDatabaseAndState(
            state_operations=[
                migrations.AddIndex(
                    model_name="finding",
                    index=django.contrib.postgres.indexes.GinIndex(
                        fields=[
                            "categories",
                            "resource_services",
                            "resource_regions",
                            "resource_types",
                        ],
                        name=INDEX_NAME,
                    ),
                ),
            ],
            database_operations=[
                migrations.RunPython(
                    create_parent_and_attach,
                    reverse_code=drop_parent_index,
                ),
            ],
        ),
    ]
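A possible post-migration sanity check (assumes PostgreSQL 12+, where `pg_partition_tree` exists): once every partition index is attached, the parent index stops being marked invalid, and listing the index tree confirms the parent picked up each partition. The helper name below is illustrative:

from django.db import connection


def gin_index_tree(index_name: str = "gin_find_arrays_idx") -> list:
    # Returns (relation, isleaf) pairs; the single non-leaf row is the parent index.
    with connection.cursor() as cursor:
        cursor.execute(
            "SELECT relid::text, isleaf FROM pg_partition_tree(%s::regclass)",
            [index_name],
        )
        return cursor.fetchall()
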
@@ -595,10 +595,40 @@ class Scan(RowLevelSecurityProtectedModel):
    objects = ActiveProviderManager()
    all_objects = models.Manager()

    _SCOPING_SCANNER_ARG_KEYS_CACHE: tuple[str, ...] | None = None

    @classmethod
    def get_scoping_scanner_arg_keys(cls) -> tuple[str, ...]:
        """Return the scanner_args keys that mark a scan as scoped.

        Derived from ``prowler.lib.scan.scan.Scan.__init__`` so the API stays
        in sync with whatever the SDK actually accepts as filters. Cached at
        class level — the signature is stable for the process lifetime.
        """
        if cls._SCOPING_SCANNER_ARG_KEYS_CACHE is None:
            import inspect

            from prowler.lib.scan.scan import Scan as ProwlerScan

            params = inspect.signature(ProwlerScan.__init__).parameters
            cls._SCOPING_SCANNER_ARG_KEYS_CACHE = tuple(
                name for name in params if name not in ("self", "provider")
            )
        return cls._SCOPING_SCANNER_ARG_KEYS_CACHE

    class TriggerChoices(models.TextChoices):
        SCHEDULED = "scheduled", _("Scheduled")
        MANUAL = "manual", _("Manual")

    # Trigger values for scans that ran the SDK end-to-end. Imported scans (or
    # any future trigger) are intentionally NOT in this set — they may carry
    # only a partial slice of resources, so post-scan logic that depends on a
    # full-scope sweep (e.g. resetting ephemeral resource findings) must skip
    # them by default.
    LIVE_SCAN_TRIGGERS = frozenset(
        (TriggerChoices.SCHEDULED.value, TriggerChoices.MANUAL.value)
    )

    id = models.UUIDField(primary_key=True, default=uuid7, editable=False)
    name = models.CharField(
        blank=True, null=True, max_length=100, validators=[MinLengthValidator(3)]
@@ -681,6 +711,24 @@ class Scan(RowLevelSecurityProtectedModel):
    class JSONAPIMeta:
        resource_name = "scans"

    def is_full_scope(self) -> bool:
        """Return True if this scan ran with no scoping filters at all.

        Used to gate post-scan operations (such as resetting the
        failed_findings_count of resources missing from the scan) that are only
        safe when the scan covered every check, service, and category. Imported
        scans are NOT full-scope by definition — they may carry only a partial
        slice of resources, so they're rejected via ``trigger`` even before the
        scanner_args check.
        """
        if self.trigger not in self.LIVE_SCAN_TRIGGERS:
            return False
        scanner_args = self.scanner_args or {}
        for key in self.get_scoping_scanner_arg_keys():
            if scanner_args.get(key):
                return False
        return True


class AttackPathsScan(RowLevelSecurityProtectedModel):
    objects = ActiveProviderManager()
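The introspection trick in `get_scoping_scanner_arg_keys` is worth a standalone illustration (toy class below, not Prowler's actual SDK `Scan`): any keyword the SDK constructor accepts beyond `self` and `provider` is treated as a scoping filter key.

import inspect


class FakeSDKScan:
    def __init__(self, provider, checks=None, services=None, categories=None):
        pass


params = inspect.signature(FakeSDKScan.__init__).parameters
scoping_keys = tuple(name for name in params if name not in ("self", "provider"))
assert scoping_keys == ("checks", "services", "categories")
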
@@ -898,7 +946,6 @@ class Resource(RowLevelSecurityProtectedModel):
                OpClass(Upper("name"), name="gin_trgm_ops"),
                name="res_name_trgm_idx",
            ),
            GinIndex(fields=["text_search"], name="gin_resources_search_idx"),
            models.Index(fields=["tenant_id", "id"], name="resources_tenant_id_idx"),
            models.Index(
                fields=["tenant_id", "provider_id"],
@@ -1104,6 +1151,15 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
                fields=["tenant_id", "scan_id", "check_id"],
                name="find_tenant_scan_check_idx",
            ),
            GinIndex(
                fields=[
                    "categories",
                    "resource_services",
                    "resource_regions",
                    "resource_types",
                ],
                name="gin_find_arrays_idx",
            ),
        ]

    class JSONAPIMeta:
@@ -1748,15 +1804,45 @@ class FindingGroupDailySummary(RowLevelSecurityProtectedModel):
    # Severity stored as integer for MAX aggregation (5=critical, 4=high, etc.)
    severity_order = models.SmallIntegerField(default=1)

    # Finding counts
    # Finding counts (inclusive of muted findings; use the `muted` flag to
    # tell whether the group has any actionable findings).
    pass_count = models.IntegerField(default=0)
    fail_count = models.IntegerField(default=0)
    manual_count = models.IntegerField(default=0)
    muted_count = models.IntegerField(default=0)

    # Delta counts
    # Status counts restricted to muted findings, so clients can isolate the
    # muted half of each status (e.g. `pass_count - pass_muted_count` gives the
    # actionable PASS findings).
    pass_muted_count = models.IntegerField(default=0)
    fail_muted_count = models.IntegerField(default=0)
    manual_muted_count = models.IntegerField(default=0)

    # Whether every finding for this (provider, check, day) is muted.
    muted = models.BooleanField(default=False)

    # Delta counts (non-muted, kept for convenience and as a "total" view).
    new_count = models.IntegerField(default=0)
    changed_count = models.IntegerField(default=0)

    # Delta breakdown by (status, muted) so clients can answer questions like
    # "how many new failing findings appeared in this scan?" without scanning
    # the underlying findings table. Mirrors the existing pass/fail/manual
    # naming, with `_muted_count` siblings tracking the muted half of each
    # bucket explicitly.
    new_fail_count = models.IntegerField(default=0)
    new_fail_muted_count = models.IntegerField(default=0)
    new_pass_count = models.IntegerField(default=0)
    new_pass_muted_count = models.IntegerField(default=0)
    new_manual_count = models.IntegerField(default=0)
    new_manual_muted_count = models.IntegerField(default=0)
    changed_fail_count = models.IntegerField(default=0)
    changed_fail_muted_count = models.IntegerField(default=0)
    changed_pass_count = models.IntegerField(default=0)
    changed_pass_muted_count = models.IntegerField(default=0)
    changed_manual_count = models.IntegerField(default=0)
    changed_manual_muted_count = models.IntegerField(default=0)

    # Resource counts
    resources_fail = models.IntegerField(default=0)
    resources_total = models.IntegerField(default=0)

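Given the comments above, a small helper makes the intended arithmetic explicit (illustrative only): every status total includes its muted findings, so the actionable slice is the total minus its `_muted_count` sibling.

def actionable_counts(summary) -> dict:
    # `summary` is any object exposing the count columns defined above.
    return {
        "fail": summary.fail_count - summary.fail_muted_count,
        "pass": summary.pass_count - summary.pass_muted_count,
        "manual": summary.manual_count - summary.manual_muted_count,
    }
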
[diff suppressed: file too large, +1620 -58]
@@ -12,6 +12,8 @@ from unittest.mock import MagicMock, patch
import neo4j
import pytest

import api.attack_paths.database as db_module


class TestLazyInitialization:
    """Test that Neo4j driver is initialized lazily on first use."""
@@ -19,8 +21,6 @@ class TestLazyInitialization:
    @pytest.fixture(autouse=True)
    def reset_module_state(self):
        """Reset module-level singleton state before each test."""
        import api.attack_paths.database as db_module

        original_driver = db_module._driver

        db_module._driver = None
@@ -31,8 +31,6 @@ class TestLazyInitialization:

    def test_driver_not_initialized_at_import(self):
        """Driver should be None after module import (no eager connection)."""
        import api.attack_paths.database as db_module

        assert db_module._driver is None

    @patch("api.attack_paths.database.settings")
@@ -41,8 +39,6 @@ class TestLazyInitialization:
        self, mock_driver_factory, mock_settings
    ):
        """init_driver() should create connection only when called."""
        import api.attack_paths.database as db_module

        mock_driver = MagicMock()
        mock_driver_factory.return_value = mock_driver
        mock_settings.DATABASES = {
@@ -69,8 +65,6 @@ class TestLazyInitialization:
        self, mock_driver_factory, mock_settings
    ):
        """Subsequent calls should return cached driver without reconnecting."""
        import api.attack_paths.database as db_module

        mock_driver = MagicMock()
        mock_driver_factory.return_value = mock_driver
        mock_settings.DATABASES = {
@@ -99,8 +93,6 @@ class TestLazyInitialization:
        self, mock_driver_factory, mock_settings
    ):
        """get_driver() should use init_driver() for lazy initialization."""
        import api.attack_paths.database as db_module

        mock_driver = MagicMock()
        mock_driver_factory.return_value = mock_driver
        mock_settings.DATABASES = {
@@ -118,14 +110,50 @@ class TestLazyInitialization:
        mock_driver_factory.assert_called_once()

class TestConnectionAcquisitionTimeout:
    """Test that the connection acquisition timeout is configurable."""

    @pytest.fixture(autouse=True)
    def reset_module_state(self):
        original_driver = db_module._driver
        original_timeout = db_module.CONN_ACQUISITION_TIMEOUT

        db_module._driver = None

        yield

        db_module._driver = original_driver
        db_module.CONN_ACQUISITION_TIMEOUT = original_timeout

    @patch("api.attack_paths.database.settings")
    @patch("api.attack_paths.database.neo4j.GraphDatabase.driver")
    def test_driver_receives_configured_timeout(
        self, mock_driver_factory, mock_settings
    ):
        """init_driver() should pass CONN_ACQUISITION_TIMEOUT to the neo4j driver."""
        mock_driver_factory.return_value = MagicMock()
        mock_settings.DATABASES = {
            "neo4j": {
                "HOST": "localhost",
                "PORT": 7687,
                "USER": "neo4j",
                "PASSWORD": "password",
            }
        }
        db_module.CONN_ACQUISITION_TIMEOUT = 42

        db_module.init_driver()

        _, kwargs = mock_driver_factory.call_args
        assert kwargs["connection_acquisition_timeout"] == 42

class TestAtexitRegistration:
    """Test that atexit cleanup handler is registered correctly."""

    @pytest.fixture(autouse=True)
    def reset_module_state(self):
        """Reset module-level singleton state before each test."""
        import api.attack_paths.database as db_module

        original_driver = db_module._driver

        db_module._driver = None
@@ -141,8 +169,6 @@ class TestAtexitRegistration:
        self, mock_driver_factory, mock_atexit_register, mock_settings
    ):
        """atexit.register should be called on first initialization."""
        import api.attack_paths.database as db_module

        mock_driver_factory.return_value = MagicMock()
        mock_settings.DATABASES = {
            "neo4j": {
@@ -168,8 +194,6 @@ class TestAtexitRegistration:
        The double-checked locking on _driver ensures the atexit registration
        block only executes once (when _driver is first created).
        """
        import api.attack_paths.database as db_module

        mock_driver_factory.return_value = MagicMock()
        mock_settings.DATABASES = {
            "neo4j": {
@@ -194,8 +218,6 @@ class TestCloseDriver:
    @pytest.fixture(autouse=True)
    def reset_module_state(self):
        """Reset module-level singleton state before each test."""
        import api.attack_paths.database as db_module

        original_driver = db_module._driver

        db_module._driver = None
@@ -206,8 +228,6 @@

    def test_close_driver_closes_and_clears_driver(self):
        """close_driver() should close the driver and set it to None."""
        import api.attack_paths.database as db_module

        mock_driver = MagicMock()
        db_module._driver = mock_driver

@@ -218,8 +238,6 @@

    def test_close_driver_handles_none_driver(self):
        """close_driver() should handle case where driver is None."""
        import api.attack_paths.database as db_module

        db_module._driver = None

        # Should not raise
@@ -229,8 +247,6 @@

    def test_close_driver_clears_driver_even_on_close_error(self):
        """Driver should be cleared even if close() raises an exception."""
        import api.attack_paths.database as db_module

        mock_driver = MagicMock()
        mock_driver.close.side_effect = Exception("Connection error")
        db_module._driver = mock_driver
@@ -246,8 +262,6 @@ class TestExecuteReadQuery:
    """Test read query execution helper."""

    def test_execute_read_query_calls_read_session_and_returns_result(self):
        import api.attack_paths.database as db_module

        tx = MagicMock()
        expected_graph = MagicMock()
        run_result = MagicMock()
@@ -289,8 +303,6 @@
        assert result is expected_graph

    def test_execute_read_query_defaults_parameters_to_empty_dict(self):
        import api.attack_paths.database as db_module

        tx = MagicMock()
        run_result = MagicMock()
        run_result.graph.return_value = MagicMock()
@@ -325,8 +337,6 @@ class TestGetSessionReadOnly:

    @pytest.fixture(autouse=True)
    def reset_module_state(self):
        import api.attack_paths.database as db_module

        original_driver = db_module._driver
        db_module._driver = None
        yield
@@ -341,8 +351,6 @@
    )
    def test_get_session_raises_write_query_not_allowed(self, neo4j_code):
        """Read-mode Neo4j errors should raise `WriteQueryNotAllowedException`."""
        import api.attack_paths.database as db_module

        mock_session = MagicMock()
        neo4j_error = neo4j.exceptions.Neo4jError._hydrate_neo4j(
            code=neo4j_code,
@@ -362,8 +370,6 @@

    def test_get_session_raises_generic_exception_for_other_errors(self):
        """Non-read-mode Neo4j errors should raise GraphDatabaseQueryException."""
        import api.attack_paths.database as db_module

        mock_session = MagicMock()
        neo4j_error = neo4j.exceptions.Neo4jError._hydrate_neo4j(
            code="Neo.ClientError.Statement.SyntaxError",
@@ -388,8 +394,6 @@ class TestThreadSafety:
    @pytest.fixture(autouse=True)
    def reset_module_state(self):
        """Reset module-level singleton state before each test."""
        import api.attack_paths.database as db_module

        original_driver = db_module._driver

        db_module._driver = None
@@ -404,8 +408,6 @@
        self, mock_driver_factory, mock_settings
    ):
        """Multiple threads calling init_driver() should create only one driver."""
        import api.attack_paths.database as db_module

        mock_driver = MagicMock()
        mock_driver_factory.return_value = mock_driver
        mock_settings.DATABASES = {
@@ -448,8 +450,6 @@ class TestHasProviderData:
    """Test has_provider_data helper for checking provider nodes in Neo4j."""

    def test_returns_true_when_nodes_exist(self):
        import api.attack_paths.database as db_module

        mock_session = MagicMock()
        mock_result = MagicMock()
        mock_result.single.return_value = MagicMock()  # non-None record
@@ -468,8 +468,6 @@
        mock_session.run.assert_called_once()

    def test_returns_false_when_no_nodes(self):
        import api.attack_paths.database as db_module

        mock_session = MagicMock()
        mock_result = MagicMock()
        mock_result.single.return_value = None
@@ -486,8 +484,6 @@
        assert db_module.has_provider_data("db-tenant-abc", "provider-123") is False

    def test_returns_false_when_database_not_found(self):
        import api.attack_paths.database as db_module

        session_ctx = MagicMock()
        session_ctx.__enter__.side_effect = db_module.GraphDatabaseQueryException(
            message="Database does not exist",
@@ -503,8 +499,6 @@
        )

    def test_raises_on_other_errors(self):
        import api.attack_paths.database as db_module

        session_ctx = MagicMock()
        session_ctx.__enter__.side_effect = db_module.GraphDatabaseQueryException(
            message="Connection refused",

@@ -1,13 +1,18 @@
from unittest.mock import MagicMock, patch

import pytest

from api import compliance as compliance_module
from api.compliance import (
    generate_compliance_overview_template,
    generate_scan_compliance,
    get_compliance_frameworks,
    get_prowler_provider_checks,
    get_prowler_provider_compliance,
    load_prowler_checks,
)
from api.models import Provider
from prowler.lib.check.compliance_models import Compliance


class TestCompliance:
@@ -250,3 +255,58 @@ class TestCompliance:
        }

        assert template == expected_template


@pytest.fixture
def reset_compliance_cache():
    """Reset the module-level cache so each test starts cold."""
    previous = dict(compliance_module.AVAILABLE_COMPLIANCE_FRAMEWORKS)
    compliance_module.AVAILABLE_COMPLIANCE_FRAMEWORKS.clear()
    try:
        yield
    finally:
        compliance_module.AVAILABLE_COMPLIANCE_FRAMEWORKS.clear()
        compliance_module.AVAILABLE_COMPLIANCE_FRAMEWORKS.update(previous)


class TestGetComplianceFrameworks:
    def test_returns_keys_from_compliance_get_bulk(self, reset_compliance_cache):
        with patch("api.compliance.Compliance") as mock_compliance:
            mock_compliance.get_bulk.return_value = {
                "cis_1.4_aws": MagicMock(),
                "mitre_attack_aws": MagicMock(),
            }
            result = get_compliance_frameworks(Provider.ProviderChoices.AWS)

            assert sorted(result) == ["cis_1.4_aws", "mitre_attack_aws"]
            mock_compliance.get_bulk.assert_called_once_with(Provider.ProviderChoices.AWS)

    def test_caches_result_per_provider(self, reset_compliance_cache):
        with patch("api.compliance.Compliance") as mock_compliance:
            mock_compliance.get_bulk.return_value = {"cis_1.4_aws": MagicMock()}
            get_compliance_frameworks(Provider.ProviderChoices.AWS)
            get_compliance_frameworks(Provider.ProviderChoices.AWS)

            # Cached after first call.
            assert mock_compliance.get_bulk.call_count == 1

    @pytest.mark.parametrize(
        "provider_type",
        [choice.value for choice in Provider.ProviderChoices],
    )
    def test_listing_is_subset_of_bulk(self, reset_compliance_cache, provider_type):
        """Regression for CLOUD-API-40S: every name returned by
        ``get_compliance_frameworks`` must be loadable via ``Compliance.get_bulk``.

        A divergence here is what produced ``KeyError: 'csa_ccm_4.0'`` in
        ``generate_outputs_task`` after universal/multi-provider compliance
        JSONs were introduced at the top-level ``prowler/compliance/`` path.
        """
        bulk_keys = set(Compliance.get_bulk(provider_type).keys())
        listed = set(get_compliance_frameworks(provider_type))

        missing = listed - bulk_keys
        assert not missing, (
            f"get_compliance_frameworks({provider_type!r}) returned names not "
            f"loadable by Compliance.get_bulk: {sorted(missing)}"
        )

[diff suppressed: file too large]
@@ -4185,6 +4185,7 @@ class FindingGroupSerializer(BaseSerializerV1):
    check_description = serializers.CharField(required=False, allow_null=True)
    severity = serializers.CharField()
    status = serializers.CharField()
    muted = serializers.BooleanField()
    impacted_providers = serializers.ListField(
        child=serializers.CharField(), required=False
    )
@@ -4192,9 +4193,25 @@ class FindingGroupSerializer(BaseSerializerV1):
    resources_total = serializers.IntegerField()
    pass_count = serializers.IntegerField()
    fail_count = serializers.IntegerField()
    manual_count = serializers.IntegerField()
    pass_muted_count = serializers.IntegerField()
    fail_muted_count = serializers.IntegerField()
    manual_muted_count = serializers.IntegerField()
    muted_count = serializers.IntegerField()
    new_count = serializers.IntegerField()
    changed_count = serializers.IntegerField()
    new_fail_count = serializers.IntegerField()
    new_fail_muted_count = serializers.IntegerField()
    new_pass_count = serializers.IntegerField()
    new_pass_muted_count = serializers.IntegerField()
    new_manual_count = serializers.IntegerField()
    new_manual_muted_count = serializers.IntegerField()
    changed_fail_count = serializers.IntegerField()
    changed_fail_muted_count = serializers.IntegerField()
    changed_pass_count = serializers.IntegerField()
    changed_pass_muted_count = serializers.IntegerField()
    changed_manual_count = serializers.IntegerField()
    changed_manual_muted_count = serializers.IntegerField()
    first_seen_at = serializers.DateTimeField(required=False, allow_null=True)
    last_seen_at = serializers.DateTimeField(required=False, allow_null=True)
    failing_since = serializers.DateTimeField(required=False, allow_null=True)
@@ -4208,14 +4225,17 @@ class FindingGroupResourceSerializer(BaseSerializerV1):
    Serializer for Finding Group Resources - resources within a finding group.

    Returns individual resources with their current status, severity,
    and timing information.
    and timing information. Orphan findings (without any resource) expose the
    finding id as `id` so the row stays identifiable in the UI.
    """

    id = serializers.UUIDField(source="resource_id")
    id = serializers.UUIDField(source="row_id")
    resource = serializers.SerializerMethodField()
    provider = serializers.SerializerMethodField()
    finding_id = serializers.UUIDField()
    status = serializers.CharField()
    severity = serializers.CharField()
    muted = serializers.BooleanField()
    delta = serializers.CharField(required=False, allow_null=True)
    first_seen_at = serializers.DateTimeField(required=False, allow_null=True)
    last_seen_at = serializers.DateTimeField(required=False, allow_null=True)

[diff suppressed: file too large, +757 -156]
@@ -17,8 +17,10 @@ celery_app.config_from_object("django.conf:settings", namespace="CELERY")
celery_app.conf.update(result_extended=True, result_expires=None)

celery_app.conf.broker_transport_options = {
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT,
    "queue_order_strategy": "priority",
}
celery_app.conf.task_default_priority = 6
celery_app.conf.result_backend_transport_options = {
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}

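With the Redis transport's priority emulation (`queue_order_strategy: "priority"`), lower numbers are served first, so the cleanup task pinned to `priority=0` in migration 0090 overtakes default-priority (6) work. A sketch of an explicit per-call override; the kwargs values are illustrative:

from tasks.tasks import backfill_finding_group_summaries_task

backfill_finding_group_summaries_task.apply_async(
    kwargs={"tenant_id": "00000000-0000-0000-0000-000000000000", "days": 10},
    priority=0,  # overrides celery_app.conf.task_default_priority = 6
)
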
@@ -15,7 +15,7 @@ from config.django.production import LOGGING as DJANGO_LOGGERS, DEBUG  # noqa: E
from config.custom_logging import BackendLogger  # noqa: E402

BIND_ADDRESS = env("DJANGO_BIND_ADDRESS", default="127.0.0.1")
PORT = env("DJANGO_PORT", default=8000)
PORT = env("DJANGO_PORT", default=8080)

# Server settings
bind = f"{BIND_ADDRESS}:{PORT}"

@@ -120,6 +120,7 @@ sentry_sdk.init(
# see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
before_send=before_send,
send_default_pii=True,
traces_sample_rate=env.float("DJANGO_SENTRY_TRACES_SAMPLE_RATE", default=0.02),
_experiments={
# Set continuous_profiling_auto_start to True
# to automatically start the profiler on when

@@ -14,8 +14,8 @@ from rest_framework import status
from rest_framework.test import APIClient
from tasks.jobs.backfill import (
backfill_resource_scan_summaries,
backfill_scan_category_summaries,
backfill_scan_resource_group_summaries,
aggregate_scan_category_summaries,
aggregate_scan_resource_group_summaries,
)

from api.attack_paths import (
@@ -1445,8 +1445,8 @@ def latest_scan_finding_with_categories(
)
finding.add_resources([resource])
backfill_resource_scan_summaries(tenant_id, str(scan.id))
backfill_scan_category_summaries(tenant_id, str(scan.id))
backfill_scan_resource_group_summaries(tenant_id, str(scan.id))
aggregate_scan_category_summaries(tenant_id, str(scan.id))
aggregate_scan_resource_group_summaries(tenant_id, str(scan.id))
return finding

Binary file not shown (new image, 131 KiB).
@@ -1,6 +1,8 @@
# Portions of this file are based on code from the Cartography project
# (https://github.com/cartography-cncf/cartography), which is licensed under the Apache 2.0 License.

import time

from typing import Any

import aioboto3
@@ -33,7 +35,7 @@ def start_aws_ingestion(

For the scan progress updates:
- The caller of this function (`tasks.jobs.attack_paths.scan.run`) has set it to 2.
- When the control returns to the caller, it will be set to 95.
- When the control returns to the caller, it will be set to 93.
"""

# Initialize variables common to all jobs
@@ -47,7 +49,7 @@ def start_aws_ingestion(
}

boto3_session = get_boto3_session(prowler_api_provider, prowler_sdk_provider)
regions: list[str] = list(prowler_sdk_provider._enabled_regions)
regions: list[str] = resolve_aws_regions(prowler_api_provider, prowler_sdk_provider)
requested_syncs = list(cartography_aws.RESOURCE_FUNCTIONS.keys())

sync_args = cartography_aws._build_aws_sync_kwargs(
@@ -89,34 +91,50 @@ def start_aws_ingestion(
logger.info(
f"Syncing function permission_relationships for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.RESOURCE_FUNCTIONS["permission_relationships"](**sync_args)
logger.info(
f"Synced function permission_relationships for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 88)

if "resourcegroupstaggingapi" in requested_syncs:
logger.info(
f"Syncing function resourcegroupstaggingapi for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.RESOURCE_FUNCTIONS["resourcegroupstaggingapi"](**sync_args)
logger.info(
f"Synced function resourcegroupstaggingapi for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 89)

logger.info(
f"Syncing ec2_iaminstanceprofile scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_ec2_iaminstanceprofile.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced ec2_iaminstanceprofile scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 90)

logger.info(
f"Syncing lambda_ecr analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_analysis_job(
"aws_lambda_ecr.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lambda_ecr analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)

if all(
s in requested_syncs
@@ -125,25 +143,34 @@ def start_aws_ingestion(
logger.info(
f"Syncing lb_container_exposure scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_lb_container_exposure.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lb_container_exposure scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)

if all(s in requested_syncs for s in ["ec2:network_acls", "ec2:load_balancer_v2"]):
logger.info(
f"Syncing lb_nacl_direct scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_lb_nacl_direct.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lb_nacl_direct scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)

db_utils.update_attack_paths_scan_progress(attack_paths_scan, 91)

logger.info(f"Syncing metadata for AWS account {prowler_api_provider.uid}")
t0 = time.perf_counter()
cartography_aws.merge_module_sync_metadata(
neo4j_session,
group_type="AWSAccount",
@@ -152,24 +179,23 @@ def start_aws_ingestion(
update_tag=cartography_config.update_tag,
stat_handler=cartography_aws.stat_handler,
)
logger.info(
f"Synced metadata for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 92)

# Removing the added extra field
del common_job_parameters["AWS_ID"]

logger.info(f"Syncing cleanup_job for AWS account {prowler_api_provider.uid}")
cartography_aws.run_cleanup_job(
"aws_post_ingestion_principals_cleanup.json",
neo4j_session,
common_job_parameters,
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93)

logger.info(f"Syncing analysis for AWS account {prowler_api_provider.uid}")
t0 = time.perf_counter()
cartography_aws._perform_aws_analysis(
requested_syncs, neo4j_session, common_job_parameters
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 94)
logger.info(
f"Synced analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93)

return failed_syncs

@@ -200,6 +226,48 @@ def get_boto3_session(
return boto3_session


def resolve_aws_regions(
prowler_api_provider: ProwlerAPIProvider,
prowler_sdk_provider: ProwlerSDKProvider,
) -> list[str]:
"""Resolve the regions to scan, falling back when `_enabled_regions` is `None`.

The SDK silently sets `_enabled_regions` to `None` when `ec2:DescribeRegions`
fails (missing IAM permission, transient error). Without a fallback the
Cartography ingestion crashes with a non-actionable `TypeError`. Try the
user's `audited_regions` next, then the partition's static region list.
Excluded regions are honored on every branch.
"""
if prowler_sdk_provider._enabled_regions is not None:
regions = set(prowler_sdk_provider._enabled_regions)

elif prowler_sdk_provider.identity.audited_regions:
regions = set(prowler_sdk_provider.identity.audited_regions)

else:
partition = prowler_sdk_provider.identity.partition
try:
regions = prowler_sdk_provider.get_available_aws_service_regions(
"ec2", partition
)

except KeyError:
raise RuntimeError(
f"No region data available for partition {partition!r}; "
f"cannot determine regions to scan for "
f"{prowler_api_provider.uid}"
)

logger.warning(
f"Could not enumerate enabled regions for AWS account "
f"{prowler_api_provider.uid}; falling back to all regions in "
f"partition {partition!r}"
)

excluded = set(getattr(prowler_sdk_provider, "_excluded_regions", None) or ())
return sorted(regions - excluded)
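An illustrative walk through the fallback order, using stand-in objects and made-up regions rather than real provider instances:

from types import SimpleNamespace

# `_enabled_regions` is None (ec2:DescribeRegions failed), so the
# audited-regions branch wins; the excluded region is then removed.
sdk = SimpleNamespace(
    _enabled_regions=None,
    identity=SimpleNamespace(
        audited_regions=["eu-west-1", "us-east-1"], partition="aws"
    ),
    _excluded_regions=["us-east-1"],
)
api = SimpleNamespace(uid="123456789012")
print(resolve_aws_regions(api, sdk))  # ["eu-west-1"]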


def get_aioboto3_session(boto3_session: boto3.Session) -> aioboto3.Session:
return aioboto3.Session(botocore_session=boto3_session._session)

@@ -234,6 +302,8 @@ def sync_aws_account(
)

try:
func_t0 = time.perf_counter()

# `ecr:image_layers` uses `aioboto3_session` instead of `boto3_session`
if func_name == "ecr:image_layers":
cartography_aws.RESOURCE_FUNCTIONS[func_name](
@@ -257,7 +327,15 @@ def sync_aws_account(
else:
cartography_aws.RESOURCE_FUNCTIONS[func_name](**sync_args)

logger.info(
f"Synced function {func_name} for AWS account {prowler_api_provider.uid} in {time.perf_counter() - func_t0:.3f}s"
)

except Exception as e:
logger.info(
f"Synced function {func_name} for AWS account {prowler_api_provider.uid} in {time.perf_counter() - func_t0:.3f}s (FAILED)"
)

exception_message = utils.stringify_exception(
e, f"Exception for AWS sync function: {func_name}"
)
@@ -277,3 +355,16 @@ def sync_aws_account(
)

return failed_syncs


def extract_short_uid(uid: str) -> str:
"""Return the short identifier from an AWS ARN or resource ID.

Supported inputs end in one of:
- `<type>/<id>` (e.g. `instance/i-xxx`)
- `<type>:<id>` (e.g. `function:name`)
- `<id>` (e.g. `bucket-name` or `i-xxx`)

If `uid` is already a short resource ID, it is returned unchanged.
"""
return uid.rsplit("/", 1)[-1].rsplit(":", 1)[-1]

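A few illustrative inputs (hypothetical ARNs) showing what the `rsplit` chain yields:

extract_short_uid("arn:aws:ec2:us-east-1:123456789012:instance/i-0abc123")   # "i-0abc123"
extract_short_uid("arn:aws:lambda:us-east-1:123456789012:function:my-func")  # "my-func"
extract_short_uid("my-bucket")                                               # "my-bucket"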
@@ -18,28 +18,45 @@ logger = get_task_logger(__name__)

def cleanup_stale_attack_paths_scans() -> dict:
"""
Find `EXECUTING` `AttackPathsScan` scans whose workers are dead or that have
exceeded the stale threshold, and mark them as `FAILED`.
Mark stale `AttackPathsScan` rows as `FAILED`.

Two-pass detection:
Covers two stuck-state scenarios:
1. `EXECUTING` scans whose workers are dead, or that have exceeded the
stale threshold while alive.
2. `SCHEDULED` scans that never made it to a worker — parent scan
crashed before dispatch, broker lost the message, etc. Detected by
age plus the parent `Scan` no longer being in flight.
"""
threshold = timedelta(minutes=ATTACK_PATHS_SCAN_STALE_THRESHOLD_MINUTES)
now = datetime.now(tz=timezone.utc)
cutoff = now - threshold

cleaned_up: list[str] = []
cleaned_up.extend(_cleanup_stale_executing_scans(cutoff))
cleaned_up.extend(_cleanup_stale_scheduled_scans(cutoff))

logger.info(
f"Stale `AttackPathsScan` cleanup: {len(cleaned_up)} scan(s) cleaned up"
)
return {"cleaned_up_count": len(cleaned_up), "scan_ids": cleaned_up}
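A periodic task like this is typically driven by Celery beat; a hypothetical schedule entry (illustrative dotted path and cadence, not the project's actual configuration) could look like:

from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "cleanup-stale-attack-paths-scans": {
        # Hypothetical task path; run the stale-scan sweep every 15 minutes.
        "task": "tasks.cleanup_stale_attack_paths_scans",
        "schedule": crontab(minute="*/15"),
    },
}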


def _cleanup_stale_executing_scans(cutoff: datetime) -> list[str]:
"""
Two-pass detection for `EXECUTING` scans:
1. If `TaskResult.worker` exists, ping the worker.
- Dead worker: cleanup immediately (any age).
- Alive + past threshold: revoke the task, then cleanup.
- Alive + within threshold: skip.
2. If no worker field: fall back to time-based heuristic only.
"""
threshold = timedelta(minutes=ATTACK_PATHS_SCAN_STALE_THRESHOLD_MINUTES)
now = datetime.now(tz=timezone.utc)
cutoff = now - threshold

executing_scans = (
executing_scans = list(
AttackPathsScan.all_objects.using(MainRouter.admin_db)
.filter(state=StateChoices.EXECUTING)
.select_related("task__task_runner_task")
)

# Cache worker liveness so each worker is pinged at most once
executing_scans = list(executing_scans)
workers = {
tr.worker
for scan in executing_scans
@@ -48,7 +65,7 @@ def cleanup_stale_attack_paths_scans() -> dict:
}
worker_alive = {w: _is_worker_alive(w) for w in workers}

cleaned_up = []
cleaned_up: list[str] = []

for scan in executing_scans:
task_result = (
@@ -65,9 +82,7 @@ def cleanup_stale_attack_paths_scans() -> dict:

# Alive but stale — revoke before cleanup
_revoke_task(task_result)
reason = (
"Scan exceeded stale threshold — " "cleaned up by periodic task"
)
reason = "Scan exceeded stale threshold — cleaned up by periodic task"
else:
reason = "Worker dead — cleaned up by periodic task"
else:
@@ -82,10 +97,57 @@ def cleanup_stale_attack_paths_scans() -> dict:
if _cleanup_scan(scan, task_result, reason):
cleaned_up.append(str(scan.id))

logger.info(
f"Stale `AttackPathsScan` cleanup: {len(cleaned_up)} scan(s) cleaned up"
return cleaned_up


def _cleanup_stale_scheduled_scans(cutoff: datetime) -> list[str]:
"""
Cleanup `SCHEDULED` scans that never reached a worker.

Detection:
- `state == SCHEDULED`
- `started_at < cutoff`
- parent `Scan` is no longer in flight (terminal state or missing). This
avoids cleaning up rows whose parent Prowler scan is legitimately still
running.

For each match: revoke the queued task (best-effort; harmless if already
consumed), atomically flip to `FAILED`, and mark the `TaskResult`. The
temp Neo4j database is never created while `SCHEDULED`, so no drop is
needed.
"""
scheduled_scans = list(
AttackPathsScan.all_objects.using(MainRouter.admin_db)
.filter(
state=StateChoices.SCHEDULED,
started_at__lt=cutoff,
)
.select_related("task__task_runner_task", "scan")
)
return {"cleaned_up_count": len(cleaned_up), "scan_ids": cleaned_up}

cleaned_up: list[str] = []
parent_terminal = (
StateChoices.COMPLETED,
StateChoices.FAILED,
StateChoices.CANCELLED,
)

for scan in scheduled_scans:
parent_scan = scan.scan
if parent_scan is not None and parent_scan.state not in parent_terminal:
continue

task_result = (
getattr(scan.task, "task_runner_task", None) if scan.task else None
)
if task_result:
_revoke_task(task_result, terminate=False)

reason = "Scan never started — cleaned up by periodic task"
if _cleanup_scheduled_scan(scan, task_result, reason):
cleaned_up.append(str(scan.id))

return cleaned_up


def _is_worker_alive(worker: str) -> bool:
@@ -98,12 +160,17 @@ def _is_worker_alive(worker: str) -> bool:
return True


def _revoke_task(task_result) -> None:
"""Send `SIGTERM` to a hung Celery task. Non-fatal on failure."""
def _revoke_task(task_result, terminate: bool = True) -> None:
"""Revoke a Celery task. Non-fatal on failure.

`terminate=True` SIGTERMs the worker if the task is mid-execution; use
for EXECUTING cleanup. `terminate=False` only marks the task id revoked
across workers, so any worker pulling the queued message discards it;
use for SCHEDULED cleanup where the task hasn't run yet.
"""
try:
current_app.control.revoke(
task_result.task_id, terminate=True, signal="SIGTERM"
)
kwargs = {"terminate": True, "signal": "SIGTERM"} if terminate else {}
current_app.control.revoke(task_result.task_id, **kwargs)
logger.info(f"Revoked task {task_result.task_id}")
except Exception:
logger.exception(f"Failed to revoke task {task_result.task_id}")
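The two call sites then reduce to (illustrative):

_revoke_task(task_result)                   # EXECUTING: revoke and SIGTERM the running task
_revoke_task(task_result, terminate=False)  # SCHEDULED: only mark the id revoked; workers discard the queued message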
@@ -125,28 +192,64 @@ def _cleanup_scan(scan, task_result, reason: str) -> bool:
except Exception:
logger.exception(f"Failed to drop temp database {tmp_db_name}")

# 2. Lock row, verify still EXECUTING, mark FAILED — all atomic
with rls_transaction(str(scan.tenant_id)):
try:
fresh_scan = AttackPathsScan.objects.select_for_update().get(id=scan.id)
except AttackPathsScan.DoesNotExist:
logger.warning(f"Scan {scan_id_str} no longer exists, skipping")
return False
fresh_scan = _finalize_failed_scan(scan, StateChoices.EXECUTING, reason)
if fresh_scan is None:
return False

if fresh_scan.state != StateChoices.EXECUTING:
logger.info(f"Scan {scan_id_str} is now {fresh_scan.state}, skipping")
return False

_mark_scan_finished(fresh_scan, StateChoices.FAILED, {"global_error": reason})

# 3. Mark `TaskResult` as `FAILURE` (not RLS-protected, outside lock)
# Mark `TaskResult` as `FAILURE` (not RLS-protected, outside lock)
if task_result:
task_result.status = states.FAILURE
task_result.date_done = datetime.now(tz=timezone.utc)
task_result.save(update_fields=["status", "date_done"])

# 4. Recover graph_data_ready if provider data still exists
recover_graph_data_ready(fresh_scan)

logger.info(f"Cleaned up stale scan {scan_id_str}: {reason}")
return True


def _cleanup_scheduled_scan(scan, task_result, reason: str) -> bool:
"""
Clean up a `SCHEDULED` scan that never reached a worker.

Skips the temp Neo4j drop — the database is only created once the worker
enters `EXECUTING`, so dropping it here just produces noisy log output.

Returns `True` if the scan was actually cleaned up, `False` if skipped.
"""
scan_id_str = str(scan.id)

fresh_scan = _finalize_failed_scan(scan, StateChoices.SCHEDULED, reason)
if fresh_scan is None:
return False

if task_result:
task_result.status = states.FAILURE
task_result.date_done = datetime.now(tz=timezone.utc)
task_result.save(update_fields=["status", "date_done"])

logger.info(f"Cleaned up scheduled scan {scan_id_str}: {reason}")
return True


def _finalize_failed_scan(scan, expected_state: str, reason: str):
"""
Atomically lock the row, verify it's still in `expected_state`, and
mark it `FAILED`. Returns the locked row on success, `None` if the
row is gone or has already moved on.
"""
scan_id_str = str(scan.id)
with rls_transaction(str(scan.tenant_id)):
try:
fresh_scan = AttackPathsScan.objects.select_for_update().get(id=scan.id)
except AttackPathsScan.DoesNotExist:
logger.warning(f"Scan {scan_id_str} no longer exists, skipping")
return None

if fresh_scan.state != expected_state:
logger.info(f"Scan {scan_id_str} is now {fresh_scan.state}, skipping")
return None

_mark_scan_finished(fresh_scan, StateChoices.FAILED, {"global_error": reason})

return fresh_scan

@@ -8,9 +8,9 @@ from tasks.jobs.attack_paths import aws
# Batch size for Neo4j write operations (resource labeling, cleanup)
BATCH_SIZE = env.int("ATTACK_PATHS_BATCH_SIZE", 1000)
# Batch size for Postgres findings fetch (keyset pagination page size)
FINDINGS_BATCH_SIZE = env.int("ATTACK_PATHS_FINDINGS_BATCH_SIZE", 500)
FINDINGS_BATCH_SIZE = env.int("ATTACK_PATHS_FINDINGS_BATCH_SIZE", 1000)
# Batch size for temp-to-tenant graph sync (nodes and relationships per cursor page)
SYNC_BATCH_SIZE = env.int("ATTACK_PATHS_SYNC_BATCH_SIZE", 250)
SYNC_BATCH_SIZE = env.int("ATTACK_PATHS_SYNC_BATCH_SIZE", 1000)

# Neo4j internal labels (Prowler-specific, not provider-specific)
# - `Internet`: Singleton node representing external internet access for exposed-resource queries
@@ -37,6 +37,8 @@ class ProviderConfig:
# Label for resources connected to the account node, enabling indexed finding lookups.
resource_label: str  # e.g., "_AWSResource"
ingestion_function: Callable
# Maps a Postgres resource UID (e.g. full ARN) to the short-id form Cartography stores on some node types (e.g. `i-xxx` for EC2Instance).
short_uid_extractor: Callable[[str], str]


# Provider Configurations
@@ -48,6 +50,7 @@ AWS_CONFIG = ProviderConfig(
uid_field="arn",
resource_label="_AWSResource",
ingestion_function=aws.start_aws_ingestion,
short_uid_extractor=aws.extract_short_uid,
)

PROVIDER_CONFIGS: dict[str, ProviderConfig] = {
@@ -116,6 +119,21 @@ def get_provider_resource_label(provider_type: str) -> str:
return config.resource_label if config else "_UnknownProviderResource"


def _identity_short_uid(uid: str) -> str:
"""Fallback short-uid extractor for providers without a custom mapping."""
return uid


def get_short_uid_extractor(provider_type: str) -> Callable[[str], str]:
"""Get the short-uid extractor for a provider type.

Returns an identity function when the provider is unknown, so callers can
rely on a callable always being returned.
"""
config = PROVIDER_CONFIGS.get(provider_type)
return config.short_uid_extractor if config else _identity_short_uid

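Illustrative usage (hypothetical ARN), including the identity fallback for an unknown provider:

extractor = get_short_uid_extractor("aws")
extractor("arn:aws:ec2:us-east-1:123456789012:instance/i-0abc123")  # "i-0abc123"

# Unknown providers still get a callable, so call sites need no None checks.
get_short_uid_extractor("not-a-provider")("anything")  # "anything"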
# Dynamic Isolation Label Helpers
# --------------------------------

@@ -67,25 +67,52 @@ def retrieve_attack_paths_scan(
return None


def set_attack_paths_scan_task_id(
tenant_id: str,
scan_pk: str,
task_id: str,
) -> None:
"""Persist the Celery `task_id` on the `AttackPathsScan` row.

Called at dispatch time (when `apply_async` returns) so the row carries
the task id even while still `SCHEDULED`. This lets the periodic
cleanup revoke queued messages for scans that never reached a worker.
"""
with rls_transaction(tenant_id):
ProwlerAPIAttackPathsScan.objects.filter(id=scan_pk).update(task_id=task_id)


def starting_attack_paths_scan(
attack_paths_scan: ProwlerAPIAttackPathsScan,
task_id: str,
cartography_config: CartographyConfig,
) -> None:
with rls_transaction(attack_paths_scan.tenant_id):
attack_paths_scan.task_id = task_id
attack_paths_scan.state = StateChoices.EXECUTING
attack_paths_scan.started_at = datetime.now(tz=timezone.utc)
attack_paths_scan.update_tag = cartography_config.update_tag
) -> bool:
"""Flip the row from `SCHEDULED` to `EXECUTING` atomically.

attack_paths_scan.save(
update_fields=[
"task_id",
"state",
"started_at",
"update_tag",
]
)
Returns `False` if the row is gone or has already moved past
`SCHEDULED` (e.g., periodic cleanup raced ahead and marked it
`FAILED` while the worker message was still in flight).
"""
with rls_transaction(attack_paths_scan.tenant_id):
try:
locked = ProwlerAPIAttackPathsScan.objects.select_for_update().get(
id=attack_paths_scan.id
)
except ProwlerAPIAttackPathsScan.DoesNotExist:
return False

if locked.state != StateChoices.SCHEDULED:
return False

locked.state = StateChoices.EXECUTING
locked.started_at = datetime.now(tz=timezone.utc)
locked.update_tag = cartography_config.update_tag
locked.save(update_fields=["state", "started_at", "update_tag"])

# Keep the in-memory object the caller is holding in sync.
attack_paths_scan.state = locked.state
attack_paths_scan.started_at = locked.started_at
attack_paths_scan.update_tag = locked.update_tag
return True


def _mark_scan_finished(

@@ -5,14 +5,14 @@ This module handles:
- Adding resource labels to Cartography nodes for efficient lookups
- Loading Prowler findings into the graph
- Linking findings to resources
- Cleaning up stale findings
"""

from collections import defaultdict
from typing import Any, Generator
from typing import Any, Callable, Generator
from uuid import UUID

import neo4j

from cartography.config import Config as CartographyConfig
from celery.utils.log import get_task_logger
from tasks.jobs.attack_paths.config import (
@@ -21,10 +21,10 @@ from tasks.jobs.attack_paths.config import (
get_node_uid_field,
get_provider_resource_label,
get_root_node_label,
get_short_uid_extractor,
)
from tasks.jobs.attack_paths.queries import (
ADD_RESOURCE_LABEL_TEMPLATE,
CLEANUP_FINDINGS_TEMPLATE,
INSERT_FINDING_TEMPLATE,
render_cypher_template,
)
@@ -58,7 +58,9 @@ _DB_QUERY_FIELDS = [
]


def _to_neo4j_dict(record: dict[str, Any], resource_uid: str) -> dict[str, Any]:
def _to_neo4j_dict(
record: dict[str, Any], resource_uid: str, resource_short_uid: str
) -> dict[str, Any]:
"""Transform a Django `.values()` record into a `dict` ready for Neo4j ingestion."""
return {
"id": str(record["id"]),
@@ -76,6 +78,7 @@ def _to_neo4j_dict(record: dict[str, Any], resource_uid: str) -> dict[str, Any]:
"muted": record["muted"],
"muted_reason": record["muted_reason"],
"resource_uid": resource_uid,
"resource_short_uid": resource_short_uid,
}


@@ -88,18 +91,21 @@ def analysis(
prowler_api_provider: Provider,
scan_id: str,
config: CartographyConfig,
) -> None:
) -> tuple[int, int]:
"""
Main entry point for Prowler findings analysis.

Adds resource labels, loads findings, and cleans up stale data.
Adds resource labels and loads findings.
Returns (labeled_nodes, findings_loaded).
"""
add_resource_label(
total_labeled = add_resource_label(
neo4j_session, prowler_api_provider.provider, str(prowler_api_provider.uid)
)
findings_data = stream_findings_with_resources(prowler_api_provider, scan_id)
load_findings(neo4j_session, findings_data, prowler_api_provider, config)
cleanup_findings(neo4j_session, prowler_api_provider, config)
total_loaded = load_findings(
neo4j_session, findings_data, prowler_api_provider, config
)
return total_labeled, total_loaded


def add_resource_label(
@@ -149,12 +155,11 @@ def load_findings(
findings_batches: Generator[list[dict[str, Any]], None, None],
prowler_api_provider: Provider,
config: CartographyConfig,
) -> None:
) -> int:
"""Load Prowler findings into the graph, linking them to resources."""
query = render_cypher_template(
INSERT_FINDING_TEMPLATE,
{
"__ROOT_NODE_LABEL__": get_root_node_label(prowler_api_provider.provider),
"__NODE_UID_FIELD__": get_node_uid_field(prowler_api_provider.provider),
"__RESOURCE_LABEL__": get_provider_resource_label(
prowler_api_provider.provider
@@ -163,13 +168,14 @@ def load_findings(
)

parameters = {
"provider_uid": str(prowler_api_provider.uid),
"last_updated": config.update_tag,
"prowler_version": ProwlerConfig.prowler_version,
}

batch_num = 0
total_records = 0
edges_merged = 0
edges_dropped = 0
for batch in findings_batches:
batch_num += 1
batch_size = len(batch)
@@ -178,31 +184,16 @@ def load_findings(
parameters["findings_data"] = batch

logger.info(f"Loading findings batch {batch_num} ({batch_size} records)")
neo4j_session.run(query, parameters)
summary = neo4j_session.run(query, parameters).single()
if summary is not None:
edges_merged += summary.get("merged_count", 0)
edges_dropped += summary.get("dropped_count", 0)

logger.info(f"Finished loading {total_records} records in {batch_num} batches")


def cleanup_findings(
neo4j_session: neo4j.Session,
prowler_api_provider: Provider,
config: CartographyConfig,
) -> None:
"""Remove stale findings (classic Cartography behaviour)."""
parameters = {
"last_updated": config.update_tag,
"batch_size": BATCH_SIZE,
}

batch = 1
deleted_count = 1
while deleted_count > 0:
logger.info(f"Cleaning findings batch {batch}")

result = neo4j_session.run(CLEANUP_FINDINGS_TEMPLATE, parameters)

deleted_count = result.single().get("deleted_findings_count", 0)
batch += 1
logger.info(
f"Finished loading {total_records} records in {batch_num} batches "
f"(edges_merged={edges_merged}, edges_dropped={edges_dropped})"
)
return total_records


# Findings Streaming (Generator-based)
@@ -226,8 +217,9 @@ def stream_findings_with_resources(
)

tenant_id = prowler_api_provider.tenant_id
short_uid_extractor = get_short_uid_extractor(prowler_api_provider.provider)
for batch in _paginate_findings(tenant_id, scan_id):
enriched = _enrich_batch_with_resources(batch, tenant_id)
enriched = _enrich_batch_with_resources(batch, tenant_id, short_uid_extractor)
if enriched:
yield enriched

@@ -273,7 +265,9 @@ def _fetch_findings_batch(
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
# Use `all_objects` to get `Findings` even on soft-deleted `Providers`,
# though the provider has already been validated as active in this context
qs = FindingModel.all_objects.filter(scan_id=scan_id).order_by("id")
qs = FindingModel.all_objects.filter(
tenant_id=tenant_id, scan_id=scan_id
).order_by("id")

if after_id is not None:
qs = qs.filter(id__gt=after_id)
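The `id__gt` cursor above is plain keyset pagination; a minimal generic sketch of the pattern (illustrative, not the project's actual `_paginate_findings`):

def paginate_by_id(qs, page_size):
    """Yield pages ordered by `id`, using the last seen id as the cursor."""
    after_id = None
    while True:
        page = qs.order_by("id")
        if after_id is not None:
            page = page.filter(id__gt=after_id)
        rows = list(page[:page_size])  # one LIMIT query per page, no OFFSET scans
        if not rows:
            return
        yield rows
        after_id = rows[-1].id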
@@ -288,6 +282,7 @@
def _enrich_batch_with_resources(
findings_batch: list[dict[str, Any]],
tenant_id: str,
short_uid_extractor: Callable[[str], str],
) -> list[dict[str, Any]]:
"""
Enrich findings with their resource UIDs.
@@ -299,7 +294,7 @@
resource_map = _build_finding_resource_map(finding_ids, tenant_id)

return [
_to_neo4j_dict(finding, resource_uid)
_to_neo4j_dict(finding, resource_uid, short_uid_extractor(resource_uid))
for finding in findings_batch
for resource_uid in resource_map.get(finding["id"], [])
]

@@ -13,14 +13,13 @@ from tasks.jobs.attack_paths.config import (
logger = get_task_logger(__name__)


# Indexes for Prowler findings and resource lookups
# Indexes for Prowler Findings and resource lookups
FINDINGS_INDEX_STATEMENTS = [
# Resource indexes for Prowler Finding lookups
"CREATE INDEX aws_resource_arn IF NOT EXISTS FOR (n:_AWSResource) ON (n.arn);",
"CREATE INDEX aws_resource_id IF NOT EXISTS FOR (n:_AWSResource) ON (n.id);",
# Prowler Finding indexes
f"CREATE INDEX prowler_finding_id IF NOT EXISTS FOR (n:{PROWLER_FINDING_LABEL}) ON (n.id);",
f"CREATE INDEX prowler_finding_lastupdated IF NOT EXISTS FOR (n:{PROWLER_FINDING_LABEL}) ON (n.lastupdated);",
f"CREATE INDEX prowler_finding_status IF NOT EXISTS FOR (n:{PROWLER_FINDING_LABEL}) ON (n.status);",
# Internet node index for MERGE lookups
f"CREATE INDEX internet_id IF NOT EXISTS FOR (n:{INTERNET_NODE_LABEL}) ON (n.id);",

@@ -32,63 +32,59 @@ ADD_RESOURCE_LABEL_TEMPLATE = """
"""

INSERT_FINDING_TEMPLATE = f"""
MATCH (account:__ROOT_NODE_LABEL__ {{id: $provider_uid}})
UNWIND $findings_data AS finding_data

OPTIONAL MATCH (account)-->(resource_by_uid:__RESOURCE_LABEL__)
WHERE resource_by_uid.__NODE_UID_FIELD__ = finding_data.resource_uid
WITH account, finding_data, resource_by_uid

OPTIONAL MATCH (account)-->(resource_by_id:__RESOURCE_LABEL__)
OPTIONAL MATCH (resource_by_uid:__RESOURCE_LABEL__ {{__NODE_UID_FIELD__: finding_data.resource_uid}})
OPTIONAL MATCH (resource_by_id:__RESOURCE_LABEL__ {{id: finding_data.resource_uid}})
WHERE resource_by_uid IS NULL
AND resource_by_id.id = finding_data.resource_uid
WITH account, finding_data, COALESCE(resource_by_uid, resource_by_id) AS resource
WHERE resource IS NOT NULL
OPTIONAL MATCH (resource_by_short:__RESOURCE_LABEL__ {{id: finding_data.resource_short_uid}})
WHERE resource_by_uid IS NULL AND resource_by_id IS NULL
WITH finding_data,
resource_by_uid,
resource_by_id,
head(collect(resource_by_short)) AS resource_by_short
WITH finding_data,
COALESCE(resource_by_uid, resource_by_id, resource_by_short) AS resource

MERGE (finding:{PROWLER_FINDING_LABEL} {{id: finding_data.id}})
ON CREATE SET
finding.id = finding_data.id,
finding.uid = finding_data.uid,
finding.inserted_at = finding_data.inserted_at,
finding.updated_at = finding_data.updated_at,
finding.first_seen_at = finding_data.first_seen_at,
finding.scan_id = finding_data.scan_id,
finding.delta = finding_data.delta,
finding.status = finding_data.status,
finding.status_extended = finding_data.status_extended,
finding.severity = finding_data.severity,
finding.check_id = finding_data.check_id,
finding.check_title = finding_data.check_title,
finding.muted = finding_data.muted,
finding.muted_reason = finding_data.muted_reason,
finding.firstseen = timestamp(),
finding.lastupdated = $last_updated,
finding._module_name = 'cartography:prowler',
finding._module_version = $prowler_version
ON MATCH SET
finding.status = finding_data.status,
finding.status_extended = finding_data.status_extended,
finding.lastupdated = $last_updated
FOREACH (_ IN CASE WHEN resource IS NOT NULL THEN [1] ELSE [] END |
MERGE (finding:{PROWLER_FINDING_LABEL} {{id: finding_data.id}})
ON CREATE SET
finding.id = finding_data.id,
finding.uid = finding_data.uid,
finding.inserted_at = finding_data.inserted_at,
finding.updated_at = finding_data.updated_at,
finding.first_seen_at = finding_data.first_seen_at,
finding.scan_id = finding_data.scan_id,
finding.delta = finding_data.delta,
finding.status = finding_data.status,
finding.status_extended = finding_data.status_extended,
finding.severity = finding_data.severity,
finding.check_id = finding_data.check_id,
finding.check_title = finding_data.check_title,
finding.muted = finding_data.muted,
finding.muted_reason = finding_data.muted_reason,
finding.firstseen = timestamp(),
finding.lastupdated = $last_updated,
finding._module_name = 'cartography:prowler',
finding._module_version = $prowler_version
ON MATCH SET
finding.status = finding_data.status,
finding.status_extended = finding_data.status_extended,
finding.lastupdated = $last_updated
MERGE (resource)-[rel:HAS_FINDING]->(finding)
ON CREATE SET
rel.firstseen = timestamp(),
rel.lastupdated = $last_updated,
rel._module_name = 'cartography:prowler',
rel._module_version = $prowler_version
ON MATCH SET
rel.lastupdated = $last_updated
)

MERGE (resource)-[rel:HAS_FINDING]->(finding)
ON CREATE SET
rel.firstseen = timestamp(),
rel.lastupdated = $last_updated,
rel._module_name = 'cartography:prowler',
rel._module_version = $prowler_version
ON MATCH SET
rel.lastupdated = $last_updated
"""
WITH sum(CASE WHEN resource IS NOT NULL THEN 1 ELSE 0 END) AS merged_count,
sum(CASE WHEN resource IS NULL THEN 1 ELSE 0 END) AS dropped_count

CLEANUP_FINDINGS_TEMPLATE = f"""
MATCH (finding:{PROWLER_FINDING_LABEL})
WHERE finding.lastupdated < $last_updated

WITH finding LIMIT $batch_size

DETACH DELETE finding

RETURN COUNT(finding) AS deleted_findings_count
RETURN merged_count, dropped_count
"""

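The `FOREACH (_ IN CASE ... END | ...)` construct in the new template is Cypher's standard conditional-write idiom: the body runs once per element of a one- or zero-element guard list. A self-contained sketch of the same pattern against a local Neo4j instance (hypothetical connection details and labels, not the project's query):

from neo4j import GraphDatabase

CONDITIONAL_MERGE = """
UNWIND $rows AS row
OPTIONAL MATCH (r:Resource {id: row.rid})
FOREACH (_ IN CASE WHEN r IS NOT NULL THEN [1] ELSE [] END |
    MERGE (f:Finding {id: row.fid})
    MERGE (r)-[:HAS_FINDING]->(f)
)
RETURN sum(CASE WHEN r IS NOT NULL THEN 1 ELSE 0 END) AS merged_count,
       sum(CASE WHEN r IS NULL THEN 1 ELSE 0 END) AS dropped_count
"""

driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
with driver.session() as session:
    # Rows whose resource is missing are counted as dropped, not written.
    record = session.run(CONDITIONAL_MERGE, rows=[{"rid": "r-1", "fid": "f-1"}]).single()
    print(record["merged_count"], record["dropped_count"])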
# Internet queries (used by internet.py)

@@ -55,6 +55,7 @@ exception propagates to Celery.

import logging
import time

from typing import Any

from cartography.config import Config as CartographyConfig
@@ -96,6 +97,19 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
)
attack_paths_scan = db_utils.retrieve_attack_paths_scan(tenant_id, scan_id)

# Idempotency guard: cleanup may have flipped this row to a terminal state
# while the message was still in flight. Bail out before touching state.
if attack_paths_scan and attack_paths_scan.state in (
StateChoices.FAILED,
StateChoices.COMPLETED,
StateChoices.CANCELLED,
):
logger.warning(
f"Attack Paths scan {attack_paths_scan.id} already in terminal "
f"state {attack_paths_scan.state}; skipping execution"
)
return {}

# Checks before starting the scan
if not cartography_ingestion_function:
ingestion_exceptions = {
@@ -113,12 +127,17 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:

else:
if not attack_paths_scan:
# Safety net for in-flight messages or direct task invocations; dispatcher normally pre-creates the row.
logger.warning(
f"No Attack Paths Scan found for scan {scan_id} and tenant {tenant_id}; creating it now"
)
attack_paths_scan = db_utils.create_attack_paths_scan(
tenant_id, scan_id, prowler_api_provider.id
)
if attack_paths_scan and task_id:
db_utils.set_attack_paths_scan_task_id(
tenant_id, attack_paths_scan.id, task_id
)

tmp_database_name = graph_database.get_database_name(
attack_paths_scan.id, temporary=True
@@ -140,8 +159,18 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
)

# Starting the Attack Paths scan
db_utils.starting_attack_paths_scan(
attack_paths_scan, task_id, tenant_cartography_config
if not db_utils.starting_attack_paths_scan(
attack_paths_scan, tenant_cartography_config
):
logger.warning(
f"Attack Paths scan {attack_paths_scan.id} no longer in SCHEDULED state; cleanup likely raced ahead"
)
return {}

scan_t0 = time.perf_counter()
logger.info(
f"Starting Attack Paths scan ({attack_paths_scan.id}) for "
f"{prowler_api_provider.provider.upper()} provider {prowler_api_provider.id}"
)

subgraph_dropped = False
@@ -169,6 +198,7 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 2)

# The real scan, which iterates over cloud services
t0 = time.perf_counter()
ingestion_exceptions = utils.call_within_event_loop(
cartography_ingestion_function,
tmp_neo4j_session,
@@ -177,19 +207,23 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
prowler_sdk_provider,
attack_paths_scan,
)
logger.info(
f"Cartography ingestion completed in {time.perf_counter() - t0:.3f}s "
f"(failed_syncs={len(ingestion_exceptions)})"
)

# Post-processing: Just keeping it to be more Cartography compliant
logger.info(
f"Syncing Cartography ontology for AWS account {prowler_api_provider.uid}"
)
cartography_ontology.run(tmp_neo4j_session, tmp_cartography_config)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 95)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 94)

logger.info(
f"Syncing Cartography analysis for AWS account {prowler_api_provider.uid}"
)
cartography_analysis.run(tmp_neo4j_session, tmp_cartography_config)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 96)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 95)

# Creating Internet node and CAN_ACCESS relationships
logger.info(
@@ -198,14 +232,20 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
internet.analysis(
tmp_neo4j_session, prowler_api_provider, tmp_cartography_config
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 96)

# Adding Prowler Finding nodes and relationships
logger.info(
f"Syncing Prowler analysis for AWS account {prowler_api_provider.uid}"
)
findings.analysis(
t0 = time.perf_counter()
labeled_nodes, findings_loaded = findings.analysis(
tmp_neo4j_session, prowler_api_provider, scan_id, tmp_cartography_config
)
logger.info(
f"Prowler analysis completed in {time.perf_counter() - t0:.3f}s "
f"(findings={findings_loaded}, labeled_nodes={labeled_nodes})"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 97)

logger.info(
@@ -227,22 +267,33 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
logger.info(f"Deleting existing provider graph in {tenant_database_name}")
db_utils.set_provider_graph_data_ready(attack_paths_scan, False)
provider_gated = True
graph_database.drop_subgraph(

t0 = time.perf_counter()
deleted_nodes = graph_database.drop_subgraph(
database=tenant_database_name,
provider_id=str(prowler_api_provider.id),
)
logger.info(
f"Deleted existing provider graph in {time.perf_counter() - t0:.3f}s "
f"(deleted_nodes={deleted_nodes})"
)
subgraph_dropped = True
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 98)

logger.info(
f"Syncing graph from {tmp_database_name} into {tenant_database_name}"
)
sync.sync_graph(
t0 = time.perf_counter()
sync_result = sync.sync_graph(
source_database=tmp_database_name,
target_database=tenant_database_name,
tenant_id=str(prowler_api_provider.tenant_id),
provider_id=str(prowler_api_provider.id),
)
logger.info(
f"Synced graph in {time.perf_counter() - t0:.3f}s "
f"(nodes={sync_result['nodes']}, relationships={sync_result['relationships']})"
)
sync_completed = True
db_utils.set_graph_data_ready(attack_paths_scan, True)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 99)
@@ -250,17 +301,16 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
logger.info(f"Clearing Neo4j cache for database {tenant_database_name}")
graph_database.clear_cache(tenant_database_name)

logger.info(
f"Completed Cartography ({attack_paths_scan.id}) for "
f"{prowler_api_provider.provider.upper()} provider {prowler_api_provider.id}"
)

logger.info(f"Dropping temporary Neo4j database {tmp_database_name}")
graph_database.drop_database(tmp_database_name)

db_utils.finish_attack_paths_scan(
attack_paths_scan, StateChoices.COMPLETED, ingestion_exceptions
)
logger.info(
f"Attack Paths scan completed in {time.perf_counter() - scan_t0:.3f}s "
f"(state=completed, failed_syncs={len(ingestion_exceptions)})"
)
return ingestion_exceptions

except Exception as e:

@@ -5,6 +5,8 @@ This module handles syncing graph data from temporary scan databases
to the tenant database, adding provider isolation labels and properties.
"""

import time

from collections import defaultdict
from typing import Any

@@ -81,6 +83,7 @@ def sync_nodes(
Source and target sessions are opened sequentially per batch to avoid
holding two Bolt connections simultaneously for the entire sync duration.
"""
t0 = time.perf_counter()
last_id = -1
total_synced = 0

@@ -117,7 +120,7 @@ def sync_nodes(

total_synced += batch_count
logger.info(
f"Synced {total_synced} nodes from {source_database} to {target_database}"
f"Synced {total_synced} nodes from {source_database} to {target_database} in {time.perf_counter() - t0:.3f}s"
)

return total_synced
@@ -136,6 +139,7 @@ def sync_relationships(
Source and target sessions are opened sequentially per batch to avoid
holding two Bolt connections simultaneously for the entire sync duration.
"""
t0 = time.perf_counter()
last_id = -1
total_synced = 0

@@ -166,7 +170,7 @@ def sync_relationships(

total_synced += batch_count
logger.info(
f"Synced {total_synced} relationships from {source_database} to {target_database}"
f"Synced {total_synced} relationships from {source_database} to {target_database} in {time.perf_counter() - t0:.3f}s"
)

return total_synced

@@ -297,12 +297,15 @@ def backfill_daily_severity_summaries(tenant_id: str, days: int = None):
}


def backfill_scan_category_summaries(tenant_id: str, scan_id: str):
def aggregate_scan_category_summaries(tenant_id: str, scan_id: str):
"""
Backfill ScanCategorySummary for a completed scan.

Aggregates category counts from all findings in the scan and creates
one ScanCategorySummary row per (category, severity) combination.
Idempotent: re-runs replace the scan's existing rows so counts stay in
sync with `Finding.muted` updates triggered outside scan completion
(e.g. mute rules).

Args:
tenant_id: Target tenant UUID
@@ -312,11 +315,6 @@ def backfill_scan_category_summaries(tenant_id: str, scan_id: str):
dict: Status indicating whether backfill was performed
"""
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
if ScanCategorySummary.objects.filter(
tenant_id=tenant_id, scan_id=scan_id
).exists():
return {"status": "already backfilled"}

if not Scan.objects.filter(
tenant_id=tenant_id,
id=scan_id,
@@ -337,9 +335,6 @@ def backfill_scan_category_summaries(tenant_id: str, scan_id: str):
cache=category_counts,
)

if not category_counts:
return {"status": "no categories to backfill"}

category_summaries = [
ScanCategorySummary(
tenant_id=tenant_id,
@@ -353,20 +348,38 @@ def backfill_scan_category_summaries(tenant_id: str, scan_id: str):
for (category, severity), counts in category_counts.items()
]

with rls_transaction(tenant_id):
ScanCategorySummary.objects.bulk_create(
category_summaries, batch_size=500, ignore_conflicts=True
)
if category_summaries:
with rls_transaction(tenant_id):
# Upsert so re-runs (post-mute reaggregation) don't trip
# `unique_category_severity_per_scan`; race-safe under concurrent writers.
ScanCategorySummary.objects.bulk_create(
category_summaries,
batch_size=500,
update_conflicts=True,
unique_fields=["tenant_id", "scan_id", "category", "severity"],
update_fields=[
"total_findings",
"failed_findings",
"new_failed_findings",
],
)

if not category_counts:
return {"status": "no categories to backfill"}

return {"status": "backfilled", "categories_count": len(category_counts)}
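For reference, Django's `update_conflicts=True` path maps to a PostgreSQL upsert; roughly the statement it emits (illustrative table and column names, not the project's actual schema):

# Sketch of the generated SQL for the bulk_create upsert above:
UPSERT_SQL = """
INSERT INTO scan_category_summaries
    (tenant_id, scan_id, category, severity,
     total_findings, failed_findings, new_failed_findings)
VALUES (%s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (tenant_id, scan_id, category, severity)
DO UPDATE SET
    total_findings      = EXCLUDED.total_findings,
    failed_findings     = EXCLUDED.failed_findings,
    new_failed_findings = EXCLUDED.new_failed_findings;
"""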


def backfill_scan_resource_group_summaries(tenant_id: str, scan_id: str):
def aggregate_scan_resource_group_summaries(tenant_id: str, scan_id: str):
"""
Backfill ScanGroupSummary for a completed scan.

Aggregates resource group counts from all findings in the scan and creates
one ScanGroupSummary row per (resource_group, severity) combination.
Idempotent: re-runs replace the scan's existing rows so counts stay in
sync with `Finding.muted` updates triggered outside scan completion
(e.g. mute rules) and with resource-inventory views reading from this
table.

Args:
tenant_id: Target tenant UUID
@@ -376,11 +389,6 @@ def backfill_scan_resource_group_summaries(tenant_id: str, scan_id: str):
dict: Status indicating whether backfill was performed
"""
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
if ScanGroupSummary.objects.filter(
tenant_id=tenant_id, scan_id=scan_id
).exists():
return {"status": "already backfilled"}

if not Scan.objects.filter(
tenant_id=tenant_id,
id=scan_id,
@@ -418,9 +426,6 @@ def backfill_scan_resource_group_summaries(tenant_id: str, scan_id: str):
group_resources_cache=group_resources_cache,
)

if not resource_group_counts:
return {"status": "no resource groups to backfill"}

# Compute group-level resource counts (same value for all severity rows in a group)
group_resource_counts = {
grp: len(uids) for grp, uids in group_resources_cache.items()
@@ -439,10 +444,25 @@ def backfill_scan_resource_group_summaries(tenant_id: str, scan_id: str):
for (grp, severity), counts in resource_group_counts.items()
]

with rls_transaction(tenant_id):
ScanGroupSummary.objects.bulk_create(
resource_group_summaries, batch_size=500, ignore_conflicts=True
)
if resource_group_summaries:
with rls_transaction(tenant_id):
# Upsert so re-runs (post-mute reaggregation) don't trip
# `unique_resource_group_severity_per_scan`; race-safe under concurrent writers.
ScanGroupSummary.objects.bulk_create(
resource_group_summaries,
batch_size=500,
update_conflicts=True,
unique_fields=["tenant_id", "scan_id", "resource_group", "severity"],
update_fields=[
"total_findings",
"failed_findings",
"new_failed_findings",
"resources_count",
],
)

if not resource_group_counts:
return {"status": "no resource groups to backfill"}

return {"status": "backfilled", "resource_groups_count": len(resource_group_counts)}

Some files were not shown because too many files have changed in this diff.