Compare commits

...

43 Commits

Author SHA1 Message Date
Prowler Bot 322a500352 fix(ui): centralize default muted findings filter on finding groups (#10819)
Co-authored-by: Alejandro Bailo <59607668+alejandrobailo@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-04-21 14:33:42 +02:00
Prowler Bot ea09ff8902 perf(api): speed up finding-groups /resources endpoint (#10817)
Co-authored-by: Adrián Peña <adrianjpr@gmail.com>
2026-04-21 13:37:52 +02:00
Prowler Bot 24ce8d268b fix(changelog): relocate entries for the SDK (#10813)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-04-21 08:20:47 +02:00
Prowler Bot 0eb7b34207 chore(deps): bump pyasn1 from 0.6.2 to 0.6.3 in /api (#10805)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Adrián Peña <adrianjpr@gmail.com>
2026-04-20 17:58:18 +02:00
Prowler Bot f6b9d8611c fix(api): align latest_resources scan selection with completed_at (#10804)
Co-authored-by: Adrián Peña <adrianjpr@gmail.com>
2026-04-20 17:35:40 +02:00
Prowler Bot 28175170ce chore(api): Bump version to v1.25.2 (#10796)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-20 15:41:52 +02:00
Prowler Bot f5cb033f91 chore(release): Bump version to v5.24.2 (#10793)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-20 15:41:20 +02:00
Prowler Bot 558e292a2a docs: Update version to v5.24.1 (#10795)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-20 15:40:52 +02:00
Prowler Bot a4938897ac chore(ui): Bump version to v5.24.2 (#10794)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-20 15:40:15 +02:00
Prowler Bot 2cb8179477 chore: review changelog for v5.24.1 (#10792)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-04-20 14:10:04 +02:00
Prowler Bot c9bbe7033b fix(ui): sorting and filtering for findings (#10790)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2026-04-20 13:46:36 +02:00
Prowler Bot 76ecb30968 fix(api): detect silent failures in ResourceFindingMapping (#10781)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-04-20 09:15:49 +02:00
Prowler Bot 84a60fe06b fix(ui): correct IaC findings counters (#10773)
Co-authored-by: Alan Buscaglia <gentlemanprogramming@gmail.com>
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2026-04-17 13:55:17 +02:00
Prowler Bot f71743b95b fix(cloudflare): guard validate_credentials against paginator infinite loops (#10772)
Co-authored-by: Andoni Alonso <14891798+andoniaf@users.noreply.github.com>
2026-04-17 11:38:12 +02:00
Prowler Bot 68dcc5a75c fix(ui): exclude muted findings and polish filter selectors (#10770)
Co-authored-by: Alejandro Bailo <59607668+alejandrobailo@users.noreply.github.com>
2026-04-17 11:16:41 +02:00
Prowler Bot 407ae24f04 perf(attack-paths): cleanup task prioritization, restore default batch sizes to 1000, upgrade Cartography to 0.135.0 (#10768)
Co-authored-by: Josema Camacho <josema@prowler.com>
2026-04-17 11:01:19 +02:00
Prowler Bot 17c4a286af chore(deps): bump msgraph-sdk to 1.55.0 and azure-mgmt-resource to 24.0.0, remove marshmallow (#10766)
Co-authored-by: Josema Camacho <josema@prowler.com>
2026-04-17 10:22:17 +02:00
Prowler Bot 69ee2cdcef fix(googleworkspace): treat secure Google defaults as PASS for Drive checks (#10765)
Co-authored-by: lydiavilchez <114735608+lydiavilchez@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-04-17 09:12:57 +02:00
Prowler Bot 3544ff5e75 fix: CHANGELOG minor issue (#10759)
Co-authored-by: Alejandro Bailo <59607668+alejandrobailo@users.noreply.github.com>
2026-04-16 17:10:44 +02:00
Prowler Bot 69287dc3a1 fix(api): exclude muted findings from pass_count, fail_count and manual_count (#10755) 2026-04-16 16:16:25 +02:00
Prowler Bot cf5848d11d fix(ui): upgrade React 19.2.5 and Next.js 16.2.3 to mitigate CVE-2026-23869 (#10754)
Co-authored-by: Alejandro Bailo <59607668+alejandrobailo@users.noreply.github.com>
2026-04-16 15:39:30 +02:00
Prowler Bot 8ead3fa6bb fix(api): add fallback handling for missing resources in findings (#10751)
Co-authored-by: Adrián Peña <adrianjpr@gmail.com>
2026-04-16 14:54:27 +02:00
Prowler Bot 21483cc12f fix(googleworkspace): treat secure Google defaults as PASS for Calendar checks (#10735)
Co-authored-by: lydiavilchez <114735608+lydiavilchez@users.noreply.github.com>
Co-authored-by: Andoni Alonso <14891798+andoniaf@users.noreply.github.com>
2026-04-16 13:36:14 +02:00
Prowler Bot 628de4bd06 fix(image): --registry-list crashes with AttributeError on global_provider (#10730)
Co-authored-by: Erich Blume <725328+eblume@users.noreply.github.com>
Co-authored-by: Andoni A. <14891798+andoniaf@users.noreply.github.com>
2026-04-16 13:31:08 +02:00
Prowler Bot 043f1ef138 fix(sdk): allow account-scoped tokens in Cloudflare connection test (#10731)
Co-authored-by: Andoni Alonso <14891798+andoniaf@users.noreply.github.com>
2026-04-16 13:25:09 +02:00
Prowler Bot a120da9409 fix(db): add missing tenant_id filter in queries (#10725)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-04-16 12:11:28 +02:00
Prowler Bot d5b71c6436 chore(ui): Bump version to v5.24.1 (#10713)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:14:37 +02:00
Prowler Bot 9114d09ba5 docs: Update version to v5.24.0 (#10716)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:14:27 +02:00
Prowler Bot d2b1224a30 chore(release): Bump version to v5.24.1 (#10712)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:13:54 +02:00
Prowler Bot 54b54e25e2 chore(api): Bump version to v1.25.1 (#10717)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:13:43 +02:00
Prowler Bot 1b45724ca8 chore(api): Update prowler dependency to v5.24 for release 5.24.0 (#10709)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 18:57:37 +02:00
Pepe Fagoaga ba5b23245f chore: review changelog for v5.24 (#10707) 2026-04-15 18:05:55 +02:00
Daniel Barranquero 43913b1592 feat(aws): support excluding regions from scans via CLI, env var, and config (#10688) 2026-04-15 17:59:46 +02:00
Alan Buscaglia 9e31160887 fix(ui): improve attack paths scan table UX and fix info banner variant (#10704)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2026-04-15 17:33:29 +02:00
Pepe Fagoaga 9a0c73256e chore: delete .opencode (#10702) 2026-04-15 15:10:40 +02:00
Alejandro Bailo 2a160a10df refactor(ui): remove legacy side drawers and clean code (#10692) 2026-04-15 13:55:57 +02:00
Alan Buscaglia 8d8bee165b feat(ui): improve attack paths scan selection UX (#10685) 2026-04-15 13:54:25 +02:00
Alan Buscaglia 606efec9f8 fix(ui): keep update credentials wizard open (#10675) 2026-04-15 13:50:20 +02:00
Alan Buscaglia d5354e8b1d feat(ui): add syntax highlighting to finding groups remediation code (#10698) 2026-04-15 12:58:35 +02:00
Rubén De la Torre Vico a96e5890dc docs: replace Excalidraw diagrams with Mermaid and fix architecture connections (#10697) 2026-04-15 12:51:29 +02:00
Pepe Fagoaga bb81c5dd2d docs: add contextual menu for copy and issue/feat (#10699) 2026-04-15 12:50:29 +02:00
Daniel Barranquero c3acb818d9 fix(vercel): handle team-scoped firewall config responses (#10695) 2026-04-15 11:59:20 +02:00
Andoni Alonso e6fc59267b docs: add Finding Groups documentation page (#10696)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-04-15 11:58:39 +02:00
190 changed files with 10943 additions and 6337 deletions
+1 -1
@@ -145,7 +145,7 @@ SENTRY_RELEASE=local
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}
#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.24.0
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.24.2
# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
+1
@@ -84,6 +84,7 @@ continue.json
.continuerc.json
# AI Coding Assistants - OpenCode
.opencode/
opencode.json
# AI Coding Assistants - GitHub Copilot
+28 -1
@@ -2,7 +2,34 @@
All notable changes to the **Prowler API** are documented in this file.
## [1.25.0] (Prowler UNRELEASED)
## [1.25.2] (Prowler v5.24.2)
### 🔄 Changed
- Finding groups `/resources` endpoints now materialize the filtered finding IDs into a Python list before filtering `ResourceFindingMapping`, so PostgreSQL switches from a Merge Semi Join that read hundreds of thousands of RFM index entries to a Nested Loop Index Scan over `finding_id`. The `has_mappings.exists()` pre-check is removed, and a request-scoped cache deduplicates the finding-id round-trip across the helpers that build different RFM querysets [(#10816)](https://github.com/prowler-cloud/prowler/pull/10816)
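For illustration, a minimal sketch of the queryset change described above, using the ORM names that appear in the views diff further down (variable names are placeholders, not the endpoint's exact code):

# Before: correlated subquery; PostgreSQL tended to plan a Merge Semi Join
# over the ResourceFindingMapping (RFM) index.
finding_ids = filtered_queryset.order_by().values("id")
rfm = ResourceFindingMapping.objects.filter(finding_id__in=Subquery(finding_ids))

# After: materialize the IDs into a Python list so the planner can use a
# Nested Loop Index Scan keyed on finding_id.
finding_ids = list(filtered_queryset.order_by().values_list("id", flat=True))
rfm = ResourceFindingMapping.objects.filter(finding_id__in=finding_ids)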
### 🐞 Fixed
- `/finding-groups/latest/<check_id>/resources` now selects the latest completed scan per provider by `-completed_at` (then `-inserted_at`) instead of `-inserted_at`, matching the `/finding-groups/latest` summary path and the daily-summary upsert so overlapping scans no longer produce diverging `delta`/`new_count` between the two endpoints [(#10802)](https://github.com/prowler-cloud/prowler/pull/10802)
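A minimal sketch of the corrected scan selection, assuming the `Scan` model and `StateChoices` used in the tests further down; the exact queryset in the endpoint may differ:

# provider: the Provider row being summarized (placeholder).
latest_scan = (
    Scan.objects.filter(provider=provider, state=StateChoices.COMPLETED)
    .order_by("-completed_at", "-inserted_at")  # completion time wins; insertion time breaks ties
    .first()
)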
---
## [1.25.1] (Prowler v5.24.1)
### 🔄 Changed
- Attack Paths: Restore `SYNC_BATCH_SIZE` and `FINDINGS_BATCH_SIZE` defaults to 1000, upgrade Cartography to 0.135.0, enable Celery queue priority for cleanup task, rewrite Finding insertion, remove AWS graph cleanup and add timing logs [(#10729)](https://github.com/prowler-cloud/prowler/pull/10729)
### 🐞 Fixed
- Finding group resources endpoints now include findings without associated resources (orphaned IaC findings) as simulated resource rows, and return one row per finding when multiple findings share a resource [(#10708)](https://github.com/prowler-cloud/prowler/pull/10708)
- Attack Paths: Missing `tenant_id` filter while getting related findings after scan completes [(#10722)](https://github.com/prowler-cloud/prowler/pull/10722)
- Finding group counters `pass_count`, `fail_count` and `manual_count` now exclude muted findings [(#10753)](https://github.com/prowler-cloud/prowler/pull/10753)
- Silent data loss in `ResourceFindingMapping` bulk insert that left findings orphaned when `INSERT ... ON CONFLICT DO NOTHING` dropped rows without raising; added explicit `unique_fields` [(#10724)](https://github.com/prowler-cloud/prowler/pull/10724)
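One way to surface that failure mode, as a hedged sketch rather than the project's actual fix (model and field names are taken from the diffs below; `mappings` and `logger` are hypothetical):

ResourceFindingMapping.objects.bulk_create(mappings, ignore_conflicts=True)
# ON CONFLICT DO NOTHING never raises, so verify the rows actually landed.
expected = {(m.resource_id, m.finding_id) for m in mappings}
persisted = set(
    ResourceFindingMapping.objects.filter(
        finding_id__in=[m.finding_id for m in mappings]
    ).values_list("resource_id", "finding_id")
)
missing = expected - persisted
if missing:
    logger.error("RFM bulk insert silently dropped %d rows", len(missing))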
---
## [1.25.0] (Prowler v5.24.0)
### 🔄 Changed
+152 -128
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.3.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -682,21 +682,21 @@ requests = ">=2.21.0,<3.0.0"
[[package]]
name = "alibabacloud-tea-openapi"
version = "0.4.1"
version = "0.4.4"
description = "Alibaba Cloud openapi SDK Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "alibabacloud_tea_openapi-0.4.1-py3-none-any.whl", hash = "sha256:e46bfa3ca34086d2c357d217a0b7284ecbd4b3bab5c88e075e73aec637b0e4a0"},
{file = "alibabacloud_tea_openapi-0.4.1.tar.gz", hash = "sha256:2384b090870fdb089c3c40f3fb8cf0145b8c7d6c14abbac521f86a01abb5edaf"},
{file = "alibabacloud_tea_openapi-0.4.4-py3-none-any.whl", hash = "sha256:cea6bc1fe35b0319a8752cb99eb0ecb0dab7ca1a71b99c12970ba0867410995f"},
{file = "alibabacloud_tea_openapi-0.4.4.tar.gz", hash = "sha256:1b0917bc03cd49417da64945e92731716d53e2eb8707b235f54e45b7473221ce"},
]
[package.dependencies]
alibabacloud-credentials = ">=1.0.2,<2.0.0"
alibabacloud-gateway-spi = ">=0.0.2,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"
cryptography = ">=3.0.0,<45.0.0"
cryptography = {version = ">=3.0.0,<47.0.0", markers = "python_version >= \"3.8\""}
darabonba-core = ">=1.0.3,<2.0.0"
[[package]]
@@ -1526,19 +1526,19 @@ typing-extensions = ">=4.6.0"
[[package]]
name = "azure-mgmt-resource"
version = "23.3.0"
version = "24.0.0"
description = "Microsoft Azure Resource Management Client Library for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_mgmt_resource-23.3.0-py3-none-any.whl", hash = "sha256:ab216ee28e29db6654b989746e0c85a1181f66653929d2cb6e48fba66d9af323"},
{file = "azure_mgmt_resource-23.3.0.tar.gz", hash = "sha256:fc4f1fd8b6aad23f8af4ed1f913df5f5c92df117449dc354fea6802a2829fea4"},
{file = "azure_mgmt_resource-24.0.0-py3-none-any.whl", hash = "sha256:27b32cd223e2784269f5a0db3c282042886ee4072d79cedc638438ece7cd0df4"},
{file = "azure_mgmt_resource-24.0.0.tar.gz", hash = "sha256:cf6b8995fcdd407ac9ff1dd474087129429a1d90dbb1ac77f97c19b96237b265"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
azure-mgmt-core = ">=1.5.0"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
@@ -1822,19 +1822,19 @@ crt = ["awscrt (==0.27.6)"]
[[package]]
name = "cartography"
version = "0.132.0"
version = "0.135.0"
description = "Explore assets and their relationships across your technical infrastructure."
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "cartography-0.132.0-py3-none-any.whl", hash = "sha256:c070aa51d0ab4479cb043cae70b35e7df49f2fb5f1fa95ccf10000bbeb952262"},
{file = "cartography-0.132.0.tar.gz", hash = "sha256:7c6332bc57fd2629d7b83aee7bd95a7b2edb0d51ef746efa0461399e0b66625c"},
{file = "cartography-0.135.0-py3-none-any.whl", hash = "sha256:c62c32a6917b8f23a8b98fe2b6c7c4a918b50f55918482966c4dae1cf5f538e1"},
{file = "cartography-0.135.0.tar.gz", hash = "sha256:3f500cd22c3b392d00e8b49f62acc95fd4dcd559ce514aafe2eb8101133c7a49"},
]
[package.dependencies]
adal = ">=1.2.4"
aioboto3 = ">=13.0.0"
aioboto3 = ">=15.0.0"
azure-cli-core = ">=2.26.0"
azure-identity = ">=1.5.0"
azure-keyvault-certificates = ">=4.0.0"
@@ -1852,9 +1852,9 @@ azure-mgmt-keyvault = ">=10.0.0"
azure-mgmt-logic = ">=10.0.0"
azure-mgmt-monitor = ">=3.0.0"
azure-mgmt-network = ">=25.0.0"
azure-mgmt-resource = ">=10.2.0,<25.0.0"
azure-mgmt-resource = ">=24.0.0,<25"
azure-mgmt-security = ">=5.0.0"
azure-mgmt-sql = ">=3.0.1,<4"
azure-mgmt-sql = ">=3.0.1"
azure-mgmt-storage = ">=16.0.0"
azure-mgmt-synapse = ">=2.0.0"
azure-mgmt-web = ">=7.0.0"
@@ -1862,38 +1862,39 @@ azure-synapse-artifacts = ">=0.17.0"
backoff = ">=2.1.2"
boto3 = ">=1.15.1"
botocore = ">=1.18.1"
cloudflare = ">=4.1.0,<5.0.0"
cloudflare = ">=4.1.0"
crowdstrike-falconpy = ">=0.5.1"
cryptography = "*"
dnspython = ">=1.15.0"
duo-client = "*"
google-api-python-client = ">=1.7.8"
cryptography = ">=45.0.0"
dnspython = ">=2.0.0"
duo-client = ">=5.5.0"
google-api-python-client = ">=2.0.0"
google-auth = ">=2.37.0"
google-cloud-asset = ">=1.0.0"
google-cloud-resource-manager = ">=1.14.2"
httpx = ">=0.24.0"
kubernetes = ">=22.6.0"
marshmallow = ">=3.0.0rc7"
msgraph-sdk = "*"
marshmallow = ">=4.0.0"
msgraph-sdk = ">=1.53.0"
msrestazure = ">=0.6.4"
neo4j = ">=6.0.0"
oci = ">=2.71.0"
okta = "<1.0.0"
packageurl-python = "*"
packaging = "*"
packageurl-python = ">=0.17.0"
packaging = ">=26.0.0"
pagerduty = ">=4.0.1"
policyuniverse = ">=1.1.0.0"
PyJWT = {version = ">=2.0.0", extras = ["crypto"]}
python-dateutil = "*"
python-dateutil = ">=2.9.0"
python-digitalocean = ">=1.16.0"
pyyaml = ">=5.3.1"
requests = ">=2.22.0"
scaleway = ">=2.10.0"
slack-sdk = ">=3.37.0"
statsd = "*"
statsd = ">=4.0.0"
typer = ">=0.9.0"
types-aiobotocore-ecr = "*"
xmltodict = "*"
types-aiobotocore-ecr = ">=3.1.0"
workos = ">=5.44.0"
xmltodict = ">=1.0.0"
[[package]]
name = "celery"
@@ -2503,62 +2504,74 @@ dev = ["bandit", "coverage", "flake8", "pydocstyle", "pylint", "pytest", "pytest
[[package]]
name = "cryptography"
version = "44.0.3"
version = "46.0.6"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
groups = ["main", "dev"]
files = [
{file = "cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d"},
{file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904"},
{file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44"},
{file = "cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d"},
{file = "cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d"},
{file = "cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f"},
{file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5"},
{file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b"},
{file = "cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028"},
{file = "cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cad399780053fb383dc067475135e41c9fe7d901a97dd5d9c5dfb5611afc0d7d"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:21a83f6f35b9cc656d71b5de8d519f566df01e660ac2578805ab245ffd8523f8"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fc3c9babc1e1faefd62704bb46a69f359a9819eb0292e40df3fb6e3574715cd4"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:e909df4053064a97f1e6565153ff8bb389af12c5c8d29c343308760890560aff"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:dad80b45c22e05b259e33ddd458e9e2ba099c86ccf4e88db7bbab4b747b18d06"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:479d92908277bed6e1a1c69b277734a7771c2b78633c224445b5c60a9f4bc1d9"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c"},
{file = "cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053"},
{file = "cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738"},
{file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c"},
{file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f"},
{file = "cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2"},
{file = "cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124"},
{file = "cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a"},
{file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d"},
{file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736"},
{file = "cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed"},
{file = "cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4"},
{file = "cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58"},
{file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb"},
{file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72"},
{file = "cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c"},
{file = "cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e"},
{file = "cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759"},
]
[package.dependencies]
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""]
pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
nox = ["nox[uv] (>=2024.4.15)"]
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -2961,7 +2974,7 @@ files = [
[package.dependencies]
autopep8 = "*"
Django = ">=4.2"
gprof2dot = ">=2017.9.19"
gprof2dot = ">=2017.09.19"
sqlparse = "*"
[[package]]
@@ -3740,19 +3753,19 @@ urllib3 = ["packaging", "urllib3"]
[[package]]
name = "google-auth-httplib2"
version = "0.2.1"
version = "0.2.0"
description = "Google Authentication Library: httplib2 transport"
optional = false
python-versions = ">=3.7"
python-versions = "*"
groups = ["main"]
files = [
{file = "google_auth_httplib2-0.2.1-py3-none-any.whl", hash = "sha256:1be94c611db91c01f9703e7f62b0a59bbd5587a95571c7b6fade510d648bc08b"},
{file = "google_auth_httplib2-0.2.1.tar.gz", hash = "sha256:5ef03be3927423c87fb69607b42df23a444e434ddb2555b73b3679793187b7de"},
{file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"},
{file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"},
]
[package.dependencies]
google-auth = ">=1.32.0,<3.0.0"
httplib2 = ">=0.19.0,<1.0.0"
google-auth = "*"
httplib2 = ">=0.19.0"
[[package]]
name = "google-cloud-access-context-manager"
@@ -4569,7 +4582,7 @@ files = [
[package.dependencies]
attrs = ">=22.2.0"
jsonschema-specifications = ">=2023.3.6"
jsonschema-specifications = ">=2023.03.6"
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"
@@ -4777,7 +4790,7 @@ librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""]
mongodb = ["pymongo (==4.15.3)"]
msgpack = ["msgpack (==1.1.2)"]
pyro = ["pyro4 (==4.82)"]
qpid = ["qpid-python (==1.36.0.post1)", "qpid-tools (==1.36.0.post1)"]
qpid = ["qpid-python (==1.36.0-1)", "qpid-tools (==1.36.0-1)"]
redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2,<6.5)"]
slmq = ["softlayer_messaging (>=1.0.3)"]
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
@@ -4798,7 +4811,7 @@ files = [
]
[package.dependencies]
certifi = ">=14.5.14"
certifi = ">=14.05.14"
durationpy = ">=0.7"
google-auth = ">=1.0.1"
oauthlib = ">=3.2.2"
@@ -5181,24 +5194,16 @@ files = [
[[package]]
name = "marshmallow"
version = "3.26.2"
version = "4.3.0"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["main", "dev"]
files = [
{file = "marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73"},
{file = "marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57"},
{file = "marshmallow-4.3.0-py3-none-any.whl", hash = "sha256:46c4fe6984707e3cbd485dfebbf0a59874f58d695aad05c1668d15e8c6e13b46"},
{file = "marshmallow-4.3.0.tar.gz", hash = "sha256:fb43c53b3fe240b8f6af37223d6ef1636f927ad9bea8ab323afad95dff090880"},
]
[package.dependencies]
packaging = ">=17.0"
[package.extras]
dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
tests = ["pytest", "simplejson"]
[[package]]
name = "matplotlib"
version = "3.10.8"
@@ -5492,14 +5497,14 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.23.0"
version = "1.55.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "msgraph_sdk-1.23.0-py3-none-any.whl", hash = "sha256:58e0047b4ca59fd82022c02cd73fec0170a3d84f3b76721e3db2a0314df9a58a"},
{file = "msgraph_sdk-1.23.0.tar.gz", hash = "sha256:6dd1ba9a46f5f0ce8599fd9610133adbd9d1493941438b5d3632fce9e55ed607"},
{file = "msgraph_sdk-1.55.0-py3-none-any.whl", hash = "sha256:c8e68ebc4b88af5111de312e7fa910a4e76ddf48a4534feadb1fb8a411c48cfc"},
{file = "msgraph_sdk-1.55.0.tar.gz", hash = "sha256:6df691a31954a050d26b8a678968017e157d940fb377f2a8a4e17a9741b98756"},
]
[package.dependencies]
@@ -5925,23 +5930,24 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
[[package]]
name = "oci"
version = "2.160.3"
version = "2.169.0"
description = "Oracle Cloud Infrastructure Python SDK"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "oci-2.160.3-py3-none-any.whl", hash = "sha256:858bff3e697098bdda44833d2476bfb4632126f0182178e7dbde4dbd156d71f0"},
{file = "oci-2.160.3.tar.gz", hash = "sha256:57514889be3b713a8385d86e3ba8a33cf46e3563c2a7e29a93027fb30b8a2537"},
{file = "oci-2.169.0-py3-none-any.whl", hash = "sha256:c71bb5143f307791082b3e33cc1545c2490a518cfed85ab1948ef5107c36d30b"},
{file = "oci-2.169.0.tar.gz", hash = "sha256:f3c5fff00b01783b5325ea7b13bf140053ec1e9f41da20bfb9c8a349ee7662fa"},
]
[package.dependencies]
certifi = "*"
circuitbreaker = {version = ">=1.3.1,<3.0.0", markers = "python_version >= \"3.7\""}
cryptography = ">=3.2.1,<46.0.0"
pyOpenSSL = ">=17.5.0,<25.0.0"
cryptography = ">=3.2.1,<47.0.0"
pyOpenSSL = ">=17.5.0,<27.0.0"
python-dateutil = ">=2.5.3,<3.0.0"
pytz = ">=2016.10"
urllib3 = {version = ">=2.6.3", markers = "python_version >= \"3.10.0\""}
[package.extras]
adk = ["docstring-parser (>=0.16) ; python_version >= \"3.10\" and python_version < \"4\"", "mcp (>=1.6.0) ; python_version >= \"3.10\" and python_version < \"4\"", "pydantic (>=2.10.6) ; python_version >= \"3.10\" and python_version < \"4\"", "rich (>=13.9.4) ; python_version >= \"3.10\" and python_version < \"4\""]
@@ -6659,7 +6665,7 @@ files = [
[[package]]
name = "prowler"
version = "5.23.0"
version = "5.24.0"
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
optional = false
python-versions = ">=3.10,<3.13"
@@ -6679,7 +6685,7 @@ alibabacloud-rds20140815 = "12.0.0"
alibabacloud_sas20181203 = "6.1.0"
alibabacloud-sls20201230 = "5.9.0"
alibabacloud_sts20150401 = "1.1.6"
alibabacloud_tea_openapi = "0.4.1"
alibabacloud_tea_openapi = "0.4.4"
alibabacloud_vpc20160428 = "6.13.0"
alive-progress = "3.3.0"
awsipranges = "0.3.3"
@@ -6701,7 +6707,7 @@ azure-mgmt-postgresqlflexibleservers = "1.1.0"
azure-mgmt-rdbms = "10.1.0"
azure-mgmt-recoveryservices = "3.1.0"
azure-mgmt-recoveryservicesbackup = "9.2.0"
azure-mgmt-resource = "23.3.0"
azure-mgmt-resource = "24.0.0"
azure-mgmt-search = "9.1.0"
azure-mgmt-security = "7.0.0"
azure-mgmt-sql = "3.0.1"
@@ -6714,29 +6720,29 @@ boto3 = "1.40.61"
botocore = "1.40.61"
cloudflare = "4.3.1"
colorama = "0.4.6"
cryptography = "44.0.3"
cryptography = "46.0.6"
dash = "3.1.1"
dash-bootstrap-components = "2.0.3"
defusedxml = ">=0.7.1"
defusedxml = "0.7.1"
detect-secrets = "1.5.0"
dulwich = "0.23.0"
google-api-python-client = "2.163.0"
google-auth-httplib2 = ">=0.1,<0.3"
google-auth-httplib2 = "0.2.0"
h2 = "4.3.0"
jsonschema = "4.23.0"
kubernetes = "32.0.1"
markdown = "3.10.2"
microsoft-kiota-abstractions = "1.9.2"
msgraph-sdk = "1.23.0"
msgraph-sdk = "1.55.0"
numpy = "2.0.2"
oci = "2.160.3"
oci = "2.169.0"
openstacksdk = "4.2.0"
pandas = "2.2.3"
py-iam-expand = "0.1.0"
py-ocsf-models = "0.8.1"
pydantic = ">=2.0,<3.0"
pydantic = "2.12.5"
pygithub = "2.8.0"
python-dateutil = ">=2.9.0.post0,<3.0.0"
python-dateutil = "2.9.0.post0"
pytz = "2025.1"
schema = "0.7.5"
shodan = "1.31.0"
@@ -6748,8 +6754,8 @@ uuid6 = "2024.7.10"
[package.source]
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
reference = "master"
resolved_reference = "6ac90eb1b58590b6f2f51645dbef17b9231053f4"
reference = "v5.24"
resolved_reference = "ba5b23245f4805f46d67e67fc059aefd6831f7b3"
[[package]]
name = "psutil"
@@ -6914,14 +6920,14 @@ pydantic = ">=2.12.0,<3.0.0"
[[package]]
name = "pyasn1"
version = "0.6.2"
version = "0.6.3"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf"},
{file = "pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b"},
{file = "pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde"},
{file = "pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf"},
]
[[package]]
@@ -7188,7 +7194,7 @@ files = [
]
[package.dependencies]
astroid = ">=3.2.2,<=3.3.0.dev0"
astroid = ">=3.2.2,<=3.3.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.3.7", markers = "python_version >= \"3.12\""},
@@ -7210,7 +7216,7 @@ description = "The MSALRuntime Python Interop Package"
optional = false
python-versions = ">=3.6"
groups = ["main"]
markers = "(platform_system == \"Windows\" or platform_system == \"Darwin\" or platform_system == \"Linux\") and sys_platform == \"win32\""
markers = "sys_platform == \"win32\" and (platform_system == \"Windows\" or platform_system == \"Darwin\" or platform_system == \"Linux\")"
files = [
{file = "pymsalruntime-0.18.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0c22e2e83faa10de422bbfaacc1bb2887c9025ee8a53f0fc2e4f7db01c4a7b66"},
{file = "pymsalruntime-0.18.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8ce2944a0f944833d047bb121396091e00287e2b6373716106da86ea99abf379"},
@@ -7288,18 +7294,19 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "
[[package]]
name = "pyopenssl"
version = "24.3.0"
version = "26.0.0"
description = "Python wrapper module around the OpenSSL library"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"},
{file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"},
{file = "pyopenssl-26.0.0-py3-none-any.whl", hash = "sha256:df94d28498848b98cc1c0ffb8ef1e71e40210d3b0a8064c9d29571ed2904bf81"},
{file = "pyopenssl-26.0.0.tar.gz", hash = "sha256:f293934e52936f2e3413b89c6ce36df66a0b34ae1ea3a053b8c5020ff2f513fc"},
]
[package.dependencies]
cryptography = ">=41.0.5,<45"
cryptography = ">=46.0.0,<47"
typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""}
[package.extras]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"]
@@ -8202,10 +8209,10 @@ files = [
]
[package.dependencies]
botocore = ">=1.37.4,<2.0a0"
botocore = ">=1.37.4,<2.0a.0"
[package.extras]
crt = ["botocore[crt] (>=1.37.4,<2.0a0)"]
crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"]
[[package]]
name = "safety"
@@ -8807,6 +8814,23 @@ markupsafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]
[[package]]
name = "workos"
version = "6.0.4"
description = "WorkOS Python Client"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "workos-6.0.4-py3-none-any.whl", hash = "sha256:548668b3702673536f853ba72a7b5bbbc269e467aaf9ac4f477b6e0177df5e21"},
{file = "workos-6.0.4.tar.gz", hash = "sha256:b0bfe8fd212b8567422c4ea3732eb33608794033eb3a69900c6b04db183c32d6"},
]
[package.dependencies]
cryptography = ">=46.0,<47.0"
httpx = ">=0.28,<1.0"
pyjwt = ">=2.12,<3.0"
[[package]]
name = "wrapt"
version = "1.17.3"
@@ -9400,4 +9424,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "077e89853cfe3a6d934841488cfa5a98ff6c92b71f74b817b71387d11559f143"
content-hash = "5781e74b0692aed541fe445d6713d2dfd792bb226789501420aac4a8cb45aa2a"
+3 -4
@@ -25,7 +25,7 @@ dependencies = [
"defusedxml==0.7.1",
"gunicorn==23.0.0",
"lxml==5.3.2",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.24",
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (==1.3.0)",
"sentry-sdk[django] (==2.56.0)",
@@ -38,7 +38,7 @@ dependencies = [
"matplotlib (==3.10.8)",
"reportlab (==4.4.10)",
"neo4j (==6.1.0)",
"cartography (==0.132.0)",
"cartography (==0.135.0)",
"gevent (==25.9.1)",
"werkzeug (==3.1.7)",
"sqlparse (==0.5.5)",
@@ -50,7 +50,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.25.0"
version = "1.25.2"
[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -62,7 +62,6 @@ django-silk = "5.3.2"
docker = "7.1.0"
filelock = "3.20.3"
freezegun = "1.5.1"
marshmallow = "==3.26.2"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "9.0.3"
@@ -0,0 +1,23 @@
from django.db import migrations

TASK_NAME = "attack-paths-cleanup-stale-scans"


def set_cleanup_priority(apps, schema_editor):
    PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
    PeriodicTask.objects.filter(name=TASK_NAME).update(priority=0)


def unset_cleanup_priority(apps, schema_editor):
    PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
    PeriodicTask.objects.filter(name=TASK_NAME).update(priority=None)


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0089_backfill_finding_group_status_muted"),
    ]

    operations = [
        migrations.RunPython(set_cleanup_priority, unset_cleanup_priority),
    ]
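As a usage note (not part of the diff): a data migration like this can be applied and rolled back through Django's standard management machinery; the rollback target below is the dependency declared in the migration itself.

from django.core.management import call_command

call_command("migrate", "api")  # apply; runs set_cleanup_priority
call_command("migrate", "api", "0089_backfill_finding_group_status_muted")  # roll back; runs unset_cleanup_priority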
+1 -1
@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.25.0
version: 1.25.2
description: |-
Prowler API specification.
+140 -1
@@ -57,6 +57,7 @@ from api.models import (
ProviderGroupMembership,
ProviderSecret,
Resource,
ResourceFindingMapping,
Role,
RoleProviderGroupRelationship,
SAMLConfiguration,
@@ -15465,7 +15466,7 @@ class TestFindingGroupViewSet:
attrs = data[0]["attributes"]
assert attrs["status"] == "FAIL"
assert attrs["muted"] is True
assert attrs["fail_count"] == 2
assert attrs["fail_count"] == 0
assert attrs["fail_muted_count"] == 2
assert attrs["pass_muted_count"] == 0
assert attrs["manual_muted_count"] == 0
@@ -16030,6 +16031,36 @@ class TestFindingGroupViewSet:
        # s3_bucket_public_access has 2 findings with 2 different resources
        assert len(data) == 2

    def test_resources_id_matches_resource_id_for_mapped_findings(
        self, authenticated_client, finding_groups_fixture
    ):
        """Findings with a resource expose the resource id as row id (hot path contract)."""
        response = authenticated_client.get(
            reverse(
                "finding-group-resources", kwargs={"pk": "s3_bucket_public_access"}
            ),
            {"filter[inserted_at]": TODAY},
        )
        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]
        assert data, "expected resources in response"

        resource_ids = set(
            ResourceFindingMapping.objects.filter(
                finding__check_id="s3_bucket_public_access",
            ).values_list("resource_id", flat=True)
        )
        finding_ids = set(
            Finding.objects.filter(
                check_id="s3_bucket_public_access",
            ).values_list("id", flat=True)
        )
        returned_ids = {item["id"] for item in data}
        assert returned_ids <= {str(rid) for rid in resource_ids}
        assert returned_ids.isdisjoint({str(fid) for fid in finding_ids})

    def test_resources_fields(self, authenticated_client, finding_groups_fixture):
        """Test resource fields (uid, name, service, region, type) have valid values."""
        response = authenticated_client.get(
@@ -17240,3 +17271,111 @@ class TestFindingGroupViewSet:
            attrs = item["attributes"]
            assert "finding_id" in attrs
            assert attrs["finding_id"] in rds_finding_ids

    def test_latest_resources_picks_scan_by_completed_at_when_overlap(
        self,
        authenticated_client,
        tenants_fixture,
        providers_fixture,
        resources_fixture,
    ):
        """Overlapping scans on the same provider must resolve to the scan
        with the latest completed_at, matching the /latest summary path and
        the daily-summary upsert (keyed on midnight(completed_at)). Picking
        by inserted_at here produced /resources and /latest reading from
        different scans and reporting diverging delta/new counts.
        """
        tenant = tenants_fixture[0]
        provider = providers_fixture[0]
        resource = resources_fixture[0]
        check_id = "overlap_regression_check"

        t0 = datetime.now(timezone.utc) - timedelta(hours=5)
        t1 = t0 + timedelta(hours=1)
        t1_end = t1 + timedelta(minutes=30)
        t2 = t0 + timedelta(hours=4)

        scan_long = Scan.objects.create(
            name="long overlap scan",
            provider=provider,
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.COMPLETED,
            tenant_id=tenant.id,
            started_at=t0,
            completed_at=t2,
        )
        scan_short = Scan.objects.create(
            name="short overlap scan",
            provider=provider,
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.COMPLETED,
            tenant_id=tenant.id,
            started_at=t1,
            completed_at=t1_end,
        )
        # inserted_at is auto_now_add so override with .update() to recreate
        # the overlap shape: short scan inserted later but completed earlier.
        Scan.all_objects.filter(pk=scan_long.pk).update(inserted_at=t0)
        Scan.all_objects.filter(pk=scan_short.pk).update(inserted_at=t1)
        scan_long.refresh_from_db()
        scan_short.refresh_from_db()
        assert scan_short.inserted_at > scan_long.inserted_at
        assert scan_long.completed_at > scan_short.completed_at

        long_finding = Finding.objects.create(
            tenant_id=tenant.id,
            uid=f"{check_id}_long",
            scan=scan_long,
            delta=None,
            status=Status.FAIL,
            status_extended="long scan finding",
            impact=Severity.high,
            impact_extended="high",
            severity=Severity.high,
            raw_result={"status": Status.FAIL, "severity": Severity.high},
            check_id=check_id,
            check_metadata={
                "CheckId": check_id,
                "checktitle": "Overlap regression",
                "Description": "Overlapping scan regression.",
            },
            first_seen_at=t0,
            muted=False,
        )
        long_finding.add_resources([resource])
        short_finding = Finding.objects.create(
            tenant_id=tenant.id,
            uid=f"{check_id}_short",
            scan=scan_short,
            delta="new",
            status=Status.FAIL,
            status_extended="short scan finding",
            impact=Severity.high,
            impact_extended="high",
            severity=Severity.high,
            raw_result={"status": Status.FAIL, "severity": Severity.high},
            check_id=check_id,
            check_metadata={
                "CheckId": check_id,
                "checktitle": "Overlap regression",
                "Description": "Overlapping scan regression.",
            },
            first_seen_at=t1,
            muted=False,
        )
        short_finding.add_resources([resource])

        response = authenticated_client.get(
            reverse(
                "finding-group-latest_resources",
                kwargs={"check_id": check_id},
            ),
        )
        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]
        assert len(data) == 1
        attrs = data[0]["attributes"]
        assert attrs["finding_id"] == str(long_finding.id)
        assert attrs["delta"] is None
+3 -2
@@ -4225,10 +4225,11 @@ class FindingGroupResourceSerializer(BaseSerializerV1):
Serializer for Finding Group Resources - resources within a finding group.
Returns individual resources with their current status, severity,
and timing information.
and timing information. Orphan findings (without any resource) expose the
finding id as `id` so the row stays identifiable in the UI.
"""
id = serializers.UUIDField(source="resource_id")
id = serializers.UUIDField(source="row_id")
resource = serializers.SerializerMethodField()
provider = serializers.SerializerMethodField()
finding_id = serializers.UUIDField()
+297 -62
@@ -35,11 +35,13 @@ from django.db.models import (
CharField,
Count,
DecimalField,
Exists,
ExpressionWrapper,
F,
IntegerField,
Max,
Min,
OuterRef,
Prefetch,
Q,
QuerySet,
@@ -415,7 +417,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.25.0"
spectacular_settings.VERSION = "1.25.2"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -7125,17 +7127,16 @@ class FindingGroupViewSet(BaseRLSViewSet):
output_field=IntegerField(),
)
# `pass_count`, `fail_count` and `manual_count` count *every* finding
# for the check (muted or not) so the aggregated `status` reflects the
# underlying check outcome regardless of mute state. Whether the group
# is actionable is signalled by the orthogonal `muted` flag below.
# `pass_count`, `fail_count` and `manual_count` only count non-muted
# findings. Muted findings are tracked separately via the
# `*_muted_count` fields.
return (
queryset.values("check_id")
.annotate(
severity_order=Max(severity_case),
pass_count=Count("id", filter=Q(status="PASS")),
fail_count=Count("id", filter=Q(status="FAIL")),
manual_count=Count("id", filter=Q(status="MANUAL")),
pass_count=Count("id", filter=Q(status="PASS", muted=False)),
fail_count=Count("id", filter=Q(status="FAIL", muted=False)),
manual_count=Count("id", filter=Q(status="MANUAL", muted=False)),
pass_muted_count=Count("id", filter=Q(status="PASS", muted=True)),
fail_muted_count=Count("id", filter=Q(status="FAIL", muted=True)),
manual_muted_count=Count("id", filter=Q(status="MANUAL", muted=True)),
@@ -7280,12 +7281,14 @@ class FindingGroupViewSet(BaseRLSViewSet):
# finding-level aggregation path.
row.pop("nonmuted_count", None)
# Compute aggregated status. Counts are inclusive of muted findings,
# so the underlying check outcome surfaces even when the group is
# fully muted.
if row.get("fail_count", 0) > 0:
# Compute aggregated status from non-muted counts first, then
# fall back to muted counts so fully-muted groups still reflect
# the underlying check outcome.
total_fail = row.get("fail_count", 0) + row.get("fail_muted_count", 0)
total_pass = row.get("pass_count", 0) + row.get("pass_muted_count", 0)
if total_fail > 0:
row["status"] = "FAIL"
elif row.get("pass_count", 0) > 0:
elif total_pass > 0:
row["status"] = "PASS"
else:
row["status"] = "MANUAL"
@@ -7385,9 +7388,12 @@ class FindingGroupViewSet(BaseRLSViewSet):
if computed_params.get("status") or computed_params.getlist("status__in"):
queryset = queryset.annotate(
total_fail=F("fail_count") + F("fail_muted_count"),
total_pass=F("pass_count") + F("pass_muted_count"),
).annotate(
aggregated_status=Case(
When(fail_count__gt=0, then=Value("FAIL")),
When(pass_count__gt=0, then=Value("PASS")),
When(total_fail__gt=0, then=Value("FAIL")),
When(total_pass__gt=0, then=Value("PASS")),
default=Value("MANUAL"),
output_field=CharField(),
)
@@ -7407,6 +7413,25 @@ class FindingGroupViewSet(BaseRLSViewSet):
        return filterset.qs

    def _resolve_finding_ids(self, filtered_queryset):
        """
        Materialize and request-cache the finding_ids list used to anchor
        RFM lookups.

        Turning `finding_id__in=Subquery(findings_qs)` into
        `finding_id__in=[uuid, ...]` nudges PostgreSQL out of a Merge Semi
        Join that ends up reading hundreds of thousands of RFM index entries
        just to post-filter tenant_id. Caching on the ViewSet instance (one
        instance per request) avoids duplicating the findings round-trip
        when several helpers build different RFM querysets from the same
        filtered set.
        """
        cached = getattr(self, "_finding_ids_cache", None)
        if cached is not None and cached[0] is filtered_queryset:
            return cached[1]
        finding_ids = list(filtered_queryset.order_by().values_list("id", flat=True))
        self._finding_ids_cache = (filtered_queryset, finding_ids)
        return finding_ids

    def _build_resource_mapping_queryset(
        self, filtered_queryset, resource_ids=None, tenant_id: str | None = None
    ):
@@ -7416,10 +7441,10 @@ class FindingGroupViewSet(BaseRLSViewSet):
Starting from ResourceFindingMapping avoids scanning all mappings
before applying check_id/date filters on findings.
"""
finding_ids = filtered_queryset.order_by().values("id")
finding_ids = self._resolve_finding_ids(filtered_queryset)
mapping_queryset = ResourceFindingMapping.objects.filter(
finding_id__in=Subquery(finding_ids)
finding_id__in=finding_ids
)
if tenant_id:
mapping_queryset = mapping_queryset.filter(tenant_id=tenant_id)
@@ -7578,6 +7603,53 @@ class FindingGroupViewSet(BaseRLSViewSet):
            .order_by(*ordering)
        )

    def _orphan_findings_queryset(self, filtered_queryset, finding_ids=None):
        """Findings in the filtered set with no ResourceFindingMapping entries."""
        orphan_qs = filtered_queryset.filter(
            ~Exists(ResourceFindingMapping.objects.filter(finding_id=OuterRef("pk")))
        )
        if finding_ids is not None:
            orphan_qs = orphan_qs.filter(id__in=finding_ids)
        return orphan_qs

    def _has_orphan_findings(self, filtered_queryset) -> bool:
        """Return True if any finding in the filtered set has no resource mapping."""
        return self._orphan_findings_queryset(filtered_queryset).exists()

    def _orphan_aggregation_values(self, orphan_queryset):
        """Raw rows for orphan findings; resource payload synthesized from metadata.

        check_metadata is stored with lowercase keys (see
        `prowler.lib.outputs.finding.Finding.get_metadata`) and
        `Finding.resource_groups` is already denormalized at ingest time.
        """
        return orphan_queryset.annotate(
            _provider_type=F("scan__provider__provider"),
            _provider_uid=F("scan__provider__uid"),
            _provider_alias=F("scan__provider__alias"),
            _svc=KeyTextTransform("servicename", "check_metadata"),
            _region=KeyTextTransform("region", "check_metadata"),
            _rtype=KeyTextTransform("resourcetype", "check_metadata"),
            _rgroup=F("resource_groups"),
        ).values(
            "id",
            "uid",
            "status",
            "severity",
            "delta",
            "muted",
            "muted_reason",
            "first_seen_at",
            "inserted_at",
            "_provider_type",
            "_provider_uid",
            "_provider_alias",
            "_svc",
            "_region",
            "_rtype",
            "_rgroup",
        )

    def _post_process_resources(self, resource_data):
        """Convert resource aggregation rows to API output."""
        results = []
@@ -7599,9 +7671,13 @@ class FindingGroupViewSet(BaseRLSViewSet):
else:
delta = None
resource_id = row["resource_id"]
finding_id = str(row["finding_id"]) if row.get("finding_id") else None
results.append(
{
"resource_id": row["resource_id"],
"row_id": resource_id,
"resource_id": resource_id,
"resource_uid": row["resource_uid"],
"resource_name": row["resource_name"],
"resource_service": row["resource_service"],
@@ -7620,9 +7696,46 @@ class FindingGroupViewSet(BaseRLSViewSet):
"muted": bool(row.get("muted", False)),
"muted_reason": row.get("muted_reason"),
"resource_group": row.get("resource_group", ""),
"finding_id": (
str(row["finding_id"]) if row.get("finding_id") else None
),
"finding_id": finding_id,
}
)
return results
def _post_process_orphans(self, orphan_rows):
"""Convert orphan finding rows into the same API shape as mapping rows."""
results = []
for row in orphan_rows:
status_val = row["status"]
status = status_val if status_val in ("FAIL", "PASS") else "MANUAL"
muted = bool(row["muted"])
delta_val = row.get("delta")
delta = delta_val if delta_val in ("new", "changed") and not muted else None
finding_id = str(row["id"])
results.append(
{
"row_id": finding_id,
"resource_id": None,
"resource_uid": row["uid"],
"resource_name": row["uid"],
"resource_service": row["_svc"] or "",
"resource_region": row["_region"] or "",
"resource_type": row["_rtype"] or "",
"provider_type": row["_provider_type"],
"provider_uid": row["_provider_uid"],
"provider_alias": row["_provider_alias"],
"status": status,
"severity": row["severity"],
"delta": delta,
"first_seen_at": row["first_seen_at"],
"last_seen_at": row["inserted_at"],
"muted": muted,
"muted_reason": row.get("muted_reason"),
"resource_group": row["_rgroup"] or "",
"finding_id": finding_id,
}
)
return results
@@ -7683,16 +7796,14 @@ class FindingGroupViewSet(BaseRLSViewSet):
sort_param, self._FINDING_GROUP_SORT_MAP
)
if ordering:
# status_order is annotated on demand so groups can be sorted by
# their aggregated status (FAIL > PASS > MANUAL), mirroring the
# priority used in _post_process_aggregation. Counts are
# inclusive of muted findings, so the underlying check outcome
# surfaces even for fully muted groups.
if any(field.lstrip("-") == "status_order" for field in ordering):
aggregated_queryset = aggregated_queryset.annotate(
total_fail_for_sort=F("fail_count") + F("fail_muted_count"),
total_pass_for_sort=F("pass_count") + F("pass_muted_count"),
).annotate(
status_order=Case(
When(fail_count__gt=0, then=Value(3)),
When(pass_count__gt=0, then=Value(2)),
When(total_fail_for_sort__gt=0, then=Value(3)),
When(total_pass_for_sort__gt=0, then=Value(2)),
default=Value(1),
output_field=IntegerField(),
)
@@ -7731,41 +7842,65 @@ class FindingGroupViewSet(BaseRLSViewSet):
def _paginated_resource_response(
self, request, filtered_queryset, resource_ids, tenant_id
):
"""Paginate and return resources.
"""Paginate and return resources, appending orphan findings when present.
Without sort: paginate lightweight resource IDs first, aggregate only the page.
With sort: build a lightweight ordering subquery (resource_id + sort keys),
paginate that, then aggregate full details only for the page.
Hot path (no orphans, or resource filter applied): resources come from
ResourceFindingMapping aggregation; the pre-existing behaviour is untouched.
Orphan fallback: findings without a mapping (e.g. IaC) are appended
after mapping rows as synthesised resource-like rows so they remain
visible in the UI without paying the aggregation cost on the hot path.
"""
sort_param = request.query_params.get("sort")
ordering = None
if sort_param:
ordering = self._validate_sort_fields(sort_param, self._RESOURCE_SORT_MAP)
if ordering:
if "resource_id" not in {field.lstrip("-") for field in ordering}:
ordering.append("resource_id")
validated = self._validate_sort_fields(sort_param, self._RESOURCE_SORT_MAP)
ordering = validated if validated else None
# Phase 1: lightweight aggregation with only sort keys, paginate
ordering_qs = self._build_resource_ordering_queryset(
filtered_queryset,
resource_ids=resource_ids,
tenant_id=tenant_id,
ordering=ordering,
)
page = self.paginate_queryset(ordering_qs)
if page is not None:
page_ids = [row["resource_id"] for row in page]
resource_data = self._build_resource_aggregation(
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
)
# Re-sort to match the page ordering
id_order = {rid: idx for idx, rid in enumerate(page_ids)}
results = self._post_process_resources(resource_data)
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
serializer = FindingGroupResourceSerializer(results, many=True)
return self.get_paginated_response(serializer.data)
# Resource filters can only match findings with resources; skip orphan
# detection entirely when they are present.
if resource_ids is not None:
return self._mapping_paginated_response(
request, filtered_queryset, resource_ids, tenant_id, ordering
)
page_ids = [row["resource_id"] for row in ordering_qs]
# Serve the mapping response directly and piggyback on the paginator
# count to detect orphan-only groups, instead of paying a separate
# has_mappings.exists() semi-join over ResourceFindingMapping on
# every non-IaC request. TODO: once the ephemeral resources strategy
# is decided, mixed groups should route to _combined_paginated_response.
response = self._mapping_paginated_response(
request, filtered_queryset, resource_ids, tenant_id, ordering
)
page = getattr(self.paginator, "page", None)
mapping_total = page.paginator.count if page is not None else None
if mapping_total == 0:
# Pure orphan group (e.g. IaC): synthesize resource-like rows.
return self._combined_paginated_response(
request, filtered_queryset, tenant_id, ordering
)
return response
def _mapping_paginated_response(
self, request, filtered_queryset, resource_ids, tenant_id, ordering
):
"""Mapping-only paginated response (original fast path)."""
if ordering:
if "resource_id" not in {field.lstrip("-") for field in ordering}:
ordering.append("resource_id")
# Phase 1: lightweight aggregation with only sort keys, paginate
ordering_qs = self._build_resource_ordering_queryset(
filtered_queryset,
resource_ids=resource_ids,
tenant_id=tenant_id,
ordering=ordering,
)
page = self.paginate_queryset(ordering_qs)
if page is not None:
page_ids = [row["resource_id"] for row in page]
resource_data = self._build_resource_aggregation(
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
)
@@ -7773,10 +7908,18 @@ class FindingGroupViewSet(BaseRLSViewSet):
results = self._post_process_resources(resource_data)
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
serializer = FindingGroupResourceSerializer(results, many=True)
return Response(serializer.data)
return self.get_paginated_response(serializer.data)
page_ids = [row["resource_id"] for row in ordering_qs]
resource_data = self._build_resource_aggregation(
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
)
id_order = {rid: idx for idx, rid in enumerate(page_ids)}
results = self._post_process_resources(resource_data)
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
serializer = FindingGroupResourceSerializer(results, many=True)
return Response(serializer.data)
# No sort (or only empty sort fragments): paginate lightweight resource IDs
# first, aggregate only the page.
mapping_qs = self._build_resource_mapping_queryset(
filtered_queryset, resource_ids=resource_ids, tenant_id=tenant_id
)
@@ -7804,6 +7947,95 @@ class FindingGroupViewSet(BaseRLSViewSet):
serializer = FindingGroupResourceSerializer(results, many=True)
return Response(serializer.data)
def _combined_paginated_response(
self, request, filtered_queryset, tenant_id, ordering
):
"""Mapping rows + orphan findings appended at end.
Orphans sit after mapping rows regardless of sort. This keeps the
mapping-only code path intact for checks that have no orphans (the
common case) and avoids paying UNION/coalesce costs there.
"""
mapping_qs = self._build_resource_mapping_queryset(
filtered_queryset, resource_ids=None, tenant_id=tenant_id
)
mapping_count = mapping_qs.values("resource_id").distinct().count()
orphan_ids = list(
self._orphan_findings_queryset(filtered_queryset)
.order_by("id")
.values_list("id", flat=True)
)
orphan_count = len(orphan_ids)
total = mapping_count + orphan_count
# Paginate a simple [0..total) index sequence so DRF produces proper
# links/meta; then slice mapping / orphan sources accordingly.
page = self.paginate_queryset(range(total))
page_indices = list(page) if page is not None else list(range(total))
mapping_indices = [i for i in page_indices if i < mapping_count]
orphan_positions = [
i - mapping_count for i in page_indices if i >= mapping_count
]
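# e.g. with mapping_count=7 and orphan_count=3, a page covering indices
# 5..8 yields mapping_indices == [5, 6] and orphan_positions == [0, 1],
# i.e. the page mixes the tail of the mapping rows with the head of the orphans.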
mapping_results = []
if mapping_indices:
start = mapping_indices[0]
stop = mapping_indices[-1] + 1
if ordering:
ordering_fields = list(ordering)
if "resource_id" not in {
field.lstrip("-") for field in ordering_fields
}:
ordering_fields.append("resource_id")
ordered_qs = self._build_resource_ordering_queryset(
filtered_queryset,
resource_ids=None,
tenant_id=tenant_id,
ordering=ordering_fields,
)
slice_rids = [row["resource_id"] for row in ordered_qs[start:stop]]
else:
slice_rids = list(
mapping_qs.values_list("resource_id", flat=True)
.distinct()
.order_by("resource_id")[start:stop]
)
if slice_rids:
resource_data = self._build_resource_aggregation(
filtered_queryset,
resource_ids=slice_rids,
tenant_id=tenant_id,
)
rows_by_rid = {row["resource_id"]: row for row in resource_data}
ordered_rows = [
rows_by_rid[rid] for rid in slice_rids if rid in rows_by_rid
]
mapping_results = self._post_process_resources(ordered_rows)
orphan_results = []
if orphan_positions:
slice_fids = [orphan_ids[pos] for pos in orphan_positions]
raw_rows = list(
self._orphan_aggregation_values(
self._orphan_findings_queryset(
filtered_queryset, finding_ids=slice_fids
)
)
)
rows_by_fid = {row["id"]: row for row in raw_rows}
ordered_rows = [
rows_by_fid[fid] for fid in slice_fids if fid in rows_by_fid
]
orphan_results = self._post_process_orphans(ordered_rows)
results = mapping_results + orphan_results
serializer = FindingGroupResourceSerializer(results, many=True)
if page is not None:
return self.get_paginated_response(serializer.data)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
"""
List finding groups with aggregation and filtering.
@@ -7933,10 +8165,13 @@ class FindingGroupViewSet(BaseRLSViewSet):
tenant_id = request.tenant_id
queryset = self._get_finding_queryset()
# Get latest completed scan for each provider
# Order by -completed_at (matching the /latest summary path and the
# daily summary upsert keyed on midnight(completed_at)) so that
# overlapping scans do not make /resources and /latest read from
# different scans and report diverging counts.
latest_scan_ids = (
Scan.objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
.order_by("provider_id", "-inserted_at")
.order_by("provider_id", "-completed_at", "-inserted_at")
.distinct("provider_id")
.values_list("id", flat=True)
)
@@ -17,8 +17,10 @@ celery_app.config_from_object("django.conf:settings", namespace="CELERY")
celery_app.conf.update(result_extended=True, result_expires=None)
celery_app.conf.broker_transport_options = {
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT,
"queue_order_strategy": "priority",
}
celery_app.conf.task_default_priority = 6
celery_app.conf.result_backend_transport_options = {
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
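With `queue_order_strategy` set to `priority`, Kombu's Redis transport serves lower priority numbers first (its default priority steps are 0, 3, 6 and 9), so a default of 6 leaves headroom for urgent work such as cleanup tasks. A minimal sketch of how a task could jump ahead of default-priority work under these settings (the task and import path are hypothetical):
```python
from config.celery import celery_app  # hypothetical import path

@celery_app.task
def cleanup_task():
    """Housekeeping work that should not wait behind default-priority tasks."""

# 0 is the highest-priority step on the Redis transport; tasks enqueued
# without an explicit priority fall back to task_default_priority (6 above).
cleanup_task.apply_async(priority=0)
```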
@@ -1,6 +1,8 @@
# Portions of this file are based on code from the Cartography project
# (https://github.com/cartography-cncf/cartography), which is licensed under the Apache 2.0 License.
import time
from typing import Any
import aioboto3
@@ -33,7 +35,7 @@ def start_aws_ingestion(
For the scan progress updates:
- The caller of this function (`tasks.jobs.attack_paths.scan.run`) has set it to 2.
- When the control returns to the caller, it will be set to 95.
- When the control returns to the caller, it will be set to 93.
"""
# Initialize variables common to all jobs
@@ -89,34 +91,50 @@ def start_aws_ingestion(
logger.info(
f"Syncing function permission_relationships for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.RESOURCE_FUNCTIONS["permission_relationships"](**sync_args)
logger.info(
f"Synced function permission_relationships for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 88)
if "resourcegroupstaggingapi" in requested_syncs:
logger.info(
f"Syncing function resourcegroupstaggingapi for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.RESOURCE_FUNCTIONS["resourcegroupstaggingapi"](**sync_args)
logger.info(
f"Synced function resourcegroupstaggingapi for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 89)
logger.info(
f"Syncing ec2_iaminstanceprofile scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_ec2_iaminstanceprofile.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced ec2_iaminstanceprofile scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 90)
logger.info(
f"Syncing lambda_ecr analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_analysis_job(
"aws_lambda_ecr.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lambda_ecr analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
if all(
s in requested_syncs
@@ -125,25 +143,34 @@ def start_aws_ingestion(
logger.info(
f"Syncing lb_container_exposure scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_lb_container_exposure.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lb_container_exposure scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
if all(s in requested_syncs for s in ["ec2:network_acls", "ec2:load_balancer_v2"]):
logger.info(
f"Syncing lb_nacl_direct scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_lb_nacl_direct.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lb_nacl_direct scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 91)
logger.info(f"Syncing metadata for AWS account {prowler_api_provider.uid}")
t0 = time.perf_counter()
cartography_aws.merge_module_sync_metadata(
neo4j_session,
group_type="AWSAccount",
@@ -152,24 +179,23 @@ def start_aws_ingestion(
update_tag=cartography_config.update_tag,
stat_handler=cartography_aws.stat_handler,
)
logger.info(
f"Synced metadata for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 92)
# Removing the extra field added earlier
del common_job_parameters["AWS_ID"]
logger.info(f"Syncing cleanup_job for AWS account {prowler_api_provider.uid}")
cartography_aws.run_cleanup_job(
"aws_post_ingestion_principals_cleanup.json",
neo4j_session,
common_job_parameters,
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93)
logger.info(f"Syncing analysis for AWS account {prowler_api_provider.uid}")
t0 = time.perf_counter()
cartography_aws._perform_aws_analysis(
requested_syncs, neo4j_session, common_job_parameters
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 94)
logger.info(
f"Synced analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93)
return failed_syncs
@@ -234,6 +260,8 @@ def sync_aws_account(
)
try:
func_t0 = time.perf_counter()
# `ecr:image_layers` uses `aioboto3_session` instead of `boto3_session`
if func_name == "ecr:image_layers":
cartography_aws.RESOURCE_FUNCTIONS[func_name](
@@ -257,7 +285,15 @@ def sync_aws_account(
else:
cartography_aws.RESOURCE_FUNCTIONS[func_name](**sync_args)
logger.info(
f"Synced function {func_name} for AWS account {prowler_api_provider.uid} in {time.perf_counter() - func_t0:.3f}s"
)
except Exception as e:
logger.info(
f"Synced function {func_name} for AWS account {prowler_api_provider.uid} in {time.perf_counter() - func_t0:.3f}s (FAILED)"
)
exception_message = utils.stringify_exception(
e, f"Exception for AWS sync function: {func_name}"
)
@@ -8,9 +8,9 @@ from tasks.jobs.attack_paths import aws
# Batch size for Neo4j write operations (resource labeling, cleanup)
BATCH_SIZE = env.int("ATTACK_PATHS_BATCH_SIZE", 1000)
# Batch size for Postgres findings fetch (keyset pagination page size)
FINDINGS_BATCH_SIZE = env.int("ATTACK_PATHS_FINDINGS_BATCH_SIZE", 500)
FINDINGS_BATCH_SIZE = env.int("ATTACK_PATHS_FINDINGS_BATCH_SIZE", 1000)
# Batch size for temp-to-tenant graph sync (nodes and relationships per cursor page)
SYNC_BATCH_SIZE = env.int("ATTACK_PATHS_SYNC_BATCH_SIZE", 250)
SYNC_BATCH_SIZE = env.int("ATTACK_PATHS_SYNC_BATCH_SIZE", 1000)
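# Both batch sizes remain overridable per deployment, e.g. by exporting
# ATTACK_PATHS_SYNC_BATCH_SIZE=500 in the worker environment (names taken
# from the env.int() calls above).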
# Neo4j internal labels (Prowler-specific, not provider-specific)
# - `Internet`: Singleton node representing external internet access for exposed-resource queries
@@ -12,6 +12,7 @@ from typing import Any, Generator
from uuid import UUID
import neo4j
from cartography.config import Config as CartographyConfig
from celery.utils.log import get_task_logger
from tasks.jobs.attack_paths.config import (
@@ -86,17 +87,21 @@ def analysis(
prowler_api_provider: Provider,
scan_id: str,
config: CartographyConfig,
) -> None:
) -> tuple[int, int]:
"""
Main entry point for Prowler findings analysis.
Adds resource labels and loads findings.
Returns (labeled_nodes, findings_loaded).
"""
add_resource_label(
total_labeled = add_resource_label(
neo4j_session, prowler_api_provider.provider, str(prowler_api_provider.uid)
)
findings_data = stream_findings_with_resources(prowler_api_provider, scan_id)
load_findings(neo4j_session, findings_data, prowler_api_provider, config)
total_loaded = load_findings(
neo4j_session, findings_data, prowler_api_provider, config
)
return total_labeled, total_loaded
def add_resource_label(
@@ -146,12 +151,11 @@ def load_findings(
findings_batches: Generator[list[dict[str, Any]], None, None],
prowler_api_provider: Provider,
config: CartographyConfig,
) -> None:
) -> int:
"""Load Prowler findings into the graph, linking them to resources."""
query = render_cypher_template(
INSERT_FINDING_TEMPLATE,
{
"__ROOT_NODE_LABEL__": get_root_node_label(prowler_api_provider.provider),
"__NODE_UID_FIELD__": get_node_uid_field(prowler_api_provider.provider),
"__RESOURCE_LABEL__": get_provider_resource_label(
prowler_api_provider.provider
@@ -160,7 +164,6 @@ def load_findings(
)
parameters = {
"provider_uid": str(prowler_api_provider.uid),
"last_updated": config.update_tag,
"prowler_version": ProwlerConfig.prowler_version,
}
@@ -178,6 +181,7 @@ def load_findings(
neo4j_session.run(query, parameters)
logger.info(f"Finished loading {total_records} records in {batch_num} batches")
return total_records
# Findings Streaming (Generator-based)
@@ -248,7 +252,9 @@ def _fetch_findings_batch(
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
# Use `all_objects` to get `Findings` even on soft-deleted `Providers`
# though the provider has already been validated as active in this context
qs = FindingModel.all_objects.filter(scan_id=scan_id).order_by("id")
qs = FindingModel.all_objects.filter(
tenant_id=tenant_id, scan_id=scan_id
).order_by("id")
if after_id is not None:
qs = qs.filter(id__gt=after_id)
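The `order_by("id")` plus `id__gt` filter is keyset pagination: each page resumes strictly after the last id seen, so no offset scan is needed. A sketch of the consuming loop under that contract (the wrapper name and argument order are assumed, not the actual `stream_findings_with_resources` body):
```python
def iter_finding_batches(tenant_id: str, scan_id: str):
    """Assumed consumer: walk a scan's findings in id order, one page at a time."""
    after_id = None
    while True:
        batch = _fetch_findings_batch(tenant_id, scan_id, after_id)
        if not batch:
            break
        yield batch
        after_id = batch[-1].id  # keyset cursor: resume after the last row
```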
@@ -32,17 +32,14 @@ ADD_RESOURCE_LABEL_TEMPLATE = """
"""
INSERT_FINDING_TEMPLATE = f"""
MATCH (account:__ROOT_NODE_LABEL__ {{id: $provider_uid}})
UNWIND $findings_data AS finding_data
OPTIONAL MATCH (account)-->(resource_by_uid:__RESOURCE_LABEL__)
WHERE resource_by_uid.__NODE_UID_FIELD__ = finding_data.resource_uid
WITH account, finding_data, resource_by_uid
OPTIONAL MATCH (resource_by_uid:__RESOURCE_LABEL__ {{__NODE_UID_FIELD__: finding_data.resource_uid}})
WITH finding_data, resource_by_uid
OPTIONAL MATCH (account)-->(resource_by_id:__RESOURCE_LABEL__)
OPTIONAL MATCH (resource_by_id:__RESOURCE_LABEL__ {{id: finding_data.resource_uid}})
WHERE resource_by_uid IS NULL
AND resource_by_id.id = finding_data.resource_uid
WITH account, finding_data, COALESCE(resource_by_uid, resource_by_id) AS resource
WITH finding_data, COALESCE(resource_by_uid, resource_by_id) AS resource
WHERE resource IS NOT NULL
MERGE (finding:{PROWLER_FINDING_LABEL} {{id: finding_data.id}})
@@ -55,6 +55,7 @@ exception propagates to Celery.
import logging
import time
from typing import Any
from cartography.config import Config as CartographyConfig
@@ -144,6 +145,12 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
attack_paths_scan, task_id, tenant_cartography_config
)
scan_t0 = time.perf_counter()
logger.info(
f"Starting Attack Paths scan ({attack_paths_scan.id}) for "
f"{prowler_api_provider.provider.upper()} provider {prowler_api_provider.id}"
)
subgraph_dropped = False
sync_completed = False
provider_gated = False
@@ -169,6 +176,7 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 2)
# The real scan, which iterates over cloud services
t0 = time.perf_counter()
ingestion_exceptions = utils.call_within_event_loop(
cartography_ingestion_function,
tmp_neo4j_session,
@@ -177,19 +185,23 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
prowler_sdk_provider,
attack_paths_scan,
)
logger.info(
f"Cartography ingestion completed in {time.perf_counter() - t0:.3f}s "
f"(failed_syncs={len(ingestion_exceptions)})"
)
# Post-processing: kept just to stay more Cartography-compliant
logger.info(
f"Syncing Cartography ontology for AWS account {prowler_api_provider.uid}"
)
cartography_ontology.run(tmp_neo4j_session, tmp_cartography_config)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 95)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 94)
logger.info(
f"Syncing Cartography analysis for AWS account {prowler_api_provider.uid}"
)
cartography_analysis.run(tmp_neo4j_session, tmp_cartography_config)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 96)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 95)
# Creating Internet node and CAN_ACCESS relationships
logger.info(
@@ -198,14 +210,20 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
internet.analysis(
tmp_neo4j_session, prowler_api_provider, tmp_cartography_config
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 96)
# Adding Prowler Finding nodes and relationships
logger.info(
f"Syncing Prowler analysis for AWS account {prowler_api_provider.uid}"
)
findings.analysis(
t0 = time.perf_counter()
labeled_nodes, findings_loaded = findings.analysis(
tmp_neo4j_session, prowler_api_provider, scan_id, tmp_cartography_config
)
logger.info(
f"Prowler analysis completed in {time.perf_counter() - t0:.3f}s "
f"(findings={findings_loaded}, labeled_nodes={labeled_nodes})"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 97)
logger.info(
@@ -227,22 +245,33 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
logger.info(f"Deleting existing provider graph in {tenant_database_name}")
db_utils.set_provider_graph_data_ready(attack_paths_scan, False)
provider_gated = True
graph_database.drop_subgraph(
t0 = time.perf_counter()
deleted_nodes = graph_database.drop_subgraph(
database=tenant_database_name,
provider_id=str(prowler_api_provider.id),
)
logger.info(
f"Deleted existing provider graph in {time.perf_counter() - t0:.3f}s "
f"(deleted_nodes={deleted_nodes})"
)
subgraph_dropped = True
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 98)
logger.info(
f"Syncing graph from {tmp_database_name} into {tenant_database_name}"
)
sync.sync_graph(
t0 = time.perf_counter()
sync_result = sync.sync_graph(
source_database=tmp_database_name,
target_database=tenant_database_name,
tenant_id=str(prowler_api_provider.tenant_id),
provider_id=str(prowler_api_provider.id),
)
logger.info(
f"Synced graph in {time.perf_counter() - t0:.3f}s "
f"(nodes={sync_result['nodes']}, relationships={sync_result['relationships']})"
)
sync_completed = True
db_utils.set_graph_data_ready(attack_paths_scan, True)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 99)
@@ -250,17 +279,16 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
logger.info(f"Clearing Neo4j cache for database {tenant_database_name}")
graph_database.clear_cache(tenant_database_name)
logger.info(
f"Completed Cartography ({attack_paths_scan.id}) for "
f"{prowler_api_provider.provider.upper()} provider {prowler_api_provider.id}"
)
logger.info(f"Dropping temporary Neo4j database {tmp_database_name}")
graph_database.drop_database(tmp_database_name)
db_utils.finish_attack_paths_scan(
attack_paths_scan, StateChoices.COMPLETED, ingestion_exceptions
)
logger.info(
f"Attack Paths scan completed in {time.perf_counter() - scan_t0:.3f}s "
f"(state=completed, failed_syncs={len(ingestion_exceptions)})"
)
return ingestion_exceptions
except Exception as e:
@@ -5,6 +5,8 @@ This module handles syncing graph data from temporary scan databases
to the tenant database, adding provider isolation labels and properties.
"""
import time
from collections import defaultdict
from typing import Any
@@ -81,6 +83,7 @@ def sync_nodes(
Source and target sessions are opened sequentially per batch to avoid
holding two Bolt connections simultaneously for the entire sync duration.
"""
t0 = time.perf_counter()
last_id = -1
total_synced = 0
@@ -117,7 +120,7 @@ def sync_nodes(
total_synced += batch_count
logger.info(
f"Synced {total_synced} nodes from {source_database} to {target_database}"
f"Synced {total_synced} nodes from {source_database} to {target_database} in {time.perf_counter() - t0:.3f}s"
)
return total_synced
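A sketch of the sequential-session pattern the docstring describes, so only one Bolt connection is open at a time (the driver handle and query names are assumed placeholders):
```python
# Hypothetical sketch; READ_BATCH and WRITE_BATCH are placeholder queries.
last_id = -1
total_synced = 0
while True:
    with driver.session(database=source_database) as src:
        rows = src.run(READ_BATCH, last_id=last_id, limit=SYNC_BATCH_SIZE).data()
    if not rows:
        break
    with driver.session(database=target_database) as dst:
        dst.run(WRITE_BATCH, rows=rows)
    total_synced += len(rows)
    last_id = rows[-1]["id"]
```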
@@ -136,6 +139,7 @@ def sync_relationships(
Source and target sessions are opened sequentially per batch to avoid
holding two Bolt connections simultaneously for the entire sync duration.
"""
t0 = time.perf_counter()
last_id = -1
total_synced = 0
@@ -166,7 +170,7 @@ def sync_relationships(
total_synced += batch_count
logger.info(
f"Synced {total_synced} relationships from {source_database} to {target_database}"
f"Synced {total_synced} relationships from {source_database} to {target_database} in {time.perf_counter() - t0:.3f}s"
)
return total_synced
@@ -752,11 +752,19 @@ def _process_finding_micro_batch(
)
if mappings_to_create:
ResourceFindingMapping.objects.bulk_create(
created_mappings = ResourceFindingMapping.objects.bulk_create(
mappings_to_create,
batch_size=SCAN_DB_BATCH_SIZE,
ignore_conflicts=True,
unique_fields=["tenant_id", "resource_id", "finding_id"],
)
inserted = sum(1 for m in created_mappings if m.pk)
if inserted != len(mappings_to_create):
logger.error(
f"scan {scan_instance.id}: expected "
f"{len(mappings_to_create)} ResourceFindingMapping rows, "
f"inserted {inserted}. Rolling back micro-batch."
)
# Update finding denormalized arrays
findings_to_update = []
@@ -1804,11 +1812,9 @@ def aggregate_finding_group_summaries(tenant_id: str, scan_id: str):
)
# Aggregate findings by check_id for this scan.
# `pass_count`, `fail_count` and `manual_count` count *every* finding
# in this group, regardless of mute state, so the aggregated `status`
# always reflects the underlying check outcome (FAIL > PASS > MANUAL)
# even when the group is fully muted. The orthogonal `muted` flag is
# what tells whether the group has any actionable (non-muted) findings.
# `pass_count`, `fail_count` and `manual_count` only count non-muted
# findings. Muted findings are tracked separately via the
# `*_muted_count` fields.
aggregated = (
Finding.objects.filter(
tenant_id=tenant_id,
@@ -1817,9 +1823,9 @@ def aggregate_finding_group_summaries(tenant_id: str, scan_id: str):
.values("check_id")
.annotate(
severity_order=Max(severity_case),
pass_count=Count("id", filter=Q(status="PASS")),
fail_count=Count("id", filter=Q(status="FAIL")),
manual_count=Count("id", filter=Q(status="MANUAL")),
pass_count=Count("id", filter=Q(status="PASS", muted=False)),
fail_count=Count("id", filter=Q(status="FAIL", muted=False)),
manual_count=Count("id", filter=Q(status="MANUAL", muted=False)),
pass_muted_count=Count("id", filter=Q(status="PASS", muted=True)),
fail_muted_count=Count("id", filter=Q(status="FAIL", muted=True)),
manual_muted_count=Count("id", filter=Q(status="MANUAL", muted=True)),
@@ -38,11 +38,14 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.finish_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.sync.sync_graph")
@patch("tasks.jobs.attack_paths.scan.graph_database.drop_subgraph")
@patch(
"tasks.jobs.attack_paths.scan.sync.sync_graph",
return_value={"nodes": 0, "relationships": 0},
)
@patch("tasks.jobs.attack_paths.scan.graph_database.drop_subgraph", return_value=0)
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -188,7 +191,7 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -287,7 +290,7 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -390,7 +393,7 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -489,14 +492,17 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.sync.sync_graph")
@patch(
"tasks.jobs.attack_paths.scan.sync.sync_graph",
return_value={"nodes": 0, "relationships": 0},
)
@patch(
"tasks.jobs.attack_paths.scan.graph_database.drop_subgraph",
side_effect=RuntimeError("drop failed"),
)
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -609,7 +615,7 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.graph_database.drop_subgraph")
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -718,11 +724,14 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.sync.sync_graph")
@patch(
"tasks.jobs.attack_paths.scan.sync.sync_graph",
return_value={"nodes": 0, "relationships": 0},
)
@patch("tasks.jobs.attack_paths.scan.graph_database.drop_subgraph")
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -833,14 +842,17 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.sync.sync_graph")
@patch(
"tasks.jobs.attack_paths.scan.sync.sync_graph",
return_value={"nodes": 0, "relationships": 0},
)
@patch(
"tasks.jobs.attack_paths.scan.graph_database.drop_subgraph",
side_effect=RuntimeError("drop failed"),
)
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -1274,10 +1286,6 @@ class TestAttackPathsFindingsHelpers:
mock_session = MagicMock()
with (
patch(
"tasks.jobs.attack_paths.findings.get_root_node_label",
return_value="AWSAccount",
),
patch(
"tasks.jobs.attack_paths.findings.get_node_uid_field",
return_value="arn",
@@ -1294,7 +1302,6 @@ class TestAttackPathsFindingsHelpers:
assert mock_session.run.call_count == 2
for call_args in mock_session.run.call_args_list:
params = call_args.args[1]
assert params["provider_uid"] == str(provider.uid)
assert params["last_updated"] == config.update_tag
assert "findings_data" in params
@@ -1673,10 +1680,6 @@ class TestAttackPathsFindingsHelpers:
yield # Make it a generator
with (
patch(
"tasks.jobs.attack_paths.findings.get_root_node_label",
return_value="AWSAccount",
),
patch(
"tasks.jobs.attack_paths.findings.get_node_uid_field",
return_value="arn",
@@ -15,8 +15,7 @@ This document describes the internal architecture of Prowler Lighthouse AI, enab
Lighthouse AI operates as a Langchain-based agent that connects Large Language Models (LLMs) with Prowler security data through the Model Context Protocol (MCP).
<img className="block dark:hidden" src="/images/lighthouse-architecture-light.png" alt="Prowler Lighthouse Architecture" />
<img className="hidden dark:block" src="/images/lighthouse-architecture-dark.png" alt="Prowler Lighthouse Architecture" />
![Prowler Lighthouse Architecture](/images/lighthouse-architecture.png)
### Three-Tier Architecture
@@ -12,6 +12,24 @@
"dark": "/images/prowler-logo-white.png",
"light": "/images/prowler-logo-black.png"
},
"contextual": {
"options": [
"copy",
"view",
{
"title": "Request a feature",
"description": "Open a feature request on GitHub",
"icon": "plus",
"href": "https://github.com/prowler-cloud/prowler/issues/new?template=feature-request.yml"
},
{
"title": "Report an issue",
"description": "Open a bug report on GitHub",
"icon": "bug",
"href": "https://github.com/prowler-cloud/prowler/issues/new?template=bug_report.yml"
}
]
},
"navigation": {
"tabs": [
{
@@ -133,6 +151,7 @@
]
},
"user-guide/tutorials/prowler-app-attack-paths",
"user-guide/tutorials/prowler-app-finding-groups",
"user-guide/tutorials/prowler-cloud-public-ips",
{
"group": "Tutorials",
@@ -121,8 +121,8 @@ To update the environment file:
Edit the `.env` file and change version values:
```env
PROWLER_UI_VERSION="5.23.0"
PROWLER_API_VERSION="5.23.0"
PROWLER_UI_VERSION="5.24.1"
PROWLER_API_VERSION="5.24.1"
```
<Note>
@@ -59,6 +59,10 @@ Prowler Lighthouse AI is powerful, but there are limitations:
- **NextJS session dependence**: If your Prowler application session expires or logs out, Lighthouse AI will error out. Refresh and log back in to continue.
- **Response quality**: The response quality depends on the selected LLM provider and model. Choose models with strong tool-calling capabilities for best results. We recommend the `gpt-5` model from OpenAI.
## Architecture
![Prowler Lighthouse Architecture](/images/lighthouse-architecture.png)
## Extending Lighthouse AI
Lighthouse AI retrieves data through Prowler MCP. To add new capabilities, extend the Prowler MCP Server with additional tools and Lighthouse AI discovers them automatically.
@@ -46,8 +46,7 @@ Search and retrieve official Prowler documentation:
The following diagram illustrates the Prowler MCP Server architecture and its integration points:
<img className="block dark:hidden" src="/images/prowler_mcp_schema_light.png" alt="Prowler MCP Server Schema" />
<img className="hidden dark:block" src="/images/prowler_mcp_schema_dark.png" alt="Prowler MCP Server Schema" />
![Prowler MCP Server Schema](/images/prowler_mcp_schema.png)
The architecture shows how AI assistants connect through the MCP protocol to access Prowler's three main components:
- Prowler Cloud/App for security operations
@@ -0,0 +1,37 @@
flowchart TB
browser([Browser])
subgraph NEXTJS["Next.js Server"]
route["API Route<br/>(auth + context assembly)"]
agent["LangChain Agent"]
subgraph TOOLS["Agent Tools"]
metatools["Meta-tools<br/>describe_tool / execute_tool / load_skill"]
end
mcpclient["MCP Client<br/>(HTTP transport)"]
end
llm["LLM Provider<br/>(OpenAI / Bedrock / OpenAI-compatible)"]
subgraph MCP["Prowler MCP Server"]
app_tools["prowler_app_* tools<br/>(auth required)"]
hub_tools["prowler_hub_* tools<br/>(no auth)"]
docs_tools["prowler_docs_* tools<br/>(no auth)"]
end
api["Prowler API"]
hub["hub.prowler.com"]
docs["docs.prowler.com<br/>(Mintlify)"]
browser <-->|SSE stream| route
route --> agent
agent <-->|LLM API| llm
agent --> metatools
metatools --> mcpclient
mcpclient -->|MCP HTTP · Bearer token<br/>for prowler_app_* only| app_tools
mcpclient -->|MCP HTTP| hub_tools
mcpclient -->|MCP HTTP| docs_tools
app_tools -->|REST| api
hub_tools -->|REST| hub
docs_tools -->|REST| docs
@@ -23,6 +23,8 @@ flowchart TB
user --> ui
user --> cli
ui -->|REST| api
ui -->|MCP HTTP| mcp
mcp -->|REST| api
api --> pg
api --> valkey
beat -->|enqueue jobs| valkey
@@ -31,7 +33,5 @@ flowchart TB
worker -->|Attack Paths| neo4j
worker -->|invokes| sdk
cli --> sdk
api -. AI tools .-> mcp
mcp -. context .-> api
sdk --> providers
@@ -0,0 +1,29 @@
flowchart LR
subgraph HOSTS["MCP Hosts"]
chat["Chat Interfaces<br/>(Claude Desktop, LobeChat)"]
ide["IDEs and Code Editors<br/>(Claude Code, Cursor)"]
apps["Other AI Applications<br/>(5ire, custom agents)"]
end
subgraph MCP["Prowler MCP Server"]
app_tools["prowler_app_* tools<br/>(JWT or API key auth)<br/>Findings · Providers · Scans<br/>Resources · Muting · Compliance<br/>Attack Paths"]
hub_tools["prowler_hub_* tools<br/>(no auth)<br/>Checks Catalog · Check Code<br/>Fixers · Compliance Frameworks"]
docs_tools["prowler_docs_* tools<br/>(no auth)<br/>Search · Document Retrieval"]
end
api["Prowler API<br/>(REST)"]
hub["hub.prowler.com<br/>(REST)"]
docs["docs.prowler.com<br/>(Mintlify)"]
chat -->|STDIO or HTTP| app_tools
chat -->|STDIO or HTTP| hub_tools
chat -->|STDIO or HTTP| docs_tools
ide -->|STDIO or HTTP| app_tools
ide -->|STDIO or HTTP| hub_tools
ide -->|STDIO or HTTP| docs_tools
apps -->|STDIO or HTTP| app_tools
apps -->|STDIO or HTTP| hub_tools
apps -->|STDIO or HTTP| docs_tools
app_tools -->|REST| api
hub_tools -->|REST| hub
docs_tools -->|REST| docs
@@ -33,6 +33,41 @@ To scan a particular AWS region with Prowler, use:
prowler aws -f/--region eu-west-1 us-east-1
```
### Excluding Specific Regions
To scan all supported AWS regions except a specific subset, use the `--excluded-region` flag:
```console
prowler aws --excluded-region eu-west-1 me-south-1
```
You can also configure the exclusion list with the `PROWLER_AWS_DISALLOWED_REGIONS` environment variable as a comma-separated list:
```console
export PROWLER_AWS_DISALLOWED_REGIONS="eu-west-1,me-south-1"
prowler aws
```
Or with the AWS provider configuration in `config.yaml`:
```yaml
aws:
disallowed_regions:
- eu-west-1
- me-south-1
```
When more than one source is set, precedence is:
1. `--excluded-region`
2. `PROWLER_AWS_DISALLOWED_REGIONS`
3. `aws.disallowed_regions` in `config.yaml`
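For example, assuming the highest-precedence source fully replaces the lower ones:
```console
# The flag wins over the environment variable, so only ap-south-1 is excluded:
export PROWLER_AWS_DISALLOWED_REGIONS="eu-west-1,me-south-1"
prowler aws --excluded-region ap-south-1
```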
<Note>
For self-hosted App or API-triggered scans, set `PROWLER_AWS_DISALLOWED_REGIONS` in the runtime environment of the backend scan containers such as `api` and `worker`. The `ui` container does not enforce AWS region selection.
</Note>
### AWS Credentials Configuration
For details on configuring AWS credentials, refer to the following [Botocore](https://github.com/boto/botocore) [file](https://github.com/boto/botocore/blob/22a19ea7c4c2c4dd7df4ab8c32733cba0c7597a4/botocore/data/partitions.json).
@@ -0,0 +1,119 @@
---
title: 'Finding Groups'
description: 'Organize and triage security findings by check to reduce noise and prioritize remediation effectively.'
---
import { VersionBadge } from "/snippets/version-badge.mdx"
<VersionBadge version="5.23.0" />
Finding Groups transforms security findings triage by grouping findings by check instead of displaying a flat list. This dramatically reduces noise and enables faster, more effective prioritization.
## Triage Challenges with Flat Finding Lists
A real cloud environment produces thousands of findings per scan. A flat list makes it impossible to triage effectively:
- **Signal buried in noise**: the same misconfiguration repeated across 200 resources shows up as 200 rows, burying the signal in repetitive data
- **Prioritization guesswork**: without grouping, understanding which issues affect the most resources requires manual counting and correlation
- **Tedious muting**: muting a false positive globally requires manually acting on each individual finding across the list
- **Lost context**: when investigating a single resource, related findings are scattered across the same flat list, making it hard to see the full picture
## How Finding Groups Addresses These Challenges
Finding Groups addresses these challenges by intelligently grouping findings by check.
### Grouped View at a Glance
Each row represents a single check title with key information immediately visible:
- **Severity** indicator for quick risk assessment
- **Impacted providers** showing which cloud platforms are affected
- **X of Y impacted resources** counter displaying how many resources fail this check
For example, `Vercel project has the Web Application Firewall enabled` across every affected project collapses to a single row — not one per project. Sort or filter by severity, provider, or status at the group level to triage top-down instead of drowning in per-resource rows.
![Finding Groups list view](/images/finding-groups-list.png)
### Expanding Groups for Details
Expand any group inline to see the failing resources with detailed information:
| Column | Description |
|--------|-------------|
| **UID** | Unique identifier for the resource |
| **Service** | The cloud service the resource belongs to |
| **Region** | Geographic region where the resource is deployed |
| **Severity** | Risk level of the finding |
| **Provider** | Cloud provider (AWS, Azure, GCP, Kubernetes, etc.) |
| **Last Seen** | When the finding was last detected |
| **Failing For** | Duration the resource has been in a failing state |
![Finding Groups expanded view](/images/finding-groups-expanded.png)
### Resource Detail Drawer
Select any resource to open the detail drawer with full finding context:
- **Risk**: the security risk associated with this finding
- **Description**: detailed explanation of what was detected
- **Status Extended**: additional status information and context
- **Remediation**: step-by-step guidance to resolve the issue
- **View in Prowler Hub**: direct link to explore the check in Prowler Hub
- **Analyze This Finding With Lighthouse AI**: one-click AI-powered analysis for deeper insights
![Finding Groups resource detail drawer](/images/finding-groups-drawer.png)
### Bulk Actions
Bulk-mute an entire group instead of chasing duplicates across the list. This is especially useful for:
- Known false positives that appear across many resources
- Findings in development or test environments
- Accepted risks that have been documented and approved
<Warning>
Muting findings does not resolve underlying security issues. Review each finding carefully before muting to ensure it represents an acceptable risk or has been properly addressed.
</Warning>
## Other Findings for This Resource
Inside the resource detail drawer, the **Other Findings For This Resource** tab lists every finding that hits the same resource — passing, failing, and muted — alongside the one currently being reviewed.
![Other Findings For This Resource tab](/images/finding-groups-other-findings.png)
### Why This Matters
When reviewing "WAF not enabled" on a Vercel project, the tab immediately shows:
- Skew protection status
- Rate limiting configuration
- IP blocking settings
- Custom firewall rules
- Password protection findings
All for that same project, without navigating back to the main list and filtering by resource UID.
### Complete Context Within the Drawer
Pair the Other Findings tab with:
- **Scans tab**: scan history for this resource
- **Events tab**: changes and events over time
This provides full context without leaving the drawer.
## Best Practices
1. **Start with high severity groups**: focus on critical and high severity groups first for maximum impact.
2. **Use filters strategically**: filter by provider or status at the group level to narrow the triage scope.
3. **Leverage bulk mute**: when a finding represents a confirmed false positive, mute the entire group at once.
4. **Check related findings**: review the Other Findings tab to understand the full security posture of a resource.
5. **Track failure duration**: use the "Failing For" column to prioritize long-standing issues that may indicate systemic problems.
## Getting Started
1. Navigate to the **Findings** section in Prowler Cloud/App.
2. Toggle to the **Grouped View** to see findings organized by check.
3. Select any group row to expand and see affected resources.
4. Select a resource to open the detail drawer with full context.
5. Use the **Other Findings For This Resource** tab to see all findings for that resource.
@@ -25,8 +25,7 @@ Behind the scenes, Lighthouse AI works as follows:
Lighthouse AI supports multiple LLM providers including OpenAI, Amazon Bedrock, and OpenAI-compatible services. For configuration details, see [Using Multiple LLM Providers with Lighthouse](/user-guide/tutorials/prowler-app-lighthouse-multi-llm).
</Note>
<img className="block dark:hidden" src="/images/lighthouse-architecture-light.png" alt="Prowler Lighthouse Architecture" />
<img className="hidden dark:block" src="/images/lighthouse-architecture-dark.png" alt="Prowler Lighthouse Architecture" />
![Prowler Lighthouse Architecture](/images/lighthouse-architecture.png)
<Note>
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.3.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.3.4 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -1267,19 +1267,19 @@ typing-extensions = ">=4.6.0"
[[package]]
name = "azure-mgmt-resource"
version = "23.3.0"
version = "24.0.0"
description = "Microsoft Azure Resource Management Client Library for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_mgmt_resource-23.3.0-py3-none-any.whl", hash = "sha256:ab216ee28e29db6654b989746e0c85a1181f66653929d2cb6e48fba66d9af323"},
{file = "azure_mgmt_resource-23.3.0.tar.gz", hash = "sha256:fc4f1fd8b6aad23f8af4ed1f913df5f5c92df117449dc354fea6802a2829fea4"},
{file = "azure_mgmt_resource-24.0.0-py3-none-any.whl", hash = "sha256:27b32cd223e2784269f5a0db3c282042886ee4072d79cedc638438ece7cd0df4"},
{file = "azure_mgmt_resource-24.0.0.tar.gz", hash = "sha256:cf6b8995fcdd407ac9ff1dd474087129429a1d90dbb1ac77f97c19b96237b265"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
azure-mgmt-core = ">=1.5.0"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
@@ -1425,6 +1425,64 @@ typing-extensions = ">=4.6.0"
[package.extras]
aio = ["azure-core[aio] (>=1.30.0)"]
[[package]]
name = "backports-datetime-fromisoformat"
version = "2.0.3"
description = "Backport of Python 3.11's datetime.fromisoformat"
optional = false
python-versions = ">3"
groups = ["dev"]
markers = "python_version == \"3.10\""
files = [
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f681f638f10588fa3c101ee9ae2b63d3734713202ddfcfb6ec6cea0778a29d4"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:cd681460e9142f1249408e5aee6d178c6d89b49e06d44913c8fdfb6defda8d1c"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ee68bc8735ae5058695b76d3bb2aee1d137c052a11c8303f1e966aa23b72b65b"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8273fe7932db65d952a43e238318966eab9e49e8dd546550a41df12175cc2be4"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39d57ea50aa5a524bb239688adc1d1d824c31b6094ebd39aa164d6cadb85de22"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ac6272f87693e78209dc72e84cf9ab58052027733cd0721c55356d3c881791cf"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:44c497a71f80cd2bcfc26faae8857cf8e79388e3d5fbf79d2354b8c360547d58"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:6335a4c9e8af329cb1ded5ab41a666e1448116161905a94e054f205aa6d263bc"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2e4b66e017253cdbe5a1de49e0eecff3f66cd72bcb1229d7db6e6b1832c0443"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:43e2d648e150777e13bbc2549cc960373e37bf65bd8a5d2e0cef40e16e5d8dd0"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:4ce6326fd86d5bae37813c7bf1543bae9e4c215ec6f5afe4c518be2635e2e005"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7c8fac333bf860208fd522a5394369ee3c790d0aa4311f515fcc4b6c5ef8d75"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4da5ab3aa0cc293dc0662a0c6d1da1a011dc1edcbc3122a288cfed13a0b45"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58ea11e3bf912bd0a36b0519eae2c5b560b3cb972ea756e66b73fb9be460af01"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8a375c7dbee4734318714a799b6c697223e4bbb57232af37fbfff88fb48a14c6"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:ac677b1664c4585c2e014739f6678137c8336815406052349c85898206ec7061"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66ce47ee1ba91e146149cf40565c3d750ea1be94faf660ca733d8601e0848147"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8b7e069910a66b3bba61df35b5f879e5253ff0821a70375b9daf06444d046fa4"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:a3b5d1d04a9e0f7b15aa1e647c750631a873b298cdd1255687bb68779fe8eb35"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1b95986430e789c076610aea704db20874f0781b8624f648ca9fb6ef67c6e1"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffe5f793db59e2f1d45ec35a1cf51404fdd69df9f6952a0c87c3060af4c00e32"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:620e8e73bd2595dfff1b4d256a12b67fce90ece3de87b38e1dde46b910f46f4d"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4cf9c0a985d68476c1cabd6385c691201dda2337d7453fb4da9679ce9f23f4e7"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:d144868a73002e6e2e6fef72333e7b0129cecdd121aa8f1edba7107fd067255d"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e81b26497a17c29595bc7df20bc6a872ceea5f8c9d6537283945d4b6396aec10"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:5ba00ead8d9d82fd6123eb4891c566d30a293454e54e32ff7ead7644f5f7e575"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:24d574cb4072e1640b00864e94c4c89858033936ece3fc0e1c6f7179f120d0a8"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9735695a66aad654500b0193525e590c693ab3368478ce07b34b443a1ea5e824"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63d39709e17eb72685d052ac82acf0763e047f57c86af1b791505b1fec96915d"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1ea2cc84224937d6b9b4c07f5cb7c667f2bde28c255645ba27f8a675a7af8234"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4024e6d35a9fdc1b3fd6ac7a673bd16cb176c7e0b952af6428b7129a70f72cce"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5e2dcc94dc9c9ab8704409d86fcb5236316e9dcef6feed8162287634e3568f4c"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fa2de871801d824c255fac7e5e7e50f2be6c9c376fd9268b40c54b5e9da91f42"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:1314d4923c1509aa9696712a7bc0c7160d3b7acf72adafbbe6c558d523f5d491"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b750ecba3a8815ad8bc48311552f3f8ab99dd2326d29df7ff670d9c49321f48f"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d5117dce805d8a2f78baeddc8c6127281fa0a5e2c40c6dd992ba6b2b367876"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb35f607bd1cbe37b896379d5f5ed4dc298b536f4b959cb63180e05cacc0539d"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:61c74710900602637d2d145dda9720c94e303380803bf68811b2a151deec75c2"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ece59af54ebf67ecbfbbf3ca9066f5687879e36527ad69d8b6e3ac565d565a62"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:d0a7c5f875068efe106f62233bc712d50db4d07c13c7db570175c7857a7b5dbd"},
{file = "backports_datetime_fromisoformat-2.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90e202e72a3d5aae673fcc8c9a4267d56b2f532beeb9173361293625fe4d2039"},
{file = "backports_datetime_fromisoformat-2.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2df98ef1b76f5a58bb493dda552259ba60c3a37557d848e039524203951c9f06"},
{file = "backports_datetime_fromisoformat-2.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7100adcda5e818b5a894ad0626e38118bb896a347f40ebed8981155675b9ba7b"},
{file = "backports_datetime_fromisoformat-2.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e410383f5d6a449a529d074e88af8bc80020bb42b402265f9c02c8358c11da5"},
{file = "backports_datetime_fromisoformat-2.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2797593760da6bcc32c4a13fa825af183cd4bfd333c60b3dbf84711afca26ef"},
{file = "backports_datetime_fromisoformat-2.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35a144fd681a0bea1013ccc4cd3fd4dc758ea17ee23dca019c02b82ec46fc0c4"},
{file = "backports_datetime_fromisoformat-2.0.3.tar.gz", hash = "sha256:b58edc8f517b66b397abc250ecc737969486703a66eb97e01e6d51291b1a139d"},
]
[[package]]
name = "bandit"
version = "1.8.3"
@@ -3350,23 +3408,19 @@ files = [
[[package]]
name = "marshmallow"
version = "3.26.2"
version = "4.3.0"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["dev"]
files = [
{file = "marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73"},
{file = "marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57"},
{file = "marshmallow-4.3.0-py3-none-any.whl", hash = "sha256:46c4fe6984707e3cbd485dfebbf0a59874f58d695aad05c1668d15e8c6e13b46"},
{file = "marshmallow-4.3.0.tar.gz", hash = "sha256:fb43c53b3fe240b8f6af37223d6ef1636f927ad9bea8ab323afad95dff090880"},
]
[package.dependencies]
packaging = ">=17.0"
[package.extras]
dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
tests = ["pytest", "simplejson"]
backports-datetime-fromisoformat = {version = "*", markers = "python_version < \"3.11\""}
typing-extensions = {version = "*", markers = "python_version < \"3.11\""}
[[package]]
name = "mccabe"
@@ -3662,14 +3716,14 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.23.0"
version = "1.55.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "msgraph_sdk-1.23.0-py3-none-any.whl", hash = "sha256:58e0047b4ca59fd82022c02cd73fec0170a3d84f3b76721e3db2a0314df9a58a"},
{file = "msgraph_sdk-1.23.0.tar.gz", hash = "sha256:6dd1ba9a46f5f0ce8599fd9610133adbd9d1493941438b5d3632fce9e55ed607"},
{file = "msgraph_sdk-1.55.0-py3-none-any.whl", hash = "sha256:c8e68ebc4b88af5111de312e7fa910a4e76ddf48a4534feadb1fb8a411c48cfc"},
{file = "msgraph_sdk-1.55.0.tar.gz", hash = "sha256:6df691a31954a050d26b8a678968017e157d940fb377f2a8a4e17a9741b98756"},
]
[package.dependencies]
@@ -6681,4 +6735,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.13"
content-hash = "786921163bb46716defae1d9de1df001af2abf17edd3061165638707bcd28ce4"
content-hash = "09ce4507a464b318702ed8c6a738f3bb1bc4cc6ff5a50a9c2884f560af9ab034"
+31 -10
@@ -2,7 +2,23 @@
All notable changes to the **Prowler SDK** are documented in this file.
## [5.24.0] (Prowler UNRELEASED)
## [5.24.1] (Prowler v5.24.1)
### 🔄 Changed
- `msgraph-sdk` from 1.23.0 to 1.55.0 and `azure-mgmt-resource` from 23.3.0 to 24.0.0, removing the explicit `marshmallow` pin since it is only a transitive dev dependency [(#10733)](https://github.com/prowler-cloud/prowler/pull/10733)
### 🐞 Fixed
- Cloudflare account-scoped API tokens failing connection test in the App with `CloudflareUserTokenRequiredError` [(#10723)](https://github.com/prowler-cloud/prowler/pull/10723)
- Google Workspace Calendar checks false FAIL on unconfigured settings with secure Google defaults [(#10726)](https://github.com/prowler-cloud/prowler/pull/10726)
- `prowler image --registry-list` crashes with `AttributeError` because `ImageProvider.__init__` returns early before registering the global provider [(#10691)](https://github.com/prowler-cloud/prowler/pull/10691)
- Google Workspace Drive checks false FAIL on unconfigured settings with secure Google defaults [(#10727)](https://github.com/prowler-cloud/prowler/pull/10727)
- Cloudflare `validate_credentials` can hang in an infinite pagination loop when the SDK repeats accounts, blocking connection tests [(#10771)](https://github.com/prowler-cloud/prowler/pull/10771)
---
## [5.24.0] (Prowler v5.24.0)
### 🚀 Added
@@ -13,16 +29,21 @@ All notable changes to the **Prowler SDK** are documented in this file.
- `iam_role_access_not_stale_to_bedrock` and `iam_user_access_not_stale_to_bedrock` checks for AWS provider [(#10536)](https://github.com/prowler-cloud/prowler/pull/10536)
- `iam_policy_no_wildcard_marketplace_subscribe` and `iam_inline_policy_no_wildcard_marketplace_subscribe` checks for AWS provider [(#10525)](https://github.com/prowler-cloud/prowler/pull/10525)
- `bedrock_vpc_endpoints_configured` check for AWS provider [(#10591)](https://github.com/prowler-cloud/prowler/pull/10591)
- `exchange_organization_delicensing_resiliency_enabled` check for m365 provider [(#10608)](https://github.com/prowler-cloud/prowler/pull/10608)
- `exchange_organization_delicensing_resiliency_enabled` check for M365 provider [(#10608)](https://github.com/prowler-cloud/prowler/pull/10608)
- `entra_conditional_access_policy_mfa_enforced_for_guest_users` check for M365 provider [(#10616)](https://github.com/prowler-cloud/prowler/pull/10616)
- `entra_conditional_access_policy_corporate_device_sign_in_frequency_enforced` check for m365 provider [(#10618)](https://github.com/prowler-cloud/prowler/pull/10618)
- `entra_conditional_access_policy_block_unknown_device_platforms` check for m365 provider [(#10615)](https://github.com/prowler-cloud/prowler/pull/10615)
- `entra_conditional_access_policy_corporate_device_sign_in_frequency_enforced` check for M365 provider [(#10618)](https://github.com/prowler-cloud/prowler/pull/10618)
- `entra_conditional_access_policy_block_unknown_device_platforms` check for M365 provider [(#10615)](https://github.com/prowler-cloud/prowler/pull/10615)
- `--excluded-region` CLI flag, `PROWLER_AWS_DISALLOWED_REGIONS` environment variable, and `aws.disallowed_regions` config entry to skip specific AWS regions during scans [(#10688)](https://github.com/prowler-cloud/prowler/pull/10688)
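For reference, the three mechanisms express the same exclusion, with the CLI flag taking precedence over the environment variable, which takes precedence over the config file. A minimal sketch of that resolution (illustrative names and values, not the exact Prowler internals):

```python
# The same exclusion via each mechanism (hypothetical regions):
#   CLI:    prowler aws --excluded-region me-south-1 ap-east-1
#   Env:    export PROWLER_AWS_DISALLOWED_REGIONS="me-south-1,ap-east-1"
#   Config: aws.disallowed_regions: ["me-south-1", "ap-east-1"]
import os

def resolve(cli_regions: list | None, config: dict) -> set[str]:
    # Precedence: CLI > env var > config file.
    if cli_regions:
        raw = cli_regions
    else:
        env = os.environ.get("PROWLER_AWS_DISALLOWED_REGIONS", "")
        raw = [r for r in env.split(",") if r.strip()] or config.get(
            "disallowed_regions", []
        )
    return {str(r).strip() for r in raw if str(r).strip()}

# resolve(["eu-west-1"], {"disallowed_regions": ["us-east-1"]}) -> {"eu-west-1"}
```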
### 🔄 Changed
- Bump Poetry to `2.3.4` and consolidate SDK workflows onto the `setup-python-poetry` composite action with opt-in lockfile regeneration [(#10681)](https://github.com/prowler-cloud/prowler/pull/10681)
- Normalize Conditional Access platform values in Entra models and simplify platform-based checks [(#10635)](https://github.com/prowler-cloud/prowler/pull/10635)
### 🐞 Fixed
- Vercel firewall config handling for team-scoped projects and current API response shapes [(#10695)](https://github.com/prowler-cloud/prowler/pull/10695)
---
## [5.23.0] (Prowler v5.23.0)
@@ -787,7 +808,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- S3 `test_connection` uses AWS S3 API `HeadBucket` instead of `GetBucketLocation` [(#8456)](https://github.com/prowler-cloud/prowler/pull/8456)
- Add more validations to Azure Storage models when some values are None to avoid serialization issues [(#8325)](https://github.com/prowler-cloud/prowler/pull/8325)
- `sns_topics_not_publicly_accessible` false positive with `aws:SourceArn` conditions [(#8326)](https://github.com/prowler-cloud/prowler/issues/8326)
- Remove typo from description req 1.2.3 - Prowler ThreatScore m365 [(#8384)](https://github.com/prowler-cloud/prowler/pull/8384)
- Remove typo from description req 1.2.3 - Prowler ThreatScore M365 [(#8384)](https://github.com/prowler-cloud/prowler/pull/8384)
- Way of counting FAILED/PASS reqs from `kisa_isms_p_2023_aws` table [(#8382)](https://github.com/prowler-cloud/prowler/pull/8382)
- Use default tenant domain instead of first domain in list for Azure and M365 providers [(#8402)](https://github.com/prowler-cloud/prowler/pull/8402)
- Avoid multiple module error calls in M365 provider [(#8353)](https://github.com/prowler-cloud/prowler/pull/8353)
@@ -828,7 +849,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Title & description wording for `iam_user_accesskey_unused` check for AWS provider [(#8233)](https://github.com/prowler-cloud/prowler/pull/8233)
- Add GitHub provider to lateral panel in documentation and change -h environment variable output [(#8246)](https://github.com/prowler-cloud/prowler/pull/8246)
- Show `m365_identity_type` and `m365_identity_id` in cloud reports [(#8247)](https://github.com/prowler-cloud/prowler/pull/8247)
- Ensure `is_service_role` only returns `True` for service roles [(#8274)](https://github.com/prowler-cloud/prowler/pull/8274)
- Update DynamoDB check metadata to fix broken link [(#8273)](https://github.com/prowler-cloud/prowler/pull/8273)
- Show correct count of findings in Dashboard Security Posture page [(#8270)](https://github.com/prowler-cloud/prowler/pull/8270)
@@ -950,9 +971,9 @@ All notable changes to the **Prowler SDK** are documented in this file.
### Fixed
- `m365_powershell test_credentials` to use sanitized credentials [(#7761)](https://github.com/prowler-cloud/prowler/pull/7761)
- `admincenter_users_admins_reduced_license_footprint` check logic to pass when admin user has no license [(#7779)](https://github.com/prowler-cloud/prowler/pull/7779)
- `m365_powershell` to close the PowerShell sessions in msgraph services [(#7816)](https://github.com/prowler-cloud/prowler/pull/7816)
- `defender_ensure_notify_alerts_severity_is_high`check to accept high or lower severity [(#7862)](https://github.com/prowler-cloud/prowler/pull/7862)
- Replace `Directory.Read.All` permission with `Domain.Read.All` which is more restrictive [(#7888)](https://github.com/prowler-cloud/prowler/pull/7888)
- Split calls to list Azure Functions attributes [(#7778)](https://github.com/prowler-cloud/prowler/pull/7778)
@@ -1026,7 +1047,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- New check `teams_meeting_chat_anonymous_users_disabled` [(#7579)](https://github.com/prowler-cloud/prowler/pull/7579)
- Prowler Threat Score Compliance Framework [(#7603)](https://github.com/prowler-cloud/prowler/pull/7603)
- Documentation for M365 provider [(#7622)](https://github.com/prowler-cloud/prowler/pull/7622)
- Support for m365 provider in Prowler Dashboard [(#7633)](https://github.com/prowler-cloud/prowler/pull/7633)
- Support for M365 provider in Prowler Dashboard [(#7633)](https://github.com/prowler-cloud/prowler/pull/7633)
- New check for Modern Authentication enabled for Exchange Online in M365 [(#7636)](https://github.com/prowler-cloud/prowler/pull/7636)
- New check `sharepoint_onedrive_sync_restricted_unmanaged_devices` [(#7589)](https://github.com/prowler-cloud/prowler/pull/7589)
- New check for Additional Storage restricted for Exchange in M365 [(#7638)](https://github.com/prowler-cloud/prowler/pull/7638)
@@ -1036,7 +1057,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- New check for MailTips full enabled for Exchange in M365 [(#7637)](https://github.com/prowler-cloud/prowler/pull/7637)
- New check for Comprehensive Attachments Filter Applied for Defender in M365 [(#7661)](https://github.com/prowler-cloud/prowler/pull/7661)
- Modified check `exchange_mailbox_properties_auditing_enabled` to make it configurable [(#7662)](https://github.com/prowler-cloud/prowler/pull/7662)
- snapshots to m365 documentation [(#7673)](https://github.com/prowler-cloud/prowler/pull/7673)
- snapshots to M365 documentation [(#7673)](https://github.com/prowler-cloud/prowler/pull/7673)
- support for static credentials for sending findings to Amazon S3 and AWS Security Hub [(#7322)](https://github.com/prowler-cloud/prowler/pull/7322)
- Prowler ThreatScore for M365 provider [(#7692)](https://github.com/prowler-cloud/prowler/pull/7692)
- Microsoft User and User Credential auth to reports [(#7681)](https://github.com/prowler-cloud/prowler/pull/7681)
+11 -3
@@ -69,11 +69,11 @@ from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_github import GithubCIS
from prowler.lib.outputs.compliance.cis.cis_googleworkspace import GoogleWorkspaceCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
from prowler.lib.outputs.compliance.cis.cis_oraclecloud import OracleCloudCIS
from prowler.lib.outputs.compliance.cisa_scuba.cisa_scuba_googleworkspace import (
GoogleWorkspaceCISASCuBA,
)
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
from prowler.lib.outputs.compliance.cis.cis_oraclecloud import OracleCloudCIS
from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.compliance.csa.csa_alibabacloud import AlibabaCloudCSA
from prowler.lib.outputs.compliance.csa.csa_aws import AWSCSA
@@ -293,6 +293,10 @@ def prowler():
if not args.only_logs:
global_provider.print_credentials()
# --registry-list: listing already printed during provider init, exit
if getattr(global_provider, "_listing_only", False):
sys.exit()
# Skip service and check loading for external-tool providers
if provider not in EXTERNAL_TOOL_PROVIDERS:
# Import custom checks from folder
@@ -1311,8 +1315,12 @@ def prowler():
global_provider.identity.audited_regions,
)
if not global_provider.identity.audited_regions
else global_provider.identity.audited_regions
else set(global_provider.identity.audited_regions)
)
if global_provider._enabled_regions is not None:
security_hub_regions = security_hub_regions.intersection(
global_provider._enabled_regions
)
security_hub = SecurityHub(
aws_account_id=global_provider.identity.account,
+1 -1
@@ -38,7 +38,7 @@ class _MutableTimestamp:
timestamp = _MutableTimestamp(datetime.today())
timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
prowler_version = "5.24.0"
prowler_version = "5.24.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
+4
@@ -3,6 +3,10 @@ aws:
# AWS Global Configuration
# aws.mute_non_default_regions --> Set to True to mute failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
mute_non_default_regions: False
# aws.disallowed_regions --> List of AWS regions to exclude from the scan.
# Also settable via the PROWLER_AWS_DISALLOWED_REGIONS environment variable or
# the --excluded-region CLI flag. Precedence: CLI > env var > config file.
# disallowed_regions: []
# If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`:
# Mutelist:
# Accounts:
+136 -41
@@ -111,6 +111,7 @@ class AwsProvider(Provider):
mfa: bool = False,
profile: str = None,
regions: set = set(),
excluded_regions: set = None,
organizations_role_arn: str = None,
scan_unused_services: bool = False,
resource_tags: list[str] = [],
@@ -136,6 +137,10 @@ class AwsProvider(Provider):
- mfa: A boolean indicating whether MFA is enabled.
- profile: The name of the AWS CLI profile to use.
- regions: A set of regions to audit.
- excluded_regions: A set of regions to skip during the scan. Applied
on top of `regions` and of the account's enabled regions. Also
settable via the PROWLER_AWS_DISALLOWED_REGIONS environment variable
or the `disallowed_regions` key in the provider config file.
- organizations_role_arn: The ARN of the AWS Organizations IAM role to assume.
- scan_unused_services: A boolean indicating whether to scan unused services. False by default.
- resource_tags: A list of tags to filter the resources to audit.
@@ -190,6 +195,33 @@ class AwsProvider(Provider):
logger.info("Initializing AWS provider ...")
# Load provider config early because provider-level settings can affect
# bootstrap region selection before the scan starts.
if config_content is not None:
self._audit_config = config_content
else:
if not config_path:
config_path = default_config_file_path
self._audit_config = load_and_validate_config_file(self._type, config_path)
excluded_regions = self.resolve_excluded_regions(
excluded_regions, self._audit_config
)
# Normalize excluded_regions and prune the include-list up front so
# every downstream consumer (identity, STS region, service/region
# enumeration) sees an already-filtered view.
if excluded_regions and regions:
regions = set(regions) - excluded_regions
if not regions:
raise AWSArgumentTypeValidationError(
message=(
"All requested AWS regions are excluded by the "
"disallowed regions configuration."
),
file=pathlib.Path(__file__).name,
)
######## AWS Session
logger.info("Generating original session ...")
@@ -215,7 +247,7 @@ class AwsProvider(Provider):
# After the session is created, validate it
logger.info("Validating credentials ...")
sts_region = get_aws_region_for_sts(
self.session.current_session.region_name, regions
self.session.current_session.region_name, regions, excluded_regions
)
# Validate the credentials
@@ -229,7 +261,9 @@ class AwsProvider(Provider):
######## AWS Provider Identity
# Get profile region
profile_region = self.get_profile_region(self._session.current_session)
profile_region = self.get_profile_region(
self._session.current_session, excluded_regions
)
# Set identity
self._identity = self.set_identity(
@@ -332,7 +366,26 @@ class AwsProvider(Provider):
)
########
# Parse Scan Tags
# Get Enabled Regions
self._enabled_regions = self.get_aws_enabled_regions(
self._session.current_session
)
# Apply the exclusion to the account's enabled regions. This is the
# gate used by generate_regional_clients, so skipped regions never get
# a boto3 client created for them and cannot stall the scan.
if excluded_regions:
if self._enabled_regions is not None:
self._enabled_regions = self._enabled_regions - excluded_regions
if self._identity.audited_regions:
self._identity.audited_regions = (
set(self._identity.audited_regions) - excluded_regions
)
logger.info(f"Excluding AWS regions from scan: {sorted(excluded_regions)}")
self._excluded_regions = excluded_regions
# Parse Scan Tags after region exclusions are applied so tag discovery
# also skips disallowed regions.
if resource_tags:
self._audit_resources = self.get_tagged_resources(resource_tags)
@@ -340,22 +393,9 @@ class AwsProvider(Provider):
if resource_arn:
self._audit_resources = resource_arn
# Get Enabled Regions
self._enabled_regions = self.get_aws_enabled_regions(
self._session.current_session
)
# Set ignore unused services
self._scan_unused_services = scan_unused_services
# Audit Config
if config_content:
self._audit_config = config_content
else:
if not config_path:
config_path = default_config_file_path
self._audit_config = load_and_validate_config_file(self._type, config_path)
# Fixer Config
self._fixer_config = fixer_config
@@ -468,12 +508,53 @@ class AwsProvider(Provider):
)
@staticmethod
def get_profile_region(session: Session):
profile_region = AWS_REGION_US_EAST_1
if session.region_name:
profile_region = session.region_name
def resolve_excluded_regions(
excluded_regions: set | list | tuple | None,
audit_config: dict | None,
) -> set[str]:
"""Resolve AWS region exclusions with precedence arg > env > config."""
if excluded_regions is not None:
raw_regions = excluded_regions
else:
raw_regions = Provider.get_excluded_regions_from_env()
if not raw_regions and isinstance(audit_config, dict):
raw_regions = audit_config.get("disallowed_regions") or []
return profile_region
return {str(region).strip() for region in raw_regions if str(region).strip()}
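A usage sketch of the resolver above; the values are hypothetical, and the expected results follow from the `arg > env > config` precedence in the code:

```python
# An explicit argument wins over everything else, even a populated config.
AwsProvider.resolve_excluded_regions(
    {"us-east-2"}, {"disallowed_regions": ["eu-west-1"]}
)
# -> {"us-east-2"}

# No argument and no env var set: the config file entry applies,
# with entries normalized via .strip().
AwsProvider.resolve_excluded_regions(None, {"disallowed_regions": [" eu-west-1 "]})
# -> {"eu-west-1"}
```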
@staticmethod
def get_bootstrap_region_candidates(session_region: str | None) -> tuple[str, ...]:
"""Return safe fallback regions for bootstrap AWS calls."""
if session_region:
if session_region.startswith("cn-"):
return ("cn-north-1", "cn-northwest-1")
if session_region.startswith("us-gov-"):
return ("us-gov-east-1", "us-gov-west-1")
if session_region.startswith("eusc-"):
return ("eusc-de-east-1",)
if session_region.startswith("us-iso"):
return (session_region,)
return (AWS_STS_GLOBAL_ENDPOINT_REGION, "us-east-2", "us-west-2", "eu-west-1")
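Illustrative outputs of the helper above, following its partition branches and assuming `AWS_STS_GLOBAL_ENDPOINT_REGION` is `us-east-1`:

```python
AwsProvider.get_bootstrap_region_candidates("cn-north-1")
# -> ("cn-north-1", "cn-northwest-1")
AwsProvider.get_bootstrap_region_candidates("us-gov-west-1")
# -> ("us-gov-east-1", "us-gov-west-1")
AwsProvider.get_bootstrap_region_candidates(None)
# -> ("us-east-1", "us-east-2", "us-west-2", "eu-west-1")
```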
@staticmethod
def get_profile_region(
session: Session, excluded_regions: set[str] | None = None
) -> str:
excluded_regions = set(excluded_regions or ())
session_region = session.region_name
if session_region and session_region not in excluded_regions:
return session_region
for region in AwsProvider.get_bootstrap_region_candidates(session_region):
if region not in excluded_regions:
if session_region and session_region != region:
logger.info(
f"Configured AWS profile region {session_region} is excluded; using {region} for bootstrap clients."
)
return region
return session_region or AWS_REGION_US_EAST_1
@staticmethod
def set_identity(
@@ -701,12 +782,15 @@ class AwsProvider(Provider):
Caller Identity ARN: arn:aws:iam::123456789012:user/prowler
```
"""
# Beautify audited regions, set "all" if there is no filter region
regions = (
", ".join(self._identity.audited_regions)
if self._identity.audited_regions is not None
else "all"
)
# Beautify audited regions. If the scan includes all regions but some
# are explicitly excluded, reflect that in the banner instead of
# showing the misleading "all" label.
if self._identity.audited_regions:
regions = ", ".join(sorted(self._identity.audited_regions))
elif getattr(self, "_excluded_regions", None):
regions = f"all except {', '.join(sorted(self._excluded_regions))}"
else:
regions = "all"
# Beautify audited profile, set "default" if there is no profile set
profile = (
self._identity.profile if self._identity.profile is not None else "default"
@@ -745,6 +829,8 @@ class AwsProvider(Provider):
service_regions = AwsProvider.get_available_aws_service_regions(
service, self._identity.partition, self._identity.audited_regions
)
if getattr(self, "_excluded_regions", None):
service_regions = service_regions - self._excluded_regions
# Get the regions enabled for the account and get the intersection with the service available regions
if self._enabled_regions is not None:
@@ -962,6 +1048,8 @@ class AwsProvider(Provider):
service_regions = AwsProvider.get_available_aws_service_regions(
service, self._identity.partition, self._identity.audited_regions
)
if getattr(self, "_excluded_regions", None):
service_regions = service_regions - self._excluded_regions
default_region = self.get_global_region()
# global region of the partition when all regions are audited and there is no profile region
if self._identity.profile_region in service_regions:
@@ -1565,13 +1653,19 @@ def read_aws_regions_file() -> dict:
# TODO: This can be moved to another class since it doesn't need self
def get_aws_region_for_sts(session_region: str, regions: set[str]) -> str:
def get_aws_region_for_sts(
session_region: str,
regions: set[str],
excluded_regions: set[str] | None = None,
) -> str:
"""
Get the AWS region for the STS Assume Role operation.
Args:
- session_region (str): The region configured in the AWS session.
- regions (set[str]): The regions passed with the -f/--region/--filter-region option.
- excluded_regions (set[str] | None): Regions that should be avoided for
bootstrap calls when possible.
Returns:
str: The AWS region for the STS Assume Role operation
@@ -1579,20 +1673,21 @@ def get_aws_region_for_sts(session_region: str, regions: set[str]) -> str:
Example:
aws_region = get_aws_region_for_sts(session_region, regions)
"""
# If there is no region passed with -f/--region/--filter-region
if regions is None or len(regions) == 0:
# If you have a region configured in your AWS config or credentials file
if session_region is not None:
aws_region = session_region
else:
# If there is no region set passed with -f/--region
# we use the Global STS Endpoint Region, us-east-1
aws_region = AWS_STS_GLOBAL_ENDPOINT_REGION
else:
# Get the first region passed to the -f/--region
aws_region = list(regions)[0]
excluded_regions = set(excluded_regions or ())
return aws_region
if regions:
for region in regions:
if region not in excluded_regions:
return region
if session_region and session_region not in excluded_regions:
return session_region
for region in AwsProvider.get_bootstrap_region_candidates(session_region):
if region not in excluded_regions:
return region
return session_region or AWS_STS_GLOBAL_ENDPOINT_REGION
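Worked examples of the selection order above: a non-excluded filter region first, then the session region, then the partition-aware bootstrap candidates (again assuming `AWS_STS_GLOBAL_ENDPOINT_REGION` is `us-east-1`):

```python
# The first non-excluded filter region wins.
get_aws_region_for_sts("eu-west-1", {"us-east-2"}, {"eu-west-1"})
# -> "us-east-2"

# No filter regions and the session region is excluded: fall back to the
# first non-excluded bootstrap candidate.
get_aws_region_for_sts("eu-west-1", None, {"eu-west-1"})
# -> "us-east-1"

# Nothing excluded: the session region is used, as before this change.
get_aws_region_for_sts("eu-west-1", None, None)
# -> "eu-west-1"
```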
# TODO: this duplicates the provider arguments validation library
@@ -66,6 +66,16 @@ def init_parser(self):
help="AWS region names to run Prowler against",
choices=AwsProvider.get_regions(partition=None),
)
aws_regions_subparser.add_argument(
"--excluded-region",
"--excluded-regions",
nargs="+",
help=(
"AWS region names to exclude from the scan. Overrides the "
"PROWLER_AWS_DISALLOWED_REGIONS environment variable when set."
),
choices=AwsProvider.get_regions(partition=None),
)
# AWS Organizations
aws_orgs_subparser = aws_parser.add_argument_group("AWS Organizations")
aws_orgs_subparser.add_argument(
@@ -30,10 +30,12 @@ def quick_inventory(provider: AwsProvider, args):
ec2_client = provider.session.current_session.client(
"ec2", region_name=provider.identity.profile_region
)
excluded_regions = getattr(provider, "_excluded_regions", set())
# Get all the available regions
provider.identity.audited_regions = [
region["RegionName"]
for region in ec2_client.describe_regions()["Regions"]
if region["RegionName"] not in excluded_regions
]
with alive_bar(
@@ -274,8 +274,12 @@ class CloudflareProvider(Provider):
for account in client.accounts.list():
account_id = getattr(account, "id", None)
# Prevent infinite loop - skip if we've seen this account
# Prevent infinite loop on repeated pages from the SDK paginator
if account_id in seen_account_ids:
logger.warning(
"Detected repeated Cloudflare account ID while listing accounts. "
"Stopping pagination to avoid an infinite loop."
)
break
seen_account_ids.add(account_id)
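The guard in isolation, as a reusable sketch rather than the provider code itself: stop iterating as soon as the paginator hands back an ID it has already produced.

```python
def iterate_accounts_once(pages):
    """Yield items until an ID repeats, guarding against a looping paginator."""
    seen = set()
    for item in pages:
        item_id = getattr(item, "id", None)
        if item_id in seen:
            # Repeated page from the SDK paginator; bail out instead of spinning.
            break
        seen.add(item_id)
        yield item
```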
@@ -332,19 +336,16 @@ class CloudflareProvider(Provider):
return
except PermissionDeniedError as error:
error_str = str(error)
# Check for user-level authentication required (code 9109)
if "9109" in error_str:
logger.error(f"CloudflareUserTokenRequiredError: {error}")
raise CloudflareUserTokenRequiredError(
file=os.path.basename(__file__),
)
# Check for invalid API key or email (code 9103) - comes as 403
if "9103" in error_str or "Unknown X-Auth-Key" in error_str:
logger.error(f"CloudflareInvalidAPIKeyError: {error}")
raise CloudflareInvalidAPIKeyError(
file=os.path.basename(__file__),
)
# For other permission errors, try accounts.list() as fallback
# For permission errors (including 9109 account-scoped tokens),
# try accounts.list() as fallback before failing.
# Error 9109 means the token is account-scoped, not user-level,
# which is valid for scanning — only fail if accounts.list() also fails.
logger.warning(
f"Unable to retrieve Cloudflare user info: {error}. "
"Trying accounts.list() as fallback."
@@ -398,7 +399,20 @@ class CloudflareProvider(Provider):
# Fallback: try accounts.list()
try:
accounts = list(client.accounts.list())
accounts: list = []
seen_account_ids: set = set()
for account in client.accounts.list():
account_id = getattr(account, "id", None)
# Prevent infinite loop on repeated pages from the SDK paginator
if account_id in seen_account_ids:
logger.warning(
"Detected repeated Cloudflare account ID while validating credentials. "
"Stopping pagination to avoid an infinite loop."
)
break
seen_account_ids.add(account_id)
accounts.append(account)
if not accounts:
logger.error("CloudflareNoAccountsError: No accounts found")
raise CloudflareNoAccountsError(
+19
@@ -1,4 +1,5 @@
import importlib
import os
import pkgutil
import sys
from abc import ABC, abstractmethod
@@ -135,6 +136,18 @@ class Provider(ABC):
"""
return set()
@staticmethod
def get_excluded_regions_from_env() -> set:
"""Parse the PROWLER_AWS_DISALLOWED_REGIONS environment variable.
The variable is a comma-separated list of region identifiers to skip
during scans (e.g. "me-south-1, ap-east-1"). Whitespace around entries
is tolerated and empty entries are dropped. Returns an empty set when
the variable is unset or contains no usable values.
"""
raw = os.environ.get("PROWLER_AWS_DISALLOWED_REGIONS", "")
return {region.strip() for region in raw.split(",") if region.strip()}
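The tolerant parsing described in the docstring, exercised with an illustrative value; this mirrors the unit tests added further below:

```python
import os

os.environ["PROWLER_AWS_DISALLOWED_REGIONS"] = " me-south-1 , ap-east-1 ,, "
Provider.get_excluded_regions_from_env()  # -> {"me-south-1", "ap-east-1"}

os.environ.pop("PROWLER_AWS_DISALLOWED_REGIONS")
Provider.get_excluded_regions_from_env()  # -> set()
```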
@staticmethod
def get_global_provider() -> "Provider":
return Provider._global
@@ -160,6 +173,11 @@ class Provider(ABC):
if not isinstance(Provider._global, provider_class):
if "aws" in provider_class_name.lower():
excluded_regions = (
set(arguments.excluded_region)
if getattr(arguments, "excluded_region", None)
else None
)
provider_class(
retries_max_attempts=arguments.aws_retries_max_attempts,
role_arn=arguments.role,
@@ -169,6 +187,7 @@ class Provider(ABC):
mfa=arguments.mfa,
profile=arguments.profile,
regions=set(arguments.region) if arguments.region else None,
excluded_regions=excluded_regions,
organizations_role_arn=arguments.organizations_role,
scan_unused_services=arguments.scan_unused_services,
resource_tags=arguments.resource_tag,
@@ -35,21 +35,20 @@ class calendar_external_invitations_warning(Check):
f"External invitation warnings for Google Calendar are enabled "
f"in domain {calendar_client.provider.identity.domain}."
)
elif warning_enabled is None:
report.status = "PASS"
report.status_extended = (
f"External invitation warnings for Google Calendar use Google's "
f"secure default configuration (enabled) "
f"in domain {calendar_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if warning_enabled is None:
report.status_extended = (
f"External invitation warnings for Google Calendar are not "
f"explicitly configured in domain "
f"{calendar_client.provider.identity.domain}. "
f"Users should be warned when inviting guests outside the organization."
)
else:
report.status_extended = (
f"External invitation warnings for Google Calendar are disabled "
f"in domain {calendar_client.provider.identity.domain}. "
f"Users should be warned when inviting guests outside the organization."
)
report.status_extended = (
f"External invitation warnings for Google Calendar are disabled "
f"in domain {calendar_client.provider.identity.domain}. "
f"Users should be warned when inviting guests outside the organization."
)
findings.append(report)
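The same tri-state mapping applies to the Google Workspace checks in this group. A condensed sketch of the new status logic for the boolean-style settings (`status_extended` wording omitted; the sharing-level checks compare the value against the expected level instead of using truthiness):

```python
def status_for(setting_value):
    # True  -> explicitly enabled             -> PASS
    # None  -> unset; Google's secure default -> PASS (the change in this PR)
    # False -> explicitly disabled            -> FAIL
    if setting_value or setting_value is None:
        return "PASS"
    return "FAIL"
```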
@@ -36,20 +36,20 @@ class calendar_external_sharing_primary_calendar(Check):
f"{calendar_client.provider.identity.domain} is restricted to "
f"free/busy information only."
)
elif sharing is None:
report.status = "PASS"
report.status_extended = (
f"Primary calendar external sharing uses Google's secure default "
f"configuration (free/busy only) "
f"in domain {calendar_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if sharing is None:
report.status_extended = (
f"Primary calendar external sharing is not explicitly configured "
f"in domain {calendar_client.provider.identity.domain}. "
f"External sharing should be restricted to free/busy information only."
)
else:
report.status_extended = (
f"Primary calendar external sharing in domain "
f"{calendar_client.provider.identity.domain} is set to {sharing}. "
f"External sharing should be restricted to free/busy information only."
)
report.status_extended = (
f"Primary calendar external sharing in domain "
f"{calendar_client.provider.identity.domain} is set to {sharing}. "
f"External sharing should be restricted to free/busy information only."
)
findings.append(report)
@@ -33,21 +33,20 @@ class drive_external_sharing_warn_users(Check):
f"External sharing warnings for Drive and Docs are enabled "
f"in domain {drive_client.provider.identity.domain}."
)
elif warning_enabled is None:
report.status = "PASS"
report.status_extended = (
f"External sharing warnings for Drive and Docs use Google's "
f"secure default configuration (enabled) "
f"in domain {drive_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if warning_enabled is None:
report.status_extended = (
f"External sharing warnings for Drive and Docs are not "
f"explicitly configured in domain "
f"{drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files outside the organization."
)
else:
report.status_extended = (
f"External sharing warnings for Drive and Docs are disabled "
f"in domain {drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files outside the organization."
)
report.status_extended = (
f"External sharing warnings for Drive and Docs are disabled "
f"in domain {drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files outside the organization."
)
findings.append(report)
@@ -35,22 +35,21 @@ class drive_shared_drive_creation_allowed(Check):
f"Users in domain {drive_client.provider.identity.domain} "
f"are allowed to create new shared drives."
)
elif allow_creation is None:
report.status = "PASS"
report.status_extended = (
f"Shared drive creation uses Google's secure default "
f"configuration (allowed) "
f"in domain {drive_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if allow_creation is None:
report.status_extended = (
f"Shared drive creation is not explicitly configured in "
f"domain {drive_client.provider.identity.domain}. "
f"Users should be allowed to create new shared drives to avoid "
f"data loss when accounts are deleted."
)
else:
report.status_extended = (
f"Users in domain {drive_client.provider.identity.domain} "
f"are prevented from creating new shared drives. "
f"Users should be allowed to create new shared drives to avoid "
f"data loss when accounts are deleted."
)
report.status_extended = (
f"Users in domain {drive_client.provider.identity.domain} "
f"are prevented from creating new shared drives. "
f"Users should be allowed to create new shared drives to avoid "
f"data loss when accounts are deleted."
)
findings.append(report)
@@ -35,21 +35,21 @@ class drive_shared_drive_disable_download_print_copy(Check):
f"{drive_client.provider.identity.domain} is restricted to "
f"{allowed}."
)
elif allowed is None:
report.status = "PASS"
report.status_extended = (
f"Download, print, and copy restrictions for shared drives use "
f"Google's secure default configuration (disabled for viewers "
f"and commenters) "
f"in domain {drive_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if allowed is None:
report.status_extended = (
f"Download, print, and copy restrictions for shared drive "
f"viewers and commenters are not explicitly configured in "
f"domain {drive_client.provider.identity.domain}. "
f"These actions should be restricted to editors or managers only."
)
else:
report.status_extended = (
f"Download, print, and copy in shared drives in domain "
f"{drive_client.provider.identity.domain} is set to {allowed}. "
f"These actions should be restricted to editors or managers only."
)
report.status_extended = (
f"Download, print, and copy in shared drives in domain "
f"{drive_client.provider.identity.domain} is set to {allowed}. "
f"These actions should be restricted to editors or managers only."
)
findings.append(report)
@@ -36,21 +36,20 @@ class drive_warn_sharing_with_allowlisted_domains(Check):
f"Users are warned when sharing files with allowlisted "
f"domains in domain {drive_client.provider.identity.domain}."
)
elif warn_enabled is None:
report.status = "PASS"
report.status_extended = (
f"Warning when sharing with allowlisted domains uses Google's "
f"secure default configuration (enabled) "
f"in domain {drive_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if warn_enabled is None:
report.status_extended = (
f"Warning when sharing with allowlisted domains is not "
f"explicitly configured in domain "
f"{drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files with users in allowlisted domains."
)
else:
report.status_extended = (
f"Warning when sharing with allowlisted domains is disabled "
f"in domain {drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files with users in allowlisted domains."
)
report.status_extended = (
f"Warning when sharing with allowlisted domains is disabled "
f"in domain {drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files with users in allowlisted domains."
)
findings.append(report)
+41 -33
@@ -163,42 +163,50 @@ class ImageProvider(Provider):
# Registry scan mode: enumerate images from registry
if self.registry:
self._enumerate_registry()
if self._listing_only:
return
for image in self.images:
self._validate_image_name(image)
if not self.images:
raise ImageNoImagesProvidedError(
file=__file__,
message="No images provided for scanning.",
)
# Audit Config
if config_content:
self._audit_config = config_content
else:
if not config_path:
config_path = default_config_file_path
self._audit_config = load_and_validate_config_file(self._type, config_path)
# Fixer Config
self._fixer_config = fixer_config if fixer_config is not None else {}
# Mutelist (not needed for Image provider since Trivy has its own logic)
# Safe defaults for listing-only mode (overwritten below in scan mode)
self._audit_config = {}
self._fixer_config = {}
self._mutelist = None
self.audit_metadata = None
self.audit_metadata = Audit_Metadata(
provider=self._type,
account_id=self.audited_account,
account_name="image",
region=self.region,
services_scanned=0,
expected_checks=[],
completed_checks=0,
audit_progress=0,
)
# Skip scan setup for listing-only mode
if not self._listing_only:
for image in self.images:
self._validate_image_name(image)
if not self.images:
raise ImageNoImagesProvidedError(
file=__file__,
message="No images provided for scanning.",
)
# Audit Config
if config_content:
self._audit_config = config_content
else:
if not config_path:
config_path = default_config_file_path
self._audit_config = load_and_validate_config_file(
self._type, config_path
)
# Fixer Config
self._fixer_config = fixer_config if fixer_config is not None else {}
# Mutelist (not needed for Image provider since Trivy has its own logic)
self._mutelist = None
self.audit_metadata = Audit_Metadata(
provider=self._type,
account_id=self.audited_account,
account_name="image",
region=self.region,
services_scanned=0,
expected_checks=[],
completed_checks=0,
audit_progress=0,
)
Provider.set_global_provider(self)
@@ -55,6 +55,7 @@ class Project(VercelService):
# Parse password protection
pwd_protection = proj.get("passwordProtection")
security = proj.get("security", {}) or {}
self.projects[project_id] = VercelProject(
id=project_id,
@@ -75,6 +76,16 @@ class Project(VercelService):
git_fork_protection=proj.get("gitForkProtection", True),
git_repository=proj.get("link"),
secure_compute=proj.get("secureCompute"),
firewall_enabled=security.get("firewallEnabled"),
firewall_config_version=(
str(security.get("firewallConfigVersion"))
if security.get("firewallConfigVersion") is not None
else None
),
managed_rules=security.get(
"managedRules", security.get("managedRulesets")
),
bot_id_enabled=security.get("botIdEnabled"),
)
logger.info(f"Project - Found {len(self.projects)} project(s)")
@@ -160,4 +171,8 @@ class VercelProject(BaseModel):
git_fork_protection: bool = True
git_repository: Optional[dict] = None
secure_compute: Optional[dict] = None
firewall_enabled: Optional[bool] = None
firewall_config_version: Optional[str] = None
managed_rules: Optional[dict] = None
bot_id_enabled: Optional[bool] = None
environment_variables: list[VercelEnvironmentVariable] = Field(default_factory=list)
@@ -26,10 +26,7 @@ class Security(VercelService):
def _fetch_firewall_config(self, project):
"""Fetch WAF/Firewall config for a single project."""
try:
data = self._get(
"/v1/security/firewall/config",
params={"projectId": project.id},
)
data = self._read_firewall_config(project)
if data is None:
# 403 — plan limitation, store with managed_rulesets=None
@@ -44,39 +41,60 @@ class Security(VercelService):
)
return
# Parse firewall config
fw = data.get("firewallConfig", data) if isinstance(data, dict) else {}
fw = self._normalize_firewall_config(data)
# Determine if firewall is enabled
rules = fw.get("rules", []) or []
managed = fw.get("managedRules", fw.get("managedRulesets"))
if not fw:
fallback_firewall_enabled = self._fallback_firewall_enabled(project)
self.firewall_configs[project.id] = VercelFirewallConfig(
project_id=project.id,
project_name=project.name,
team_id=project.team_id,
firewall_enabled=(
fallback_firewall_enabled
if fallback_firewall_enabled is not None
else False
),
managed_rulesets=self._fallback_managed_rulesets(project),
name=project.name,
id=project.id,
)
return
rules = [
rule for rule in (fw.get("rules", []) or []) if self._is_active(rule)
]
managed = self._active_managed_rulesets(
fw.get("managedRules", fw.get("managedRulesets", fw.get("crs")))
)
custom_rules = []
ip_blocking = []
ip_blocking = list(fw.get("ips", []) or [])
rate_limiting = []
for rule in rules:
rule_action = rule.get("action", {})
action_type = (
rule_action.get("type", "")
if isinstance(rule_action, dict)
else str(rule_action)
)
mitigate_action = self._mitigate_action(rule)
if action_type == "rate_limit" or rule.get("rateLimit"):
if self._is_rate_limiting_rule(rule, mitigate_action):
rate_limiting.append(rule)
elif action_type in ("deny", "block") and self._is_ip_rule(rule):
elif self._is_ip_rule(rule):
ip_blocking.append(rule)
else:
custom_rules.append(rule)
firewall_enabled = bool(rules) or bool(managed)
firewall_enabled = fw.get("firewallEnabled")
if firewall_enabled is None:
firewall_enabled = self._fallback_firewall_enabled(project)
if firewall_enabled is None:
firewall_enabled = bool(rules) or bool(ip_blocking) or bool(managed)
if not managed:
managed = self._fallback_managed_rulesets(project)
self.firewall_configs[project.id] = VercelFirewallConfig(
project_id=project.id,
project_name=project.name,
team_id=project.team_id,
firewall_enabled=firewall_enabled,
managed_rulesets=managed if managed is not None else {},
managed_rulesets=managed,
custom_rules=custom_rules,
ip_blocking_rules=ip_blocking,
rate_limiting_rules=rate_limiting,
@@ -95,6 +113,117 @@ class Security(VercelService):
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _read_firewall_config(self, project):
"""Read the deployed firewall config via the documented endpoint.
See: https://vercel.com/docs/rest-api/security/read-firewall-configuration
"""
params = self._firewall_params(project)
config_version = getattr(project, "firewall_config_version", None)
endpoints = []
if config_version:
endpoints.append(f"/v1/security/firewall/config/{config_version}")
endpoints.append("/v1/security/firewall/config/active")
last_error = None
for endpoint in endpoints:
try:
return self._get(endpoint, params=params)
except Exception as error:
last_error = error
logger.warning(
f"Security - Firewall config read failed for project "
f"{project.id} (team={getattr(project, 'team_id', None)}) "
f"on {endpoint} with params={params}: "
f"{error.__class__.__name__}: {error}"
)
if last_error is not None:
logger.debug(
f"Security - Falling back to firewall config wrapper for "
f"{project.id} after {last_error.__class__.__name__}: {last_error}"
)
return self._get("/v1/security/firewall/config", params=params)
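Spelled out, the endpoint order the method above attempts; the version value here is hypothetical:

```python
# With project.firewall_config_version == "7":
#   1. GET /v1/security/firewall/config/7
#   2. GET /v1/security/firewall/config/active
#   3. GET /v1/security/firewall/config   (legacy wrapper, only after 1-2 fail)
# Without a stored version, step 1 is skipped and the chain starts at 2.
```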
@staticmethod
def _firewall_params(project) -> dict:
"""Build firewall request params, preserving team scope for team projects."""
params = {"projectId": project.id}
team_id = getattr(project, "team_id", None)
if isinstance(team_id, str) and team_id:
params["teamId"] = team_id
return params
@staticmethod
def _normalize_firewall_config(data: dict) -> dict:
"""Normalize firewall responses across Vercel endpoint variants."""
if not isinstance(data, dict):
return {}
if "firewallConfig" in data and isinstance(data["firewallConfig"], dict):
return data["firewallConfig"]
if any(key in data for key in ("active", "draft", "versions")):
return data.get("active") or {}
return data
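Examples of the three response shapes the normalizer above handles; payloads are abbreviated and hypothetical:

```python
Security._normalize_firewall_config({"firewallConfig": {"rules": []}})
# -> {"rules": []}   (wrapper shape from the legacy endpoint)

Security._normalize_firewall_config({"active": {"rules": []}, "versions": []})
# -> {"rules": []}   (versioned shape: only the active config is kept)

Security._normalize_firewall_config({"rules": []})
# -> {"rules": []}   (already a bare config; returned unchanged)
```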
@staticmethod
def _active_managed_rulesets(managed_rules: dict | None) -> dict:
"""Return only active managed rulesets."""
if not isinstance(managed_rules, dict):
return {}
return {
ruleset: config
for ruleset, config in managed_rules.items()
if not isinstance(config, dict) or config.get("active", False)
}
@classmethod
def _fallback_managed_rulesets(cls, project) -> dict:
"""Return active managed rulesets from project metadata."""
return cls._active_managed_rulesets(getattr(project, "managed_rules", None))
@staticmethod
def _fallback_firewall_enabled(project) -> bool | None:
"""Return firewall enabled state from project metadata when available."""
return getattr(project, "firewall_enabled", None)
@staticmethod
def _mitigate_action(rule: dict) -> dict:
"""Extract the nested Vercel mitigation action payload for a rule."""
action = rule.get("action", {})
if not isinstance(action, dict):
return {}
mitigate = action.get("mitigate")
return mitigate if isinstance(mitigate, dict) else action
@staticmethod
def _is_active(rule: dict) -> bool:
"""Treat missing active flags as enabled for backwards compatibility."""
return rule.get("active", True) is not False
@classmethod
def _is_rate_limiting_rule(
cls, rule: dict, mitigate_action: dict | None = None
) -> bool:
"""Check if a firewall rule enforces rate limiting."""
if rule.get("rateLimit"):
return True
mitigate = (
mitigate_action
if isinstance(mitigate_action, dict)
else cls._mitigate_action(rule)
)
return bool(mitigate.get("rateLimit")) or mitigate.get("action") == "rate_limit"
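Sample classifications for the rate-limit detector above; the rule payloads are hypothetical:

```python
Security._is_rate_limiting_rule({"rateLimit": {"requests": 100}})
# -> True  (top-level rateLimit)
Security._is_rate_limiting_rule({"action": {"mitigate": {"rateLimit": {"window": 60}}}})
# -> True  (nested mitigation payload)
Security._is_rate_limiting_rule({"action": {"mitigate": {"action": "rate_limit"}}})
# -> True  (mitigation action type)
Security._is_rate_limiting_rule({"action": {"type": "deny"}})
# -> False (plain deny rule)
```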
@staticmethod
def _is_ip_rule(rule: dict) -> bool:
"""Check if a rule is an IP blocking rule based on conditions."""
+3 -4
@@ -30,7 +30,7 @@ dependencies = [
"azure-mgmt-postgresqlflexibleservers==1.1.0",
"azure-mgmt-recoveryservices==3.1.0",
"azure-mgmt-recoveryservicesbackup==9.2.0",
"azure-mgmt-resource==23.3.0",
"azure-mgmt-resource==24.0.0",
"azure-mgmt-search==9.1.0",
"azure-mgmt-security==7.0.0",
"azure-mgmt-sql==3.0.1",
@@ -57,7 +57,7 @@ dependencies = [
"kubernetes==32.0.1",
"markdown==3.10.2",
"microsoft-kiota-abstractions==1.9.2",
"msgraph-sdk==1.23.0",
"msgraph-sdk==1.55.0",
"numpy==2.0.2",
"openstacksdk==4.2.0",
"pandas==2.2.3",
@@ -95,7 +95,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">=3.10,<3.13"
version = "5.24.0"
version = "5.24.2"
[project.scripts]
prowler = "prowler.__main__:prowler"
@@ -121,7 +121,6 @@ docker = "7.1.0"
filelock = "3.20.3"
flake8 = "7.1.2"
freezegun = "1.5.1"
marshmallow = "==3.26.2"
mock = "5.2.0"
moto = {extras = ["all"], version = "5.1.11"}
openapi-schema-validator = "0.6.3"
+144
@@ -839,6 +839,132 @@ aws:
assert isinstance(aws_provider, AwsProvider)
@mock_aws
def test_excluded_regions_removed_from_enabled_regions(self):
aws_provider = AwsProvider(excluded_regions={AWS_REGION_EU_WEST_1})
assert AWS_REGION_EU_WEST_1 not in aws_provider._enabled_regions
assert AWS_REGION_EU_WEST_1 not in aws_provider.generate_regional_clients("ec2")
@mock_aws
def test_excluded_regions_pruned_from_input_regions(self):
aws_provider = AwsProvider(
regions={AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1},
excluded_regions={AWS_REGION_EU_WEST_1},
)
assert AWS_REGION_EU_WEST_1 not in aws_provider._identity.audited_regions
assert AWS_REGION_US_EAST_1 in aws_provider._identity.audited_regions
@mock_aws
def test_excluded_regions_from_config_file(self):
with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as tmp:
tmp.write(f"aws:\n disallowed_regions:\n - {AWS_REGION_EU_WEST_1}\n")
config_path = tmp.name
try:
aws_provider = AwsProvider(config_path=config_path)
assert AWS_REGION_EU_WEST_1 not in aws_provider._enabled_regions
assert aws_provider._excluded_regions == {AWS_REGION_EU_WEST_1}
finally:
os.remove(config_path)
@mock_aws
def test_excluded_regions_from_env_on_direct_provider_init(self):
with mock.patch.dict(
os.environ,
{"PROWLER_AWS_DISALLOWED_REGIONS": AWS_REGION_EU_WEST_1},
clear=False,
):
aws_provider = AwsProvider()
assert aws_provider._excluded_regions == {AWS_REGION_EU_WEST_1}
assert AWS_REGION_EU_WEST_1 not in aws_provider._enabled_regions
@mock_aws
def test_excluded_regions_precedence_explicit_over_env_and_config(self):
with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as tmp:
tmp.write(f"aws:\n disallowed_regions:\n - {AWS_REGION_EU_WEST_1}\n")
config_path = tmp.name
try:
with mock.patch.dict(
os.environ,
{"PROWLER_AWS_DISALLOWED_REGIONS": AWS_REGION_US_EAST_1},
clear=False,
):
aws_provider = AwsProvider(
config_path=config_path,
excluded_regions={AWS_REGION_US_EAST_2},
)
assert aws_provider._excluded_regions == {AWS_REGION_US_EAST_2}
assert AWS_REGION_US_EAST_2 not in aws_provider._enabled_regions
assert AWS_REGION_EU_WEST_1 in aws_provider._enabled_regions
assert AWS_REGION_US_EAST_1 in aws_provider._enabled_regions
finally:
os.remove(config_path)
@mock_aws
def test_excluded_regions_from_config_avoid_excluded_profile_region(
self, monkeypatch
):
monkeypatch.setenv("AWS_DEFAULT_REGION", AWS_REGION_EU_WEST_1)
with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as tmp:
tmp.write(f"aws:\n disallowed_regions:\n - {AWS_REGION_EU_WEST_1}\n")
config_path = tmp.name
try:
aws_provider = AwsProvider(config_path=config_path)
assert aws_provider.identity.profile_region == AWS_REGION_US_EAST_1
finally:
os.remove(config_path)
@mock_aws
def test_aws_provider_raises_when_all_input_regions_are_excluded(self):
with raises(AWSArgumentTypeValidationError):
AwsProvider(
regions={AWS_REGION_EU_WEST_1},
excluded_regions={AWS_REGION_EU_WEST_1},
)
def test_get_excluded_regions_from_env_parses_comma_list(self):
with mock.patch.dict(
os.environ,
{"PROWLER_AWS_DISALLOWED_REGIONS": " me-south-1 , ap-east-1 ,, "},
):
assert Provider.get_excluded_regions_from_env() == {
"me-south-1",
"ap-east-1",
}
def test_get_excluded_regions_from_env_ignores_legacy_generic_name(self):
with mock.patch.dict(
os.environ,
{"PROWLER_DISALLOWED_REGIONS": "me-south-1"},
clear=True,
):
assert Provider.get_excluded_regions_from_env() == set()
def test_get_excluded_regions_from_env_unset(self):
with mock.patch.dict(os.environ, {}, clear=True):
assert Provider.get_excluded_regions_from_env() == set()
@mock_aws
def test_print_credentials_shows_all_except_excluded_regions(self):
aws_provider = AwsProvider(
excluded_regions={AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1}
)
with patch(
"prowler.providers.aws.aws_provider.print_boxes"
) as mock_print_boxes:
aws_provider.print_credentials()
report_lines = mock_print_boxes.call_args.args[0]
assert any(
"AWS Regions:" in line and "all except eu-west-1, us-east-1" in line
for line in report_lines
)
@mock_aws
def test_generate_regional_clients_all_enabled_regions(self):
aws_provider = AwsProvider()
@@ -2033,6 +2159,24 @@ aws:
== AWS_REGION_EU_WEST_1
)
def test_get_aws_region_for_sts_avoids_excluded_session_region(self):
input_regions = None
session_region = AWS_REGION_EU_WEST_1
assert (
get_aws_region_for_sts(
session_region, input_regions, {AWS_REGION_EU_WEST_1}
)
== AWS_REGION_US_EAST_1
)
def test_get_profile_region_avoids_excluded_session_region(self):
mocked_session = mock.Mock(region_name=AWS_REGION_EU_WEST_1)
assert (
AwsProvider.get_profile_region(mocked_session, {AWS_REGION_EU_WEST_1})
== AWS_REGION_US_EAST_1
)
@mock_aws
def test_set_session_config_default(self):
aws_provider = AwsProvider()
@@ -433,6 +433,29 @@ class TestCloudflareValidateCredentials:
with pytest.raises(CloudflareNoAccountsError):
CloudflareProvider.validate_credentials(session)
def test_validate_credentials_breaks_on_repeated_account_ids(self):
"""Pagination must stop when the SDK repeats account IDs to avoid infinite loops."""
def repeating_accounts():
account = MagicMock()
account.id = ACCOUNT_ID
while True:
yield account
mock_client = MagicMock()
mock_client.user.get.side_effect = Exception("Some other error")
mock_client.accounts.list.return_value = repeating_accounts()
session = CloudflareSession(
client=mock_client,
api_token=API_TOKEN,
api_key=None,
api_email=None,
)
# Must return without hanging; repeated IDs break the loop.
CloudflareProvider.validate_credentials(session)
class TestCloudflareTestConnection:
"""Tests for test_connection method."""
@@ -73,8 +73,8 @@ class TestCalendarExternalInvitationsWarning:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure (enabled)"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -100,8 +100,8 @@ class TestCalendarExternalInvitationsWarning:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -104,8 +104,8 @@ class TestCalendarExternalSharingPrimaryCalendar:
assert findings[0].status == "FAIL"
assert "EXTERNAL_ALL_INFO_READ_WRITE" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure (free/busy only)"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -131,8 +131,8 @@ class TestCalendarExternalSharingPrimaryCalendar:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -67,8 +67,8 @@ class TestDriveExternalSharingWarnUsers:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -92,8 +92,8 @@ class TestDriveExternalSharingWarnUsers:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -69,8 +69,8 @@ class TestDriveSharedDriveCreationAllowed:
assert findings[0].status == "FAIL"
assert "prevented" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -94,8 +94,8 @@ class TestDriveSharedDriveCreationAllowed:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -101,8 +101,8 @@ class TestDriveSharedDriveDisableDownloadPrintCopy:
assert findings[0].status == "FAIL"
assert "ALL" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -128,8 +128,8 @@ class TestDriveSharedDriveDisableDownloadPrintCopy:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -71,8 +71,8 @@ class TestDriveWarnSharingWithAllowlistedDomains:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -98,8 +98,8 @@ class TestDriveWarnSharingWithAllowlistedDomains:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -1185,3 +1185,58 @@ class TestInitGlobalProviderRegistryEnumeration:
# The "other/lib" repo should be filtered out by --image-filter
assert not any("other/lib" in img for img in provider.images)
assert len(provider.images) == 3
class TestRegistryListMode:
"""Regression test: `prowler image --registry <url> --registry-list` crashes.
When --registry-list is passed, ImageProvider._enumerate_registry sets
_listing_only = True and __init__ returns early before calling
Provider.set_global_provider(self). The caller in __main__.py then calls
global_provider.print_credentials() on a None reference, raising
AttributeError: 'NoneType' object has no attribute 'print_credentials'.
"""
@patch("prowler.providers.image.image_provider.create_registry_adapter")
@patch("prowler.providers.common.provider.load_and_validate_config_file")
def test_registry_list_does_not_crash(self, mock_load_config, mock_adapter_factory):
"""Reproduce the --registry-list crash by running the same sequence
as __main__.py: init_global_provider, get_global_provider,
then print_credentials."""
mock_load_config.return_value = {}
adapter = MagicMock()
adapter.list_repositories.return_value = ["myorg/app"]
adapter.list_tags.return_value = ["v1.0", "latest"]
mock_adapter_factory.return_value = adapter
arguments = Namespace(
provider="image",
config_file=None,
fixer_config=None,
images=None,
image_list_file=None,
scanners=["vuln"],
image_config_scanners=None,
trivy_severity=None,
ignore_unfixed=False,
timeout="5m",
registry="myregistry.io",
image_filter=None,
tag_filter=None,
max_images=0,
registry_insecure=False,
registry_list_images=True,
)
# Reproduce the exact crash sequence from __main__.py lines 289-294:
# Provider.init_global_provider(args)
# global_provider = Provider.get_global_provider()
# global_provider.print_credentials()
with mock.patch.object(Provider, "_global", None):
Provider.init_global_provider(arguments)
global_provider = Provider.get_global_provider()
# Before the fix, this call crashed: global_provider was None, so
# .print_credentials() raised AttributeError.
global_provider.print_credentials()
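Two plausible shapes for the fix this test guards, both sketches rather than the repository's actual patch: register the provider before the listing-only early return, or make the caller tolerate a missing global provider.

# Option A (hypothetical): register before the early return in __init__.
if self._listing_only:
    Provider.set_global_provider(self)
    return

# Option B (hypothetical): guard the caller in __main__.py.
global_provider = Provider.get_global_provider()
if global_provider is not None:
    global_provider.print_credentials()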
@@ -0,0 +1,45 @@
from unittest import mock
from prowler.providers.vercel.services.project.project_service import Project
from tests.providers.vercel.vercel_fixtures import (
PROJECT_ID,
PROJECT_NAME,
TEAM_ID,
set_mocked_vercel_provider,
)
class TestProjectService:
def test_list_projects_parses_security_metadata(self):
service = Project.__new__(Project)
service.provider = set_mocked_vercel_provider()
service.projects = {}
service._paginate = mock.MagicMock(
return_value=[
{
"id": PROJECT_ID,
"name": PROJECT_NAME,
"accountId": TEAM_ID,
"security": {
"firewallEnabled": True,
"firewallConfigVersion": 42,
"managedRules": {
"owasp": {"active": True, "action": "log"},
"ai_bots": {"active": False, "action": "deny"},
},
"botIdEnabled": True,
},
}
]
)
service._list_projects()
project = service.projects[PROJECT_ID]
assert project.firewall_enabled is True
assert project.firewall_config_version == "42"
assert project.managed_rules == {
"owasp": {"active": True, "action": "log"},
"ai_bots": {"active": False, "action": "deny"},
}
assert project.bot_id_enabled is True
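A hedged sketch of the normalization those assertions pin down, assuming _list_projects reads each item's optional security block (field names taken from the mocked payload; the real parser lives in the Vercel project service):

def parse_security(item, project):
    # Hypothetical helper mirroring the asserted behavior.
    security = item.get("security") or {}
    project.firewall_enabled = bool(security.get("firewallEnabled", False))
    # The version is normalized to a string, e.g. 42 -> "42", presumably so
    # it can be interpolated into versioned config URLs later.
    version = security.get("firewallConfigVersion")
    project.firewall_config_version = str(version) if version is not None else None
    project.managed_rules = security.get("managedRules", {})
    project.bot_id_enabled = bool(security.get("botIdEnabled", False))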
@@ -0,0 +1,199 @@
from unittest import mock
from prowler.providers.vercel.services.project.project_service import VercelProject
from prowler.providers.vercel.services.security.security_service import Security
from tests.providers.vercel.vercel_fixtures import PROJECT_ID, PROJECT_NAME, TEAM_ID
class TestSecurityService:
def test_fetch_firewall_config_reads_active_version_and_normalizes_response(self):
project = VercelProject(id=PROJECT_ID, name=PROJECT_NAME, team_id=TEAM_ID)
service = Security.__new__(Security)
service.firewall_configs = {}
service._get = mock.MagicMock(
return_value={
"active": {
"firewallEnabled": True,
"managedRules": {
"owasp": {"active": True, "action": "deny"},
"ai_bots": {"active": False, "action": "deny"},
},
"rules": [
{
"id": "rule-custom",
"name": "Block admin access",
"active": True,
"conditionGroup": [
{
"conditions": [
{
"type": "path",
"op": "pre",
"value": "/admin",
}
]
}
],
"action": {
"mitigate": {
"action": "deny",
}
},
},
{
"id": "rule-rate-limit",
"name": "Rate limit login",
"active": True,
"conditionGroup": [
{
"conditions": [
{
"type": "path",
"op": "eq",
"value": "/login",
}
]
}
],
"action": {
"mitigate": {
"action": "deny",
"rateLimit": {
"algo": "fixed_window",
"window": 60,
"limit": 10,
},
}
},
},
],
"ips": [
{
"id": "ip-rule",
"ip": "203.0.113.7",
"action": "deny",
}
],
},
"draft": None,
"versions": [1],
}
)
service._fetch_firewall_config(project)
service._get.assert_called_once_with(
"/v1/security/firewall/config/active",
params={"projectId": PROJECT_ID, "teamId": TEAM_ID},
)
config = service.firewall_configs[PROJECT_ID]
assert config.firewall_enabled is True
assert config.managed_rulesets == {"owasp": {"active": True, "action": "deny"}}
assert [rule["id"] for rule in config.custom_rules] == ["rule-custom"]
assert [rule["id"] for rule in config.rate_limiting_rules] == [
"rule-rate-limit"
]
assert [rule["id"] for rule in config.ip_blocking_rules] == ["ip-rule"]
def test_fetch_firewall_config_parses_crs_managed_rulesets(self):
project = VercelProject(
id=PROJECT_ID,
name=PROJECT_NAME,
team_id=TEAM_ID,
firewall_config_version="1",
)
service = Security.__new__(Security)
service.firewall_configs = {}
service._get = mock.MagicMock(
return_value={
"id": "waf_test",
"version": 1,
"firewallEnabled": True,
"crs": {
"gen": {"active": True, "action": "log"},
"xss": {"active": True, "action": "deny"},
"php": {"active": False, "action": "log"},
},
"rules": [],
"ips": [],
}
)
service._fetch_firewall_config(project)
config = service.firewall_configs[PROJECT_ID]
assert config.firewall_enabled is True
assert config.managed_rulesets == {
"gen": {"active": True, "action": "log"},
"xss": {"active": True, "action": "deny"},
}
def test_fetch_firewall_config_falls_back_to_wrapper_when_active_missing(self):
project = VercelProject(id=PROJECT_ID, name=PROJECT_NAME, team_id=TEAM_ID)
service = Security.__new__(Security)
service.firewall_configs = {}
service._get = mock.MagicMock(
side_effect=[
Exception("404 active config not found"),
{"active": None, "draft": None, "versions": []},
]
)
service._fetch_firewall_config(project)
assert service._get.call_args_list == [
mock.call(
"/v1/security/firewall/config/active",
params={"projectId": PROJECT_ID, "teamId": TEAM_ID},
),
mock.call(
"/v1/security/firewall/config",
params={"projectId": PROJECT_ID, "teamId": TEAM_ID},
),
]
config = service.firewall_configs[PROJECT_ID]
assert config.firewall_enabled is False
assert config.managed_rulesets == {}
assert config.custom_rules == []
assert config.rate_limiting_rules == []
assert config.ip_blocking_rules == []
def test_fetch_firewall_config_uses_project_security_metadata_when_config_empty(
self,
):
project = VercelProject(
id=PROJECT_ID,
name=PROJECT_NAME,
team_id=TEAM_ID,
firewall_enabled=True,
firewall_config_version="42",
managed_rules={
"owasp": {"active": True, "action": "log"},
"ai_bots": {"active": False, "action": "deny"},
},
)
service = Security.__new__(Security)
service.firewall_configs = {}
service._get = mock.MagicMock(
return_value={"active": None, "draft": None, "versions": []}
)
service._fetch_firewall_config(project)
service._get.assert_called_once_with(
"/v1/security/firewall/config/42",
params={"projectId": PROJECT_ID, "teamId": TEAM_ID},
)
config = service.firewall_configs[PROJECT_ID]
assert config.firewall_enabled is True
assert config.managed_rulesets == {"owasp": {"active": True, "action": "log"}}
assert config.custom_rules == []
assert config.rate_limiting_rules == []
assert config.ip_blocking_rules == []
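Read together, the four tests fix an endpoint fallback order for _fetch_firewall_config. A condensed sketch under those assumptions (real parsing and error handling omitted):

def fetch_firewall_config(service, project):
    # Hypothetical condensation of the call sequence the tests assert.
    params = {"projectId": project.id, "teamId": project.team_id}
    if project.firewall_config_version:
        # A version recorded on the project goes straight to the versioned
        # config endpoint; an empty response falls back to the project's
        # own security metadata.
        return service._get(
            f"/v1/security/firewall/config/{project.firewall_config_version}",
            params=params,
        )
    try:
        return service._get("/v1/security/firewall/config/active", params=params)
    except Exception:
        # /active can fail (e.g. 404); fall back to the wrapper endpoint,
        # whose "active" key may still be None.
        return service._get("/v1/security/firewall/config", params=params)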
@@ -2,16 +2,50 @@
All notable changes to the **Prowler UI** are documented in this file.
## [1.24.0] (Prowler UNRELEASED)
## [1.24.2] (Prowler v5.24.2)
### 🐞 Fixed
- Default muted filter now applied consistently on the findings page and the finding-group resource drill-down, keeping muted findings hidden unless the user opts in via the "include muted findings" checkbox [(#10818)](https://github.com/prowler-cloud/prowler/pull/10818)
---
## [1.24.1] (Prowler v5.24.1)
### 🐞 Fixed
- Findings and filter UX fixes: exclude muted findings by default in the resource detail drawer and finding group resource views, show a category context label (for example `Status: FAIL`) on MultiSelect triggers instead of hiding the placeholder, and add a `wide` width option for filter dropdowns, applied to the findings Scan filter, to prevent label truncation [(#10734)](https://github.com/prowler-cloud/prowler/pull/10734)
- Findings grouped view now handles zero-resource IaC counters, refines drawer loading states, and adds provider indicators to finding groups [(#10736)](https://github.com/prowler-cloud/prowler/pull/10736)
- Other Findings for this resource: ordered by `severity` [(#10778)](https://github.com/prowler-cloud/prowler/pull/10778)
- Other Findings for this resource: show `delta` indicator [(#10778)](https://github.com/prowler-cloud/prowler/pull/10778)
- Compliance: requirement findings do not show muted findings [(#10778)](https://github.com/prowler-cloud/prowler/pull/10778)
- Latest new findings: link to finding groups ordered by `-severity,-last_seen_at` [(#10778)](https://github.com/prowler-cloud/prowler/pull/10778)
### 🔒 Security
- Upgrade React to 19.2.5 and Next.js to 16.2.3 to mitigate CVE-2026-23869 (React2DoS), a high-severity unauthenticated remote DoS vulnerability in the React Flight Protocol's Server Function deserialization [(#10754)](https://github.com/prowler-cloud/prowler/pull/10754)
---
## [1.24.0] (Prowler v5.24.0)
### 🚀 Added
- Resources side drawer with redesigned detail panel [(#10673)](https://github.com/prowler-cloud/prowler/pull/10673)
- Syntax highlighting for remediation code blocks in finding groups drawer with provider-aware auto-detection (Shell, HCL, YAML, Bicep) [(#10698)](https://github.com/prowler-cloud/prowler/pull/10698)
### 🔄 Changed
- Attack Paths scan selection: contextual button labels based on graph availability, tooltips on disabled actions, green dot indicator for selectable scans, and a warning banner when viewing data from a previous scan cycle [(#10685)](https://github.com/prowler-cloud/prowler/pull/10685)
- Remove legacy finding detail sheet, row-details wrapper, and resource detail panel; unify findings and resources around new side drawers [(#10692)](https://github.com/prowler-cloud/prowler/pull/10692)
- Attack Paths "View Finding" now opens the finding drawer inline over the graph instead of navigating to `/findings` in a new tab, preserving graph zoom, selection, and filter state
- Attack Paths scan table: replace action buttons with radio buttons, add dedicated Graph column, use info-colored In Progress badge, remove redundant Progress column, and fix info banner variant [(#10704)](https://github.com/prowler-cloud/prowler/pull/10704)
### 🐞 Fixed
- Findings group resource filters now strip unsupported scan parameters, display scan name instead of provider alias in filter badges, migrate mute modal from HeroUI to shadcn, and add searchable accounts/provider type selectors [(#10662)](https://github.com/prowler-cloud/prowler/pull/10662)
- Compliance detail page header now reflects the actual provider, alias and UID of the selected scan instead of always defaulting to AWS [(#10674)](https://github.com/prowler-cloud/prowler/pull/10674)
- Provider wizard modal moved to a stable page-level host so the providers table refreshes after link, authenticate, and connection check without closing the modal [(#10675)](https://github.com/prowler-cloud/prowler/pull/10675)
---
@@ -40,7 +74,6 @@ All notable changes to the **Prowler UI** are documented in this file.
### 🐞 Fixed
- Preserve query parameters in callbackUrl during invitation flow [(#10571)](https://github.com/prowler-cloud/prowler/pull/10571)
- Deleting the active organization now switches to the target org before deleting, preventing JWT rejection from the backend [(#10491)](https://github.com/prowler-cloud/prowler/pull/10491)
- Clear Filters now resets all filters including muted findings and auto-applies, Clear all in pills only removes pill-visible sub-filters, and the discard icon is now an Undo text button [(#10446)](https://github.com/prowler-cloud/prowler/pull/10446)
- Send to Jira modal now dynamically fetches and displays available issue types per project instead of hardcoding `"Task"`, fixing failures on non-English Jira instances [(#10534)](https://github.com/prowler-cloud/prowler/pull/10534)
- Exclude service filter from finding group resources endpoint to prevent empty results when a service filter is active [(#10652)](https://github.com/prowler-cloud/prowler/pull/10652)
@@ -70,7 +70,7 @@ describe("getFindingGroups — default sort for muted and non-muted rows", () =>
const calledUrl = fetchMock.mock.calls[0][0] as string;
const url = new URL(calledUrl);
expect(url.searchParams.get("sort")).toBe(
"-status,-new_fail_count,-changed_fail_count,-severity,-fail_count,-last_seen_at",
"-status,-severity,-new_fail_count,-changed_fail_count,-fail_count,-last_seen_at",
);
});
@@ -84,7 +84,7 @@ describe("getFindingGroups — default sort for muted and non-muted rows", () =>
const calledUrl = fetchMock.mock.calls[0][0] as string;
const url = new URL(calledUrl);
expect(url.searchParams.get("sort")).toBe(
"-status,-new_fail_count,-changed_fail_count,-severity,-new_fail_muted_count,-changed_fail_muted_count,-fail_count,-fail_muted_count,-last_seen_at",
"-status,-severity,-new_fail_count,-changed_fail_count,-new_fail_muted_count,-changed_fail_muted_count,-fail_count,-fail_muted_count,-last_seen_at",
);
});
});
@@ -106,7 +106,7 @@ describe("getLatestFindingGroups — default sort for muted and non-muted rows",
const calledUrl = fetchMock.mock.calls[0][0] as string;
const url = new URL(calledUrl);
expect(url.searchParams.get("sort")).toBe(
"-status,-new_fail_count,-changed_fail_count,-severity,-fail_count,-last_seen_at",
"-status,-severity,-new_fail_count,-changed_fail_count,-fail_count,-last_seen_at",
);
});
@@ -120,7 +120,7 @@ describe("getLatestFindingGroups — default sort for muted and non-muted rows",
const calledUrl = fetchMock.mock.calls[0][0] as string;
const url = new URL(calledUrl);
expect(url.searchParams.get("sort")).toBe(
"-status,-new_fail_count,-changed_fail_count,-severity,-new_fail_muted_count,-changed_fail_muted_count,-fail_count,-fail_muted_count,-last_seen_at",
"-status,-severity,-new_fail_count,-changed_fail_count,-new_fail_muted_count,-changed_fail_muted_count,-fail_count,-fail_muted_count,-last_seen_at",
);
});
});
@@ -262,7 +262,7 @@ describe("getFindingGroupResources — Blocker 1: FAIL-first sort", () => {
const calledUrl = fetchMock.mock.calls[0][0] as string;
const url = new URL(calledUrl);
expect(url.searchParams.get("sort")).toBe(
"-status,-delta,-severity,-last_seen_at",
"-status,-severity,-delta,-last_seen_at",
);
});
@@ -300,7 +300,7 @@ describe("getLatestFindingGroupResources — Blocker 1: FAIL-first sort", () =>
const calledUrl = fetchMock.mock.calls[0][0] as string;
const url = new URL(calledUrl);
expect(url.searchParams.get("sort")).toBe(
"-status,-delta,-severity,-last_seen_at",
"-status,-severity,-delta,-last_seen_at",
);
});
@@ -344,7 +344,7 @@ describe("getFindingGroupResources — triangulation: params coexist", () => {
expect(url.searchParams.get("page[number]")).toBe("2");
expect(url.searchParams.get("page[size]")).toBe("50");
expect(url.searchParams.get("sort")).toBe(
"-status,-delta,-severity,-last_seen_at",
"-status,-severity,-delta,-last_seen_at",
);
expect(url.searchParams.get("filter[status]")).toBeNull();
});
@@ -372,7 +372,7 @@ describe("getLatestFindingGroupResources — triangulation: params coexist", ()
expect(url.searchParams.get("page[number]")).toBe("3");
expect(url.searchParams.get("page[size]")).toBe("20");
expect(url.searchParams.get("sort")).toBe(
"-status,-delta,-severity,-last_seen_at",
"-status,-severity,-delta,-last_seen_at",
);
expect(url.searchParams.get("filter[status]")).toBeNull();
});
@@ -443,7 +443,7 @@ describe("getFindingGroupResources — caller filters are preserved", () => {
const calledUrl = fetchMock.mock.calls[0][0] as string;
const url = new URL(calledUrl);
expect(url.searchParams.get("sort")).toBe(
"-status,-delta,-severity,-last_seen_at",
"-status,-severity,-delta,-last_seen_at",
);
expect(url.searchParams.get("filter[name__icontains]")).toBe("bucket-prod");
expect(url.searchParams.get("filter[severity__in]")).toBe("high");
@@ -533,7 +533,7 @@ describe("getLatestFindingGroupResources — caller filters are preserved", () =
const calledUrl = fetchMock.mock.calls[0][0] as string;
const url = new URL(calledUrl);
expect(url.searchParams.get("sort")).toBe(
"-status,-delta,-severity,-last_seen_at",
"-status,-severity,-delta,-last_seen_at",
);
expect(url.searchParams.get("filter[name__icontains]")).toBe(
"instance-prod",
@@ -83,13 +83,13 @@ function normalizeFindingGroupResourceFilters(
}
const DEFAULT_FINDING_GROUPS_SORT =
"-status,-new_fail_count,-changed_fail_count,-severity,-fail_count,-last_seen_at";
"-status,-severity,-new_fail_count,-changed_fail_count,-fail_count,-last_seen_at";
const DEFAULT_FINDING_GROUPS_SORT_WITH_MUTED =
"-status,-new_fail_count,-changed_fail_count,-severity,-new_fail_muted_count,-changed_fail_muted_count,-fail_count,-fail_muted_count,-last_seen_at";
"-status,-severity,-new_fail_count,-changed_fail_count,-new_fail_muted_count,-changed_fail_muted_count,-fail_count,-fail_muted_count,-last_seen_at";
const DEFAULT_FINDING_GROUP_RESOURCES_SORT =
"-status,-delta,-severity,-last_seen_at";
"-status,-severity,-delta,-last_seen_at";
interface FetchFindingGroupsParams {
page?: number;
@@ -115,4 +115,35 @@ describe("adaptFindingsByResourceResponse — malformed input", () => {
expect(result[0].id).toBe("finding-1");
expect(result[0].checkId).toBe("s3_check");
});
it("should normalize a single finding response into a one-item drawer array", () => {
// Given — getFindingById returns a single JSON:API resource object
const input = {
data: {
id: "finding-1",
attributes: {
uid: "uid-1",
check_id: "s3_check",
status: "FAIL",
severity: "critical",
check_metadata: {
checktitle: "S3 Check",
},
},
relationships: {
resources: { data: [] },
scan: { data: null },
},
},
included: [],
};
// When
const result = adaptFindingsByResourceResponse(input);
// Then
expect(result).toHaveLength(1);
expect(result[0].id).toBe("finding-1");
expect(result[0].checkTitle).toBe("S3 Check");
});
});
@@ -165,16 +165,18 @@ type IncludedDict = Record<string, IncludedItem>;
* then resolves each finding's resource and provider relationships.
*/
interface JsonApiResponse {
data: FindingApiItem[];
data: FindingApiItem | FindingApiItem[];
included?: Record<string, unknown>[];
}
function isJsonApiResponse(value: unknown): value is JsonApiResponse {
const data = (value as { data?: unknown })?.data;
return (
value !== null &&
typeof value === "object" &&
"data" in value &&
Array.isArray((value as { data: unknown }).data)
(Array.isArray(data) || (data !== null && typeof data === "object"))
);
}
@@ -188,8 +190,11 @@ export function adaptFindingsByResourceResponse(
const resourcesDict = createDict("resources", apiResponse) as IncludedDict;
const scansDict = createDict("scans", apiResponse) as IncludedDict;
const providersDict = createDict("providers", apiResponse) as IncludedDict;
const findings = Array.isArray(apiResponse.data)
? apiResponse.data
: [apiResponse.data];
return apiResponse.data.map((item) => {
return findings.map((item) => {
const attrs = item.attributes;
const meta = (attrs.check_metadata || {}) as Record<string, unknown>;
const remediationRaw = meta.remediation as
@@ -43,6 +43,7 @@ vi.mock("@/actions/finding-groups", () => ({
}));
import {
getLatestFindingsByResourceUid,
resolveFindingIdsByCheckIds,
resolveFindingIdsByVisibleGroupResources,
} from "./findings-by-resource";
@@ -262,3 +263,46 @@ describe("resolveFindingIdsByVisibleGroupResources", () => {
expect(fetchMock).not.toHaveBeenCalled();
});
});
describe("getLatestFindingsByResourceUid", () => {
beforeEach(() => {
vi.clearAllMocks();
vi.stubGlobal("fetch", fetchMock);
getAuthHeadersMock.mockResolvedValue({ Authorization: "Bearer token" });
handleApiResponseMock.mockResolvedValue({ data: [] });
});
it("should restrict to FAIL, exclude muted findings, and apply severity/time sorting by default", async () => {
fetchMock.mockResolvedValue(new Response("", { status: 200 }));
await getLatestFindingsByResourceUid({
resourceUid: "resource-1",
});
const calledUrl = new URL(fetchMock.mock.calls[0][0]);
expect(calledUrl.pathname).toBe("/api/v1/findings/latest");
expect(calledUrl.searchParams.get("filter[resource_uid]")).toBe(
"resource-1",
);
// Status filter is applied server-side so the page[size]=50 window
// always holds FAIL rows — guards against PASS-heavy resources
// starving FAILs out of the result.
expect(calledUrl.searchParams.get("filter[status]")).toBe("FAIL");
expect(calledUrl.searchParams.get("filter[muted]")).toBe("false");
expect(calledUrl.searchParams.get("sort")).toBe("severity,-updated_at");
});
it("should include muted findings only when explicitly requested", async () => {
fetchMock.mockResolvedValue(new Response("", { status: 200 }));
await getLatestFindingsByResourceUid({
resourceUid: "resource-1",
includeMuted: true,
});
const calledUrl = new URL(fetchMock.mock.calls[0][0]);
expect(calledUrl.searchParams.get("filter[status]")).toBe("FAIL");
expect(calledUrl.searchParams.get("filter[muted]")).toBe("include");
expect(calledUrl.searchParams.get("sort")).toBe("severity,-updated_at");
});
});
@@ -250,10 +250,12 @@ export const getLatestFindingsByResourceUid = async ({
resourceUid,
page = 1,
pageSize = 50,
includeMuted = false,
}: {
resourceUid: string;
page?: number;
pageSize?: number;
includeMuted?: boolean;
}) => {
const headers = await getAuthHeaders({ contentType: false });
@@ -263,8 +265,8 @@ export const getLatestFindingsByResourceUid = async ({
url.searchParams.append("filter[resource_uid]", resourceUid);
url.searchParams.append("filter[status]", "FAIL");
url.searchParams.append("filter[muted]", "include");
url.searchParams.append("sort", "-severity,-updated_at");
url.searchParams.append("filter[muted]", includeMuted ? "include" : "false");
url.searchParams.append("sort", "severity,-updated_at");
if (page) url.searchParams.append("page[number]", page.toString());
if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
@@ -141,7 +141,15 @@ export const getLatestMetadataInfo = async ({
}
};
export const getFindingById = async (findingId: string, include = "") => {
interface GetFindingByIdOptions {
source?: "resource-detail-drawer";
}
export const getFindingById = async (
findingId: string,
include = "",
_options?: GetFindingByIdOptions,
) => {
const headers = await getAuthHeaders({ contentType: false });
const url = new URL(`${apiBaseUrl}/findings/${findingId}`);
@@ -0,0 +1,135 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
const {
fetchMock,
getAuthHeadersMock,
getFormValueMock,
handleApiErrorMock,
handleApiResponseMock,
} = vi.hoisted(() => ({
fetchMock: vi.fn(),
getAuthHeadersMock: vi.fn(),
getFormValueMock: vi.fn(),
handleApiErrorMock: vi.fn(),
handleApiResponseMock: vi.fn(),
}));
vi.mock("next/cache", () => ({
revalidatePath: vi.fn(),
}));
vi.mock("next/navigation", () => ({
redirect: vi.fn(),
}));
vi.mock("@/lib", () => ({
apiBaseUrl: "https://api.example.com/api/v1",
getAuthHeaders: getAuthHeadersMock,
getFormValue: getFormValueMock,
wait: vi.fn(),
}));
vi.mock("@/lib/provider-credentials/build-crendentials", () => ({
buildSecretConfig: vi.fn(() => ({
secretType: "access-secret-key",
secret: { key: "value" },
})),
}));
vi.mock("@/lib/provider-filters", () => ({
appendSanitizedProviderInFilters: vi.fn(),
}));
vi.mock("@/lib/server-actions-helper", () => ({
handleApiError: handleApiErrorMock,
handleApiResponse: handleApiResponseMock,
}));
import {
addCredentialsProvider,
addProvider,
checkConnectionProvider,
updateCredentialsProvider,
} from "./providers";
describe("providers actions", () => {
beforeEach(() => {
vi.clearAllMocks();
vi.stubGlobal("fetch", fetchMock);
getAuthHeadersMock.mockResolvedValue({ Authorization: "Bearer token" });
getFormValueMock.mockImplementation((formData: FormData, field: string) =>
formData.get(field),
);
handleApiErrorMock.mockReturnValue({ error: "Unexpected error" });
handleApiResponseMock.mockResolvedValue({ data: { id: "secret-1" } });
fetchMock.mockResolvedValue(
new Response(JSON.stringify({ data: { id: "secret-1" } }), {
status: 200,
headers: { "Content-Type": "application/json" },
}),
);
});
it("should revalidate providers after linking a cloud provider", async () => {
// Given
const formData = new FormData();
formData.set("providerType", "aws");
formData.set("providerUid", "111111111111");
// When
await addProvider(formData);
// Then
expect(handleApiResponseMock).toHaveBeenCalledWith(
expect.any(Response),
"/providers",
);
});
it("should revalidate providers after adding credentials in the wizard", async () => {
// Given
const formData = new FormData();
formData.set("providerId", "provider-1");
formData.set("providerType", "aws");
// When
await addCredentialsProvider(formData);
// Then
expect(handleApiResponseMock).toHaveBeenCalledWith(
expect.any(Response),
"/providers",
);
});
it("should revalidate providers after updating credentials in the wizard", async () => {
// Given
const formData = new FormData();
formData.set("providerId", "provider-1");
formData.set("providerType", "oraclecloud");
// When
await updateCredentialsProvider("secret-1", formData);
// Then
expect(handleApiResponseMock).toHaveBeenCalledWith(
expect.any(Response),
"/providers",
);
});
it("should revalidate providers when checking connection from the wizard", async () => {
// Given
const formData = new FormData();
formData.set("providerId", "provider-1");
// When
await checkConnectionProvider(formData);
// Then
expect(handleApiResponseMock).toHaveBeenCalledWith(
expect.any(Response),
"/providers",
);
});
});
@@ -3,6 +3,7 @@ export {
getLatestResources,
getMetadataInfo,
getResourceById,
getResourceDrawerData,
getResourceEvents,
getResources,
} from "./resources";
@@ -2,9 +2,12 @@
import { redirect } from "next/navigation";
import { getLatestFindings } from "@/actions/findings";
import { listOrganizationsSafe } from "@/actions/organizations/organizations";
import { apiBaseUrl, getAuthHeaders } from "@/lib";
import { appendSanitizedProviderTypeFilters } from "@/lib/provider-filters";
import { handleApiResponse } from "@/lib/server-actions-helper";
import { OrganizationResource } from "@/types/organizations";
export const getResources = async ({
page = 1,
@@ -255,3 +258,57 @@ export const getResourceById = async (
return undefined;
}
};
export const getResourceDrawerData = async ({
resourceId,
resourceUid,
providerId,
providerType,
page = 1,
pageSize = 10,
query = "",
}: {
resourceId: string;
resourceUid: string;
providerId: string;
providerType: string;
page?: number;
pageSize?: number;
query?: string;
}) => {
const isCloudEnv = process.env.NEXT_PUBLIC_IS_CLOUD_ENV === "true";
const [resourceData, findingsResponse, organizationsResponse] =
await Promise.all([
getResourceById(resourceId, { fields: ["tags"] }),
getLatestFindings({
page,
pageSize,
query,
sort: "severity,-inserted_at",
filters: {
"filter[resource_uid]": resourceUid,
"filter[status]": "FAIL",
},
}),
isCloudEnv && providerType === "aws"
? listOrganizationsSafe()
: Promise.resolve({ data: [] }),
]);
const providerOrg =
providerType === "aws"
? (organizationsResponse.data.find((organization: OrganizationResource) =>
organization.relationships?.providers?.data?.some(
(provider: { id: string }) => provider.id === providerId,
),
) ?? null)
: null;
return {
findings: findingsResponse?.data ?? [],
findingsMeta: findingsResponse?.meta ?? null,
providerOrg,
resourceTags: resourceData?.data?.attributes.tags ?? {},
};
};
@@ -0,0 +1,16 @@
import { readFileSync } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { describe, expect, it } from "vitest";
describe("findings view overview SSR", () => {
const currentDir = path.dirname(fileURLToPath(import.meta.url));
const filePath = path.join(currentDir, "findings-view.ssr.tsx");
const source = readFileSync(filePath, "utf8");
it("uses the non-legacy latest findings columns", () => {
expect(source).toContain("ColumnLatestFindings");
expect(source).not.toContain("ColumnNewFindingsToDate");
});
});
@@ -3,7 +3,8 @@
import { getLatestFindings } from "@/actions/findings/findings";
import { LighthouseBanner } from "@/components/lighthouse/banner";
import { LinkToFindings } from "@/components/overview";
import { ColumnNewFindingsToDate } from "@/components/overview/new-findings-table/table/column-new-findings-to-date";
import { ColumnLatestFindings } from "@/components/overview/new-findings-table/table";
import { CardTitle } from "@/components/shadcn";
import { DataTable } from "@/components/ui/table";
import { createDict } from "@/lib/helper";
import { FindingProps, SearchParamsProps } from "@/types";
@@ -57,24 +58,23 @@ export async function FindingsViewSSR({ searchParams }: FindingsViewSSRProps) {
};
return (
<div className="flex w-full flex-col gap-6">
<div className="flex w-full flex-col">
<LighthouseBanner />
<div className="relative w-full flex-col justify-between md:flex-row">
<div className="flex w-full flex-col items-start gap-2 md:flex-row md:items-center">
<h3 className="text-sm font-bold text-nowrap whitespace-nowrap uppercase">
Latest new failing findings
</h3>
<p className="text-text-neutral-tertiary text-xs whitespace-nowrap">
Showing the latest 10 new failing findings by severity.
</p>
<LinkToFindings />
</div>
</div>
<DataTable
key={`dashboard-findings-${Date.now()}`}
columns={ColumnNewFindingsToDate}
columns={ColumnLatestFindings}
data={(expandedResponse?.data || []) as FindingProps[]}
header={
<div className="flex w-full items-center justify-between gap-4">
<div className="flex flex-col gap-0.5">
<CardTitle>Latest New Failed Findings</CardTitle>
<p className="text-text-neutral-tertiary text-xs">
Showing the latest 10 sorted by severity
</p>
</div>
<LinkToFindings />
</div>
}
/>
</div>
);
@@ -1,6 +1,7 @@
import { Skeleton } from "@heroui/skeleton";
import { Suspense } from "react";
import { SkeletonTableNewFindings } from "@/components/overview/new-findings-table/table";
import { SearchParamsProps } from "@/types";
import { GraphsTabsClient } from "./_components/graphs-tabs-client";
@@ -18,6 +19,10 @@ const LoadingFallback = () => (
</div>
);
const TAB_FALLBACKS: Partial<Record<TabId, React.ReactNode>> = {
findings: <SkeletonTableNewFindings />,
};
type GraphComponent = React.ComponentType<{ searchParams: SearchParamsProps }>;
const GRAPH_COMPONENTS: Record<TabId, GraphComponent> = {
@@ -38,9 +43,10 @@ export const GraphsTabsWrapper = async ({
const tabsContent = Object.fromEntries(
GRAPH_TABS.map((tab) => {
const Component = GRAPH_COMPONENTS[tab.id];
const fallback = TAB_FALLBACKS[tab.id] ?? <LoadingFallback />;
return [
tab.id,
<Suspense key={tab.id} fallback={<LoadingFallback />}>
<Suspense key={tab.id} fallback={fallback}>
<Component searchParams={searchParams} />
</Suspense>,
];
@@ -0,0 +1,93 @@
import { render, screen } from "@testing-library/react";
import userEvent from "@testing-library/user-event";
import type { ReactNode } from "react";
import { describe, expect, it, vi } from "vitest";
import type { GraphNode } from "@/types/attack-paths";
import { NodeDetailPanel } from "./node-detail-panel";
vi.mock("@/components/ui/sheet/sheet", () => ({
Sheet: ({ children }: { children: ReactNode }) => <div>{children}</div>,
SheetContent: ({ children }: { children: ReactNode }) => (
<div>{children}</div>
),
SheetDescription: ({ children }: { children: ReactNode }) => (
<div>{children}</div>
),
SheetHeader: ({ children }: { children: ReactNode }) => <div>{children}</div>,
SheetTitle: ({ children }: { children: ReactNode }) => <div>{children}</div>,
}));
vi.mock("./node-overview", () => ({
NodeOverview: () => <div>Node overview</div>,
}));
vi.mock("./node-findings", () => ({
NodeFindings: () => <div>Node findings</div>,
}));
vi.mock("./node-resources", () => ({
NodeResources: () => <div>Node resources</div>,
}));
const findingNode: GraphNode = {
id: "graph-node-id",
labels: ["ProwlerFinding"],
properties: {
id: "finding-123",
check_title: "Open S3 bucket",
name: "Open S3 bucket",
},
};
const resourceNode: GraphNode = {
id: "resource-node-id",
labels: ["S3Bucket"],
properties: {
id: "bucket-123",
name: "bucket-123",
},
};
describe("NodeDetailPanel", () => {
it("renders the view finding button only for finding nodes", () => {
const { rerender } = render(<NodeDetailPanel node={findingNode} />);
expect(
screen.getByRole("button", { name: /view finding finding-123/i }),
).toBeInTheDocument();
rerender(<NodeDetailPanel node={resourceNode} />);
expect(
screen.queryByRole("button", { name: /view finding/i }),
).not.toBeInTheDocument();
});
it("calls onViewFinding with the node finding id", async () => {
const user = userEvent.setup();
const onViewFinding = vi.fn();
render(
<NodeDetailPanel node={findingNode} onViewFinding={onViewFinding} />,
);
await user.click(
screen.getByRole("button", { name: /view finding finding-123/i }),
);
expect(onViewFinding).toHaveBeenCalledWith("finding-123");
});
it("disables the button and shows the spinner while loading", () => {
render(<NodeDetailPanel node={findingNode} viewFindingLoading />);
const button = screen.getByRole("button", {
name: /view finding finding-123/i,
});
expect(button).toBeDisabled();
expect(screen.getByLabelText("Loading")).toHaveClass("size-4");
});
});
@@ -1,6 +1,7 @@
"use client";
import { Button, Card, CardContent } from "@/components/shadcn";
import { Spinner } from "@/components/shadcn/spinner/spinner";
import {
Sheet,
SheetContent,
@@ -18,6 +19,8 @@ interface NodeDetailPanelProps {
node: GraphNode | null;
allNodes?: GraphNode[];
onClose?: () => void;
onViewFinding?: (findingId: string) => void;
viewFindingLoading?: boolean;
}
/**
@@ -26,9 +29,13 @@ interface NodeDetailPanelProps {
export const NodeDetailContent = ({
node,
allNodes = [],
onViewFinding,
viewFindingLoading = false,
}: {
node: GraphNode;
allNodes?: GraphNode[];
onViewFinding?: (findingId: string) => void;
viewFindingLoading?: boolean;
}) => {
const isProwlerFinding = node?.labels.some((label) =>
label.toLowerCase().includes("finding"),
@@ -56,7 +63,12 @@ export const NodeDetailContent = ({
<div className="text-text-neutral-secondary dark:text-text-neutral-secondary text-xs">
Findings connected to this node
</div>
<NodeFindings node={node} allNodes={allNodes} />
<NodeFindings
node={node}
allNodes={allNodes}
onViewFinding={onViewFinding}
viewFindingLoading={viewFindingLoading}
/>
</CardContent>
</Card>
)}
@@ -88,12 +100,15 @@ export const NodeDetailPanel = ({
node,
allNodes = [],
onClose,
onViewFinding,
viewFindingLoading = false,
}: NodeDetailPanelProps) => {
const isOpen = node !== null;
const isProwlerFinding = node?.labels.some((label) =>
label.toLowerCase().includes("finding"),
);
const findingId = node ? String(node.properties?.id || node.id) : "";
return (
<Sheet open={isOpen} onOpenChange={(open) => !open && onClose?.()}>
@@ -107,15 +122,19 @@ export const NodeDetailPanel = ({
</SheetDescription>
</div>
{node && isProwlerFinding && (
<Button asChild variant="default" size="sm" className="mt-1">
<a
href={`/findings?id=${String(node.properties?.id || node.id)}`}
target="_blank"
rel="noopener noreferrer"
aria-label={`View finding ${String(node.properties?.id || node.id)}`}
>
View Finding
</a>
<Button
variant="default"
size="sm"
className="mt-1"
onClick={() => onViewFinding?.(findingId)}
disabled={viewFindingLoading}
aria-label={`View finding ${findingId}`}
>
{viewFindingLoading ? (
<Spinner className="size-4" />
) : (
"View Finding →"
)}
</Button>
)}
</div>
@@ -123,7 +142,12 @@ export const NodeDetailPanel = ({
{node && (
<div className="pt-6">
<NodeDetailContent node={node} allNodes={allNodes} />
<NodeDetailContent
node={node}
allNodes={allNodes}
onViewFinding={onViewFinding}
viewFindingLoading={viewFindingLoading}
/>
</div>
)}
</SheetContent>
@@ -1,5 +1,7 @@
"use client";
import { Button } from "@/components/shadcn";
import { Spinner } from "@/components/shadcn/spinner/spinner";
import { SeverityBadge } from "@/components/ui/table/severity-badge";
import type { GraphNode } from "@/types/attack-paths";
@@ -16,13 +18,20 @@ type Severity = (typeof SEVERITY_LEVELS)[keyof typeof SEVERITY_LEVELS];
interface NodeFindingsProps {
node: GraphNode;
allNodes?: GraphNode[];
onViewFinding?: (findingId: string) => void;
viewFindingLoading?: boolean;
}
/**
* Node findings section showing related findings for the selected node
* Displays findings that are connected to the node via HAS_FINDING edges
*/
export const NodeFindings = ({ node, allNodes = [] }: NodeFindingsProps) => {
export const NodeFindings = ({
node,
allNodes = [],
onViewFinding,
viewFindingLoading = false,
}: NodeFindingsProps) => {
// Get finding IDs from the node's findings array (populated by adapter)
const findingIds = node.findings || [];
@@ -79,15 +88,20 @@ export const NodeFindings = ({ node, allNodes = [] }: NodeFindingsProps) => {
ID: {findingId}
</p>
</div>
<a
href={`/findings?id=${findingId}`}
target="_blank"
rel="noopener noreferrer"
<Button
variant="link"
size="sm"
onClick={() => onViewFinding?.(findingId)}
disabled={viewFindingLoading}
aria-label={`View full finding for ${findingName}`}
className="text-text-info dark:text-text-info h-auto shrink-0 p-0 text-xs font-medium hover:underline"
>
View Full Finding
</a>
{viewFindingLoading ? (
<Spinner className="size-4" />
) : (
"View Full Finding →"
)}
</Button>
</div>
{finding.properties?.description && (
<div className="text-text-neutral-secondary dark:text-text-neutral-secondary mt-2 text-xs">
@@ -1,8 +1,8 @@
"use client";
import Link from "next/link";
import { Badge } from "@/components/shadcn/badge/badge";
import { Button } from "@/components/shadcn/button/button";
import { Spinner } from "@/components/shadcn/spinner/spinner";
interface Finding {
id: string;
@@ -13,12 +13,18 @@ interface Finding {
interface NodeRemediationProps {
findings: Finding[];
onViewFinding?: (findingId: string) => void;
viewFindingLoading?: boolean;
}
/**
* Node remediation section showing related Prowler findings
*/
export const NodeRemediation = ({ findings }: NodeRemediationProps) => {
export const NodeRemediation = ({
findings,
onViewFinding,
viewFindingLoading = false,
}: NodeRemediationProps) => {
const getSeverityVariant = (severity: string) => {
switch (severity) {
case "critical":
@@ -66,15 +72,20 @@ export const NodeRemediation = ({ findings }: NodeRemediationProps) => {
</div>
</div>
<div className="mt-2">
<Link
href={`/findings?id=${finding.id}`}
target="_blank"
rel="noopener noreferrer"
<Button
variant="link"
size="sm"
onClick={() => onViewFinding?.(finding.id)}
disabled={viewFindingLoading}
aria-label={`View full finding for ${finding.title}`}
className="text-text-info dark:text-text-info text-sm transition-all hover:opacity-80 dark:hover:opacity-80"
className="text-text-info dark:text-text-info h-auto p-0 text-sm transition-all hover:opacity-80 dark:hover:opacity-80"
>
View Full Finding
</Link>
{viewFindingLoading ? (
<Spinner className="size-4" />
) : (
"View Full Finding →"
)}
</Button>
</div>
</div>
))}
@@ -24,6 +24,19 @@ vi.mock("next/navigation", () => ({
useSearchParams: () => navigationState.searchParams,
}));
vi.mock("@/components/shadcn/tooltip", () => ({
Tooltip: ({ children }: { children: ReactNode }) => <>{children}</>,
TooltipTrigger: ({
children,
}: {
children: ReactNode;
asChild?: boolean;
}) => <>{children}</>,
TooltipContent: ({ children }: { children: ReactNode }) => (
<span data-testid="tooltip-content">{children}</span>
),
}));
vi.mock("@/components/ui/entities/entity-info", () => ({
EntityInfo: ({
entityAlias,
@@ -156,14 +169,14 @@ describe("ScanListTable", () => {
expect(screen.getByText("12 Total Entries")).toBeInTheDocument();
expect(screen.getByText("Page 1 of 3")).toBeInTheDocument();
await user.click(screen.getAllByRole("button", { name: "Select scan" })[0]);
await user.click(screen.getAllByRole("radio", { name: "Select scan" })[0]);
expect(pushMock).toHaveBeenCalledWith(
"/attack-paths?scanPage=1&scanPageSize=5&scanId=scan-1",
);
});
it("enables the select button for a failed scan when graph data is ready", async () => {
it("enables the radio button for a failed scan when graph data is ready", async () => {
const user = userEvent.setup();
const failedScan: AttackPathScan = {
...createScan(1),
@@ -176,18 +189,18 @@ describe("ScanListTable", () => {
render(<ScanListTable scans={[failedScan]} />);
const button = screen.getByRole("button", { name: "Select scan" });
expect(button).toBeEnabled();
expect(button).toHaveTextContent("Select");
const radio = screen.getByRole("radio", { name: "Select scan" });
expect(radio).toBeEnabled();
expect(radio).toHaveAttribute("aria-checked", "false");
await user.click(button);
await user.click(radio);
expect(pushMock).toHaveBeenCalledWith(
"/attack-paths?scanPage=1&scanPageSize=5&scanId=scan-1",
);
});
it("disables the select button for a failed scan when graph data is not ready", () => {
it("disables the radio button for a failed scan when graph data is not ready", () => {
const failedScan: AttackPathScan = {
...createScan(1),
attributes: {
@@ -199,8 +212,114 @@ describe("ScanListTable", () => {
render(<ScanListTable scans={[failedScan]} />);
const button = screen.getByRole("button", { name: "Select scan" });
expect(button).toBeDisabled();
expect(button).toHaveTextContent("Failed");
const radio = screen.getByRole("radio", { name: "Scan not available" });
expect(radio).toBeDisabled();
});
it("shows a disabled radio button for a scheduled scan without graph data", () => {
const scheduledScan: AttackPathScan = {
...createScan(1),
attributes: {
...createScan(1).attributes,
state: "scheduled",
progress: 0,
graph_data_ready: false,
completed_at: null,
duration: null,
},
};
render(<ScanListTable scans={[scheduledScan]} />);
const radio = screen.getByRole("radio", { name: "Scan not available" });
expect(radio).toBeDisabled();
});
it("shows a disabled radio button for an executing scan without graph data", () => {
const executingScan: AttackPathScan = {
...createScan(1),
attributes: {
...createScan(1).attributes,
state: "executing",
progress: 45,
graph_data_ready: false,
completed_at: null,
duration: null,
},
};
render(<ScanListTable scans={[executingScan]} />);
const radio = screen.getByRole("radio", { name: "Scan not available" });
expect(radio).toBeDisabled();
});
it("enables the radio button for a scheduled scan when graph data is ready from a previous cycle", async () => {
const user = userEvent.setup();
const scheduledWithGraph: AttackPathScan = {
...createScan(1),
attributes: {
...createScan(1).attributes,
state: "scheduled",
progress: 0,
graph_data_ready: true,
},
};
render(<ScanListTable scans={[scheduledWithGraph]} />);
const radio = screen.getByRole("radio", { name: "Select scan" });
expect(radio).toBeEnabled();
expect(radio).toHaveAttribute("aria-checked", "false");
await user.click(radio);
expect(pushMock).toHaveBeenCalledWith(
"/attack-paths?scanPage=1&scanPageSize=5&scanId=scan-1",
);
});
it("exposes an accessible label in the Graph column when graph data is ready", () => {
render(<ScanListTable scans={[createScan(1)]} />);
expect(screen.getByLabelText("Graph available")).toHaveClass(
"text-text-success-primary",
);
});
it("exposes an accessible label in the Graph column when graph data is not ready", () => {
const noGraphScan: AttackPathScan = {
...createScan(1),
attributes: {
...createScan(1).attributes,
graph_data_ready: false,
},
};
render(<ScanListTable scans={[noGraphScan]} />);
expect(screen.getByLabelText("Graph not available")).toHaveClass(
"text-text-neutral-secondary",
);
});
it("renders a tooltip explaining a completed scan without graph data", () => {
const completedNoGraph: AttackPathScan = {
...createScan(1),
attributes: {
...createScan(1).attributes,
state: "completed",
graph_data_ready: false,
},
};
render(<ScanListTable scans={[completedNoGraph]} />);
expect(
screen.getByRole("radio", { name: "Scan not available" }),
).toBeDisabled();
expect(
screen.getByText("This scan completed without producing graph data."),
).toBeInTheDocument();
});
});
@@ -1,15 +1,25 @@
"use client";
import { ColumnDef } from "@tanstack/react-table";
import { Check, Minus } from "lucide-react";
import { usePathname, useRouter, useSearchParams } from "next/navigation";
import { Button } from "@/components/shadcn/button/button";
import {
RadioGroup,
RadioGroupItem,
} from "@/components/shadcn/radio-group/radio-group";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from "@/components/shadcn/tooltip";
import { DateWithTime } from "@/components/ui/entities/date-with-time";
import { EntityInfo } from "@/components/ui/entities/entity-info";
import { DataTable, DataTableColumnHeader } from "@/components/ui/table";
import { formatDuration } from "@/lib/date-utils";
import { cn } from "@/lib/utils";
import type { MetaDataProps, ProviderType } from "@/types";
import type { AttackPathScan, ScanState } from "@/types/attack-paths";
import type { AttackPathScan } from "@/types/attack-paths";
import { SCAN_STATES } from "@/types/attack-paths";
import { ScanStatusBadge } from "./scan-status-badge";
@@ -20,12 +30,6 @@ interface ScanListTableProps {
const DEFAULT_PAGE_SIZE = 5;
const PAGE_SIZE_OPTIONS = [2, 5, 10, 15];
const WAITING_STATES: readonly ScanState[] = [
SCAN_STATES.SCHEDULED,
SCAN_STATES.AVAILABLE,
SCAN_STATES.EXECUTING,
];
const parsePageParam = (value: string | null, fallback: number) => {
if (!value) return fallback;
@@ -38,34 +42,32 @@ const formatNullableDuration = (duration: number | null) => {
return formatDuration(duration);
};
const isSelectDisabled = (
scan: AttackPathScan,
selectedScanId: string | null,
) => {
return !scan.attributes.graph_data_ready || selectedScanId === scan.id;
};
const getSelectButtonLabel = (
scan: AttackPathScan,
selectedScanId: string | null,
) => {
if (selectedScanId === scan.id) {
return "Selected";
}
const getDisabledTooltip = (scan: AttackPathScan): string | null => {
if (scan.attributes.graph_data_ready) {
return "Select";
return null;
}
if (WAITING_STATES.includes(scan.attributes.state)) {
return "Waiting...";
if (scan.attributes.state === SCAN_STATES.SCHEDULED) {
return "Graph will be available once this scan runs and completes.";
}
if (scan.attributes.state === SCAN_STATES.AVAILABLE) {
return "This scan is queued. Graph will be available once it completes.";
}
if (scan.attributes.state === SCAN_STATES.EXECUTING) {
return "Scan is running. Graph will be available once it completes.";
}
if (scan.attributes.state === SCAN_STATES.FAILED) {
return "Failed";
return "This scan failed. No graph data is available.";
}
return "Select";
if (scan.attributes.state === SCAN_STATES.COMPLETED) {
return "This scan completed without producing graph data.";
}
return "Graph data is not available for this scan.";
};
const getSelectedRowSelection = (
@@ -97,11 +99,54 @@ const buildMetadata = (
const getColumns = ({
selectedScanId,
onSelectScan,
}: {
selectedScanId: string | null;
onSelectScan: (scanId: string) => void;
}): ColumnDef<AttackPathScan>[] => [
{
id: "select",
header: () => <span className="text-sm font-medium">Select</span>,
cell: ({ row }) => {
const isSelected = selectedScanId === row.original.id;
const canSelect = row.original.attributes.graph_data_ready;
const tooltip = getDisabledTooltip(row.original);
const radio = (
<RadioGroupItem
value={row.original.id}
checked={isSelected}
disabled={!canSelect}
className={cn(
"size-5",
canSelect &&
!isSelected &&
"border-text-neutral-secondary cursor-pointer",
!canSelect && "disabled:opacity-70",
)}
aria-label={
isSelected
? "Selected scan"
: canSelect
? "Select scan"
: "Scan not available"
}
/>
);
if (!canSelect && !isSelected && tooltip) {
return (
<Tooltip>
<TooltipTrigger asChild>
<span tabIndex={0}>{radio}</span>
</TooltipTrigger>
<TooltipContent>{tooltip}</TooltipContent>
</Tooltip>
);
}
return radio;
},
enableSorting: false,
},
{
accessorKey: "provider",
header: ({ column }) => (
@@ -135,22 +180,32 @@ const getColumns = ({
<DataTableColumnHeader column={column} title="Status" />
),
cell: ({ row }) => (
<ScanStatusBadge
status={row.original.attributes.state}
progress={row.original.attributes.progress}
graphDataReady={row.original.attributes.graph_data_ready}
/>
<div className="flex">
<ScanStatusBadge
status={row.original.attributes.state}
progress={row.original.attributes.progress}
/>
</div>
),
enableSorting: false,
},
{
accessorKey: "progress",
header: ({ column }) => (
<DataTableColumnHeader column={column} title="Progress" />
),
cell: ({ row }) => (
<span className="text-sm">{row.original.attributes.progress}%</span>
),
accessorKey: "graph_data_ready",
header: () => <span className="text-sm font-medium">Graph</span>,
cell: ({ row }) =>
row.original.attributes.graph_data_ready ? (
<Check
size={16}
aria-label="Graph available"
className="text-text-success-primary"
/>
) : (
<Minus
size={16}
aria-label="Graph not available"
className="text-text-neutral-secondary"
/>
),
enableSorting: false,
},
{
@@ -165,29 +220,6 @@ const getColumns = ({
),
enableSorting: false,
},
{
id: "actions",
header: () => <span className="sr-only">Actions</span>,
cell: ({ row }) => {
const isDisabled = isSelectDisabled(row.original, selectedScanId);
return (
<div className="flex justify-end">
<Button
type="button"
aria-label="Select scan"
disabled={isDisabled}
variant={isDisabled ? "secondary" : "default"}
onClick={() => onSelectScan(row.original.id)}
className="w-full max-w-24"
>
{getSelectButtonLabel(row.original, selectedScanId)}
</Button>
</div>
);
},
enableSorting: false,
},
];
/**
@@ -237,19 +269,27 @@ export const ScanListTable = ({ scans }: ScanListTableProps) => {
};
return (
<DataTable
columns={getColumns({
selectedScanId,
onSelectScan: handleSelectScan,
})}
data={paginatedScans}
metadata={buildMetadata(scans.length, currentPage, totalPages)}
controlledPage={currentPage}
controlledPageSize={pageSize}
onPageChange={handlePageChange}
onPageSizeChange={handlePageSizeChange}
enableRowSelection
rowSelection={getSelectedRowSelection(paginatedScans, selectedScanId)}
/>
<RadioGroup
value={selectedScanId ?? ""}
onValueChange={handleSelectScan}
className="gap-0"
>
<DataTable
columns={getColumns({ selectedScanId })}
data={paginatedScans}
metadata={buildMetadata(scans.length, currentPage, totalPages)}
controlledPage={currentPage}
controlledPageSize={pageSize}
onPageChange={handlePageChange}
onPageSizeChange={handlePageSizeChange}
onRowClick={(row) => {
if (row.original.attributes.graph_data_ready) {
handleSelectScan(row.original.id);
}
}}
enableRowSelection
rowSelection={getSelectedRowSelection(paginatedScans, selectedScanId)}
/>
</RadioGroup>
);
};
@@ -3,94 +3,55 @@
import { Loader2 } from "lucide-react";
import { Badge } from "@/components/shadcn/badge/badge";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from "@/components/shadcn/tooltip";
import { cn } from "@/lib/utils";
import type { ScanState } from "@/types/attack-paths";
import { SCAN_STATES } from "@/types/attack-paths";
const BADGE_CONFIG: Record<
ScanState,
{ className: string; label: string; showGraphDot: boolean }
> = {
const BADGE_CONFIG: Record<ScanState, { className: string; label: string }> = {
[SCAN_STATES.SCHEDULED]: {
className: "bg-bg-neutral-tertiary text-text-neutral-primary",
label: "Scheduled",
showGraphDot: true,
},
[SCAN_STATES.AVAILABLE]: {
className: "bg-bg-neutral-tertiary text-text-neutral-primary",
label: "Queued",
showGraphDot: true,
},
[SCAN_STATES.EXECUTING]: {
className: "bg-bg-warning-secondary text-text-neutral-primary",
className: "bg-bg-info-secondary text-text-info",
label: "In Progress",
showGraphDot: false,
},
[SCAN_STATES.COMPLETED]: {
className: "bg-bg-pass-secondary text-text-success-primary",
label: "Completed",
showGraphDot: false,
},
[SCAN_STATES.FAILED]: {
className: "bg-bg-fail-secondary text-text-error-primary",
label: "Failed",
showGraphDot: true,
},
};
interface ScanStatusBadgeProps {
status: ScanState;
progress?: number;
graphDataReady?: boolean;
}
export const ScanStatusBadge = ({
status,
progress = 0,
graphDataReady = false,
}: ScanStatusBadgeProps) => {
const config = BADGE_CONFIG[status];
const graphDot = graphDataReady && config.showGraphDot && (
<span className="inline-block size-2 rounded-full bg-green-500" />
);
const tooltipText = graphDataReady
? "Graph available"
: status === SCAN_STATES.FAILED || status === SCAN_STATES.COMPLETED
? "Graph not available"
: "Graph not available yet";
const icon =
status === SCAN_STATES.EXECUTING ? (
<Loader2
size={14}
className={
graphDataReady ? "animate-spin text-green-500" : "animate-spin"
}
/>
) : (
graphDot
);
const label =
status === SCAN_STATES.EXECUTING
? `${config.label} (${progress}%)`
: config.label;
return (
<Tooltip>
<TooltipTrigger asChild>
<Badge className={`${config.className} gap-2`}>
{icon}
<span>{label}</span>
</Badge>
</TooltipTrigger>
<TooltipContent>{tooltipText}</TooltipContent>
</Tooltip>
<Badge className={cn(config.className, "gap-2")}>
{status === SCAN_STATES.EXECUTING && (
<Loader2 size={14} className="animate-spin" />
)}
<span>{label}</span>
</Badge>
);
};
@@ -14,6 +14,8 @@ import {
   getAvailableQueries,
 } from "@/actions/attack-paths";
 import { adaptQueryResultToGraphData } from "@/actions/attack-paths/query-result.adapter";
+import { FindingDetailDrawer } from "@/components/findings/table";
+import { useFindingDetails } from "@/components/resources/table/use-finding-details";
 import { AutoRefresh } from "@/components/scans";
 import {
   Alert,
@@ -30,6 +32,7 @@ import {
   DialogTitle,
   DialogTrigger,
 } from "@/components/shadcn/dialog";
+import { Spinner } from "@/components/shadcn/spinner/spinner";
 import { useToast } from "@/components/ui";
 import type {
   AttackPathQuery,
@@ -37,7 +40,7 @@ import type {
   AttackPathScan,
   GraphNode,
 } from "@/types/attack-paths";
-import { ATTACK_PATH_QUERY_IDS } from "@/types/attack-paths";
+import { ATTACK_PATH_QUERY_IDS, SCAN_STATES } from "@/types/attack-paths";
 import {
   AttackPathGraph,
@@ -65,6 +68,7 @@
   const searchParams = useSearchParams();
   const scanId = searchParams.get("scanId");
   const graphState = useGraphState();
+  const finding = useFindingDetails();
   const { toast } = useToast();
   const [scansLoading, setScansLoading] = useState(true);
@@ -116,10 +120,17 @@
   // Check if there's an executing scan for auto-refresh
   const hasExecutingScan = scans.some(
     (scan) =>
-      scan.attributes.state === "executing" ||
-      scan.attributes.state === "scheduled",
+      scan.attributes.state === SCAN_STATES.EXECUTING ||
+      scan.attributes.state === SCAN_STATES.SCHEDULED,
   );

+  // Detect if the selected scan is showing data from a previous cycle
+  const selectedScan = scans.find((scan) => scan.id === scanId);
+  const isViewingPreviousCycleData =
+    selectedScan &&
+    selectedScan.attributes.graph_data_ready &&
+    selectedScan.attributes.state !== SCAN_STATES.COMPLETED;
+
   // Callback to refresh scans (used by AutoRefresh component)
   const refreshScans = async () => {
     try {
@@ -304,6 +315,14 @@
     graphState.selectNode(null);
   };

+  const getFindingId = (node: GraphNode | null) =>
+    node ? String(node.properties?.id || node.id) : "";
+
+  const handleViewFinding = (findingId: string) => {
+    if (!findingId) return;
+    void finding.navigateToFinding(findingId);
+  };
+
   const handleGraphExport = (svgElement: SVGSVGElement | null) => {
     try {
       if (svgElement) {
@@ -339,11 +358,11 @@
         <h2 className="dark:text-prowler-theme-pale/90 text-xl font-semibold">
           Attack Paths
         </h2>
-        <p className="text-text-neutral-secondary dark:text-text-neutral-secondary mt-2 text-sm">
+        <p className="text-text-neutral-secondary mt-2 text-sm">
           Select a scan, build a query, and visualize Attack Paths in your
           infrastructure.
         </p>
-        <p className="text-text-neutral-secondary dark:text-text-neutral-secondary mt-1 text-xs">
+        <p className="text-text-neutral-secondary mt-1 text-xs">
           Scans can be selected when data is available. A new scan does not
           interrupt access to existing data.
         </p>
@@ -373,6 +392,21 @@
           <ScanListTable scans={scans} />
         </Suspense>

+        {/* Banner: viewing data from a previous scan cycle */}
+        {isViewingPreviousCycleData && (
+          <Alert variant="info">
+            <Info className="size-4" />
+            <AlertTitle>Viewing data from a previous scan</AlertTitle>
+            <AlertDescription>
+              This scan is currently{" "}
+              {selectedScan.attributes.state === SCAN_STATES.EXECUTING
+                ? `running (${selectedScan.attributes.progress}%)`
+                : selectedScan.attributes.state}
+              . The graph data shown is from the last completed cycle.
+            </AlertDescription>
+          </Alert>
+        )}
+
         {/* Query Builder Section - shown only after selecting a scan */}
         {scanId && (
           <div className="minimal-scrollbar rounded-large shadow-small border-border-neutral-secondary bg-bg-neutral-secondary relative z-0 flex w-full flex-col gap-4 overflow-auto border p-4">
@@ -568,7 +602,7 @@
                   <X size={16} />
                 </Button>
               </div>
-              <p className="text-text-neutral-secondary dark:text-text-neutral-secondary mb-4 text-xs">
+              <p className="text-text-neutral-secondary mb-4 text-xs">
                 {graphState.selectedNode?.labels.some(
                   (label) =>
                     label
@@ -591,7 +625,7 @@
                 <h4 className="mb-2 text-xs font-semibold">
                   Type
                 </h4>
-                <p className="text-text-neutral-secondary dark:text-text-neutral-secondary text-xs">
+                <p className="text-text-neutral-secondary text-xs">
                   {graphState.selectedNode?.labels
                     .map(formatNodeLabel)
                     .join(", ")}
@@ -641,7 +675,7 @@
             <div className="flex items-center justify-between">
               <div className="flex-1">
                 <h3 className="text-lg font-semibold">Node Details</h3>
-                <p className="text-text-neutral-secondary dark:text-text-neutral-secondary mt-1 text-sm">
+                <p className="text-text-neutral-secondary mt-1 text-sm">
                   {String(
                     graphState.selectedNode.labels.some((label) =>
                       label.toLowerCase().includes("finding"),
@@ -659,15 +693,20 @@
               {graphState.selectedNode.labels.some((label) =>
                 label.toLowerCase().includes("finding"),
               ) && (
-                <Button asChild variant="default" size="sm">
-                  <a
-                    href={`/findings?id=${String(graphState.selectedNode.properties?.id || graphState.selectedNode.id)}`}
-                    target="_blank"
-                    rel="noopener noreferrer"
-                    aria-label={`View finding ${String(graphState.selectedNode.properties?.id || graphState.selectedNode.id)}`}
-                  >
-                    View Finding
-                  </a>
+                <Button
+                  variant="default"
+                  size="sm"
+                  onClick={() =>
+                    handleViewFinding(getFindingId(graphState.selectedNode))
+                  }
+                  disabled={finding.findingDetailLoading}
+                  aria-label={`View finding ${getFindingId(graphState.selectedNode)}`}
+                >
+                  {finding.findingDetailLoading ? (
+                    <Spinner className="size-4" />
+                  ) : (
+                    "View Finding"
+                  )}
                 </Button>
               )}
               <Button
@@ -685,9 +724,22 @@
             <NodeDetailContent
               node={graphState.selectedNode}
               allNodes={graphState.data.nodes}
+              onViewFinding={handleViewFinding}
+              viewFindingLoading={finding.findingDetailLoading}
             />
           </div>
         )}
+
+        {finding.findingDetails && (
+          <FindingDetailDrawer
+            key={finding.findingDetails.id}
+            finding={finding.findingDetails}
+            defaultOpen
+            onOpenChange={(open) => {
+              if (!open) finding.resetFindingDetails();
+            }}
+          />
+        )}
       </>
     )}
   </div>
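The page now opens findings through the `useFindingDetails` hook and `FindingDetailDrawer` instead of linking out to `/findings?id=...`. The hook contract the page relies on, inferred from the call sites above (illustrative only; the real hook lives in `@/components/resources/table/use-finding-details`):

```ts
// Contract exercised by the page in the diff above; names are taken from
// the call sites, the types are assumptions.
interface FindingDetailsApi {
  findingDetails: { id: string } | null; // set once a finding is loaded
  findingDetailLoading: boolean; // disables the button, swaps in the spinner
  navigateToFinding: (findingId: string) => Promise<void>; // fetch + open drawer
  resetFindingDetails: () => void; // invoked when the drawer is closed
}
```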
+8 -2
@@ -25,8 +25,10 @@ describe("findings page", () => {
     expect(source).toContain("resolveFindingScanDateFilters");
   });

-  it("uses getLatestFindingGroups for non-date/scan queries and getFindingGroups for historical", () => {
-    expect(source).toContain("hasDateOrScan");
+  it("uses resolved filters to choose getFindingGroups for historical queries and getLatestFindingGroups otherwise", () => {
+    expect(source).toContain("hasHistoricalData");
+    expect(source).toContain("hasDateOrScanFilter(filtersWithScanDates)");
+    expect(source).toContain("hasDateOrScanFilter(filters)");
     expect(source).toContain("getFindingGroups");
     expect(source).toContain("getLatestFindingGroups");
   });
@@ -34,4 +36,8 @@ describe("findings page", () => {
   it("guards errors array access with a length check", () => {
     expect(source).toContain("errors?.length > 0");
   });
+
+  it("applies the shared default muted filter so muted findings are hidden unless the caller opts in", () => {
+    expect(source).toContain("applyDefaultMutedFilter");
+  });
 });
+17 -19
@@ -16,6 +16,7 @@ import {
 import { ContentLayout } from "@/components/ui";
 import { FilterTransitionWrapper } from "@/contexts";
 import {
+  applyDefaultMutedFilter,
   createScanDetailsMapping,
   extractFiltersAndQuery,
   extractSortAndKey,
@@ -34,28 +35,26 @@ export default async function Findings({
   const { encodedSort } = extractSortAndKey(resolvedSearchParams);
   const { filters, query } = extractFiltersAndQuery(resolvedSearchParams);

-  // Check if the searchParams contain any date or scan filter
-  const hasDateOrScan = hasDateOrScanFilter(resolvedSearchParams);
-
   // TODO: Re-implement deep link support (/findings?id=<uuid>) using the grouped view's resource detail drawer
   // once the legacy FindingDetailsSheet is fully deprecated (still used by /resources and overview dashboard).
   const [providersData, scansData] = await Promise.all([
     getProviders({ pageSize: 50 }),
     getScans({ pageSize: 50 }),
   ]);

-  const filtersWithScanDates = await resolveFindingScanDateFilters({
-    filters,
-    scans: scansData?.data || [],
-    loadScan: async (scanId: string) => {
-      const response = await getScan(scanId);
-      return response?.data;
-    },
-  });
+  const filtersWithScanDates = applyDefaultMutedFilter(
+    await resolveFindingScanDateFilters({
+      filters,
+      scans: scansData?.data || [],
+      loadScan: async (scanId: string) => {
+        const response = await getScan(scanId);
+        return response?.data;
+      },
+    }),
+  );
+  const hasHistoricalData = hasDateOrScanFilter(filtersWithScanDates);

   const metadataInfoData = await (
-    hasDateOrScan ? getMetadataInfo : getLatestMetadataInfo
+    hasHistoricalData ? getMetadataInfo : getLatestMetadataInfo
   )({
     query,
     sort: encodedSort,
@@ -122,10 +121,9 @@
   const pageSize = parseInt(searchParams.pageSize?.toString() || "10", 10);
   const { encodedSort } = extractSortAndKey(searchParams);

-  // Check if the searchParams contain any date or scan filter
-  const hasDateOrScan = hasDateOrScanFilter(searchParams);
+  const hasHistoricalData = hasDateOrScanFilter(filters);

-  const fetchFindingGroups = hasDateOrScan
+  const fetchFindingGroups = hasHistoricalData
     ? getFindingGroups
     : getLatestFindingGroups;
@@ -154,7 +152,7 @@
         data={groups}
         metadata={findingGroupsData?.meta}
         resolvedFilters={filters}
-        hasHistoricalData={hasDateOrScan}
+        hasHistoricalData={hasHistoricalData}
       />
     </>
   );
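`applyDefaultMutedFilter` itself is not shown in this diff. A minimal sketch of what it could look like, assuming it only injects `filter[muted]=false` when the caller has not set a muted filter explicitly (which is what the updated spec asserts):

```ts
// Minimal sketch under the assumption above; the real helper is not part
// of this diff, and the filter value type is simplified to string here.
export const applyDefaultMutedFilter = (
  filters: Record<string, string>,
): Record<string, string> =>
  "filter[muted]" in filters
    ? filters // caller opted in (or out) explicitly; leave untouched
    : { ...filters, "filter[muted]": "false" }; // hide muted by default
```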
+8 -27
@@ -1,11 +1,6 @@
 import { Suspense } from "react";

-import {
-  AddProviderButton,
-  MutedFindingsConfigButton,
-  ProvidersAccountsTable,
-  ProvidersFilters,
-} from "@/components/providers";
+import { ProvidersAccountsView } from "@/components/providers";
 import { SkeletonTableProviders } from "@/components/providers/table";
 import { Skeleton } from "@/components/shadcn/skeleton/skeleton";
 import { ContentLayout } from "@/components/ui";
@@ -56,15 +51,6 @@
   );
 }

-const ProvidersActions = () => {
-  return (
-    <div className="flex flex-wrap gap-4 md:justify-end">
-      <MutedFindingsConfigButton />
-      <AddProviderButton />
-    </div>
-  );
-};
-
 const ProvidersTableFallback = () => {
   return (
     <div className="flex flex-col gap-6">
@@ -120,17 +106,12 @@
   });

   return (
-    <div className="flex flex-col gap-6">
-      <ProvidersFilters
-        filters={providersView.filters}
-        providers={providersView.providers}
-        actions={<ProvidersActions />}
-      />
-      <ProvidersAccountsTable
-        isCloud={process.env.NEXT_PUBLIC_IS_CLOUD_ENV === "true"}
-        metadata={providersView.metadata}
-        rows={providersView.rows}
-      />
-    </div>
+    <ProvidersAccountsView
+      isCloud={process.env.NEXT_PUBLIC_IS_CLOUD_ENV === "true"}
+      filters={providersView.filters}
+      providers={providersView.providers}
+      metadata={providersView.metadata}
+      rows={providersView.rows}
+    />
   );
 };
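The page-level composition moves behind a single `ProvidersAccountsView` component. Its prop surface, inferred from the call site above (illustrative only; the concrete filter, provider, and row types are not visible in this diff, so they are left as `unknown`):

```ts
// Assumed prop surface; presumably the component composes the former
// ProvidersFilters, ProvidersAccountsTable, and action buttons internally.
interface ProvidersAccountsViewProps {
  isCloud: boolean;
  filters: unknown;
  providers: unknown;
  metadata: unknown;
  rows: unknown;
}
```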
@@ -0,0 +1,16 @@
+import { readFileSync } from "node:fs";
+import path from "node:path";
+import { fileURLToPath } from "node:url";
+
+import { describe, expect, it } from "vitest";
+
+describe("client accordion content", () => {
+  const currentDir = path.dirname(fileURLToPath(import.meta.url));
+  const filePath = path.join(currentDir, "client-accordion-content.tsx");
+  const source = readFileSync(filePath, "utf8");
+
+  it("uses the shared standalone finding columns instead of the legacy findings columns", () => {
+    expect(source).toContain("getStandaloneFindingColumns");
+    expect(source).not.toContain("getColumnFindings");
+  });
+});
@@ -5,7 +5,7 @@ import { useEffect, useRef, useState } from "react";
 import { getFindings } from "@/actions/findings/findings";
 import {
-  getColumnFindings,
+  getStandaloneFindingColumns,
   SkeletonTableFindings,
 } from "@/components/findings/table";
 import { Accordion } from "@/components/ui/accordion/Accordion";
@@ -33,6 +33,7 @@ export const ClientAccordionContent = ({
   const searchParams = useSearchParams();
   const pageNumber = searchParams.get("page") || "1";
   const complianceId = searchParams.get("complianceId");
+  const openFindingId = searchParams.get("id");
   const defaultSort = "severity,status,-inserted_at";
   const sort = searchParams.get("sort") || defaultSort;
   const loadedPageRef = useRef<string | null>(null);
@@ -61,6 +62,7 @@ export const ClientAccordionContent = ({
         filters: {
           "filter[check_id__in]": checkIds.join(","),
           "filter[scan]": scanId,
+          "filter[muted]": "false",
           ...(region && { "filter[region__in]": region }),
         },
         page: parseInt(pageNumber, 10),
@@ -159,12 +161,7 @@ export const ClientAccordionContent = ({
           <h4 className="mb-2 text-sm font-medium">Findings</h4>
           <DataTable
-            // Remove select and updated_at columns for compliance view
-            columns={getColumnFindings({}, 0).filter(
-              (col) =>
-                col.id !== "select" &&
-                !("accessorKey" in col && col.accessorKey === "updated_at"),
-            )}
+            columns={getStandaloneFindingColumns({ openFindingId })}
             data={expandedFindings || []}
             metadata={findings?.meta}
             disableScroll={true}
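`getStandaloneFindingColumns` replaces the hand-rolled `.filter()` over the legacy columns. An illustrative signature, inferred from the call site (the real implementation lives in `@/components/findings/table` and presumably omits the select and updated_at columns itself):

```ts
// Assumed signature only; the options shape beyond openFindingId is a guess.
import type { ColumnDef } from "@tanstack/react-table";

interface StandaloneFindingColumnsOptions {
  openFindingId?: string | null; // deep-link target from ?id=<uuid>
}

declare function getStandaloneFindingColumns(
  options: StandaloneFindingColumnsOptions,
): ColumnDef<unknown>[];
```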
+3 -5
@@ -12,6 +12,7 @@ import {
   PopoverTrigger,
 } from "@/components/shadcn/popover";
 import { useUrlFilters } from "@/hooks/use-url-filters";
+import { toLocalDateString } from "@/lib/date-utils";
 import { cn } from "@/lib/utils";

 /** Batch mode: caller controls both the pending date value and the notification callback (all-or-nothing). */
@@ -67,17 +68,14 @@ export const CustomDatePicker = ({
   const applyDateFilter = (selectedDate: Date | undefined) => {
     if (onBatchChange) {
       // Batch mode: notify caller instead of updating URL
-      onBatchChange(
-        "inserted_at",
-        selectedDate ? format(selectedDate, "yyyy-MM-dd") : "",
-      );
+      onBatchChange("inserted_at", toLocalDateString(selectedDate) ?? "");
       return;
     }

     // Instant mode (default): push to URL immediately
     if (selectedDate) {
       // Format as YYYY-MM-DD for the API
-      updateFilter("inserted_at", format(selectedDate, "yyyy-MM-dd"));
+      updateFilter("inserted_at", toLocalDateString(selectedDate) ?? "");
     } else {
       updateFilter("inserted_at", null);
     }
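`toLocalDateString` is not shown in this diff. A minimal sketch under the assumption that it produces the same local-timezone `YYYY-MM-DD` string as the old `format(date, "yyyy-MM-dd")` call while also tolerating an undefined date (hence the `?? ""` at the call sites):

```ts
// Sketch only; the real helper lives in @/lib/date-utils. Uses local
// getters rather than toISOString() so the date never shifts across
// the UTC boundary for users in non-UTC timezones.
export const toLocalDateString = (date?: Date): string | undefined => {
  if (!date) return undefined;
  const year = date.getFullYear();
  const month = String(date.getMonth() + 1).padStart(2, "0");
  const day = String(date.getDate()).padStart(2, "0");
  return `${year}-${month}-${day}`;
};
```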

Some files were not shown because too many files have changed in this diff.