Compare commits

...

27 Commits

Author SHA1 Message Date
Hugo P.Brito 42ea4417be feat(m365): add entra_pim_role_usage_alert_exists security check
Add new security check entra_pim_role_usage_alert_exists for m365 provider.
Includes check implementation, metadata, and unit tests.
2026-04-20 14:54:15 +01:00
Pepe Fagoaga 2a9c538aff chore: review changelog for v5.24.1 (#10791) 2026-04-20 14:01:29 +02:00
Pepe Fagoaga bf1b53bbd2 fix(ui): sorting and filtering for findings (#10778)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2026-04-20 13:34:31 +02:00
César Arroba 94a2ea1e8f chore: update CODEOWNERS for new team hierarchy (#10706) 2026-04-20 11:39:00 +02:00
Daniel Barranquero f7194b32de docs: remove prowler ctf page (#10782) 2026-04-20 09:37:30 +02:00
Pedro Martín 6ffe4e95bf fix(api): detect silent failures in ResourceFindingMapping (#10724)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-04-20 09:00:43 +02:00
Alan Buscaglia 577aa14acc fix(ui): correct IaC findings counters (#10736)
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2026-04-17 12:48:57 +02:00
Andoni Alonso 19c752c127 fix(cloudflare): guard validate_credentials against paginator infinite loops (#10771) 2026-04-17 11:23:31 +02:00
Alejandro Bailo f2d35f5885 fix(ui): exclude muted findings and polish filter selectors (#10734) 2026-04-17 11:07:22 +02:00
Josema Camacho 536e90f2a5 perf(attack-paths): cleanup task prioritization, restore default batch sizes to 1000, upgrade Cartography to 0.135.0 (#10729) 2026-04-17 10:22:30 +02:00
Daniel Barranquero 276a5d66bd feat(docs): add ctf documentation (#10761) 2026-04-16 19:35:52 +02:00
Alejandro Bailo 489c6c1073 fix: CHANGELOG minor issue (#10758) 2026-04-16 17:07:22 +02:00
Adrián Peña b08b072288 fix(api): exclude muted findings from pass_count, fail_count and manual_count (#10753) 2026-04-16 15:56:08 +02:00
Josema Camacho ca29e354b6 chore(deps): bump msgraph-sdk to 1.55.0 and azure-mgmt-resource to 24.0.0, remove marshmallow (#10733) 2026-04-16 15:34:28 +02:00
Alejandro Bailo 85a3927950 fix(ui): upgrade React 19.2.5 and Next.js 16.2.3 to mitigate CVE-2026-23869 (#10752) 2026-04-16 15:24:10 +02:00
Rubén De la Torre Vico 04fe3f65e0 chore(deps): enable Dependabot pre-commit ecosystem and bump hooks (#10732) 2026-04-16 13:38:11 +02:00
Andoni Alonso 297c9d0734 fix(sdk): move #10726 changelog entry to unreleased version (#10728) 2026-04-16 13:10:00 +02:00
Erich Blume a2a1a73749 fix(image): --registry-list crashes with AttributeError on global_provider (#10691)
Co-authored-by: Andoni A. <14891798+andoniaf@users.noreply.github.com>
2026-04-16 13:02:25 +02:00
lydiavilchez 08fbe17e29 fix(googleworkspace): treat secure Google defaults as PASS for Drive checks (#10727) 2026-04-16 13:01:55 +02:00
lydiavilchez d920f78059 fix(googleworkspace): treat secure Google defaults as PASS for Calendar checks (#10726) 2026-04-16 12:51:40 +02:00
Pepe Fagoaga 12bf3d5e70 fix(db): add missing tenant_id filter in queries (#10722) 2026-04-16 11:55:38 +02:00
Adrián Peña 4002c28b5d fix(api): add fallback handling for missing resources in findings (#10708) 2026-04-16 11:45:06 +02:00
Andoni Alonso 2439f54280 fix(sdk): allow account-scoped tokens in Cloudflare connection test (#10723) 2026-04-16 11:38:15 +02:00
Prowler Bot b0e59156e6 chore(ui): Bump version to v5.25.0 (#10711)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:14:46 +02:00
Prowler Bot f013bd4a53 docs: Update version to v5.24.0 (#10714)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:14:17 +02:00
Prowler Bot 6ad15f900f chore(release): Bump version to v5.25.0 (#10710)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:14:06 +02:00
Prowler Bot 1784bf38ab chore(api): Bump version to v1.26.0 (#10715)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:13:33 +02:00
109 changed files with 6719 additions and 3590 deletions
+1 -1
View File
@@ -145,7 +145,7 @@ SENTRY_RELEASE=local
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}
#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.24.0
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.25.0
# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
+12 -11
View File
@@ -1,14 +1,15 @@
# SDK
/* @prowler-cloud/sdk
/prowler/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/tests/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/dashboard/ @prowler-cloud/sdk
/docs/ @prowler-cloud/sdk
/examples/ @prowler-cloud/sdk
/util/ @prowler-cloud/sdk
/contrib/ @prowler-cloud/sdk
/permissions/ @prowler-cloud/sdk
/codecov.yml @prowler-cloud/sdk @prowler-cloud/api
/* @prowler-cloud/detection-remediation
/prowler/ @prowler-cloud/detection-remediation
/prowler/compliance/ @prowler-cloud/compliance
/tests/ @prowler-cloud/detection-remediation
/dashboard/ @prowler-cloud/detection-remediation
/docs/ @prowler-cloud/detection-remediation
/examples/ @prowler-cloud/detection-remediation
/util/ @prowler-cloud/detection-remediation
/contrib/ @prowler-cloud/detection-remediation
/permissions/ @prowler-cloud/detection-remediation
/codecov.yml @prowler-cloud/detection-remediation @prowler-cloud/api
# API
/api/ @prowler-cloud/api
@@ -17,7 +18,7 @@
/ui/ @prowler-cloud/ui
# AI
/mcp_server/ @prowler-cloud/ai
/mcp_server/ @prowler-cloud/detection-remediation
# Platform
/.github/ @prowler-cloud/platform
+12
View File
@@ -66,6 +66,18 @@ updates:
cooldown:
default-days: 7
- package-ecosystem: "pre-commit"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 25
target-branch: master
labels:
- "dependencies"
- "pre-commit"
cooldown:
default-days: 7
# Dependabot Updates are temporarily disabled - 2025/04/15
# v4.6
# - package-ecosystem: "pip"
+9 -9
View File
@@ -1,7 +1,7 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
rev: v6.0.0
hooks:
- id: check-merge-conflict
- id: check-yaml
@@ -16,7 +16,7 @@ repos:
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.13.0
rev: v2.16.0
hooks:
- id: pretty-format-toml
args: [--autofix]
@@ -24,21 +24,21 @@ repos:
## GITHUB ACTIONS
- repo: https://github.com/zizmorcore/zizmor-pre-commit
rev: v1.6.0
rev: v1.24.1
hooks:
- id: zizmor
files: ^\.github/
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.10.0
rev: v0.11.0
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/myint/autoflake
rev: v2.3.1
rev: v2.3.3
hooks:
- id: autoflake
exclude: ^skills/
@@ -50,20 +50,20 @@ repos:
]
- repo: https://github.com/pycqa/isort
rev: 5.13.2
rev: 8.0.1
hooks:
- id: isort
exclude: ^skills/
args: ["--profile", "black"]
- repo: https://github.com/psf/black
rev: 24.4.2
rev: 26.3.1
hooks:
- id: black
exclude: ^skills/
- repo: https://github.com/pycqa/flake8
rev: 7.0.0
rev: 7.3.0
hooks:
- id: flake8
exclude: (contrib|^skills/)
@@ -93,7 +93,7 @@ repos:
pass_filenames: false
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta
rev: v2.14.0
hooks:
- id: hadolint
args: ["--ignore=DL3013"]
+15
View File
@@ -2,6 +2,21 @@
All notable changes to the **Prowler API** are documented in this file.
## [1.25.1] (Prowler v5.24.1)
### 🔄 Changed
- Attack Paths: Restore `SYNC_BATCH_SIZE` and `FINDINGS_BATCH_SIZE` defaults to 1000, upgrade Cartography to 0.135.0, enable Celery queue priority for cleanup task, rewrite Finding insertion, remove AWS graph cleanup and add timing logs [(#10729)](https://github.com/prowler-cloud/prowler/pull/10729)
### 🐞 Fixed
- Finding group resources endpoints now include findings without associated resources (orphaned IaC findings) as simulated resource rows, and return one row per finding when multiple findings share a resource [(#10708)](https://github.com/prowler-cloud/prowler/pull/10708)
- Attack Paths: Missing `tenant_id` filter while getting related findings after scan completes [(#10722)](https://github.com/prowler-cloud/prowler/pull/10722)
- Finding group counters `pass_count`, `fail_count` and `manual_count` now exclude muted findings [(#10753)](https://github.com/prowler-cloud/prowler/pull/10753)
- Silent data loss in `ResourceFindingMapping` bulk insert that left findings orphaned when `INSERT ... ON CONFLICT DO NOTHING` dropped rows without raising; added explicit `unique_fields` [(#10724)](https://github.com/prowler-cloud/prowler/pull/10724)
---
## [1.25.0] (Prowler v5.24.0)
### 🔄 Changed
+140 -116
View File
@@ -682,21 +682,21 @@ requests = ">=2.21.0,<3.0.0"
[[package]]
name = "alibabacloud-tea-openapi"
version = "0.4.1"
version = "0.4.4"
description = "Alibaba Cloud openapi SDK Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "alibabacloud_tea_openapi-0.4.1-py3-none-any.whl", hash = "sha256:e46bfa3ca34086d2c357d217a0b7284ecbd4b3bab5c88e075e73aec637b0e4a0"},
{file = "alibabacloud_tea_openapi-0.4.1.tar.gz", hash = "sha256:2384b090870fdb089c3c40f3fb8cf0145b8c7d6c14abbac521f86a01abb5edaf"},
{file = "alibabacloud_tea_openapi-0.4.4-py3-none-any.whl", hash = "sha256:cea6bc1fe35b0319a8752cb99eb0ecb0dab7ca1a71b99c12970ba0867410995f"},
{file = "alibabacloud_tea_openapi-0.4.4.tar.gz", hash = "sha256:1b0917bc03cd49417da64945e92731716d53e2eb8707b235f54e45b7473221ce"},
]
[package.dependencies]
alibabacloud-credentials = ">=1.0.2,<2.0.0"
alibabacloud-gateway-spi = ">=0.0.2,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"
cryptography = ">=3.0.0,<45.0.0"
cryptography = {version = ">=3.0.0,<47.0.0", markers = "python_version >= \"3.8\""}
darabonba-core = ">=1.0.3,<2.0.0"
[[package]]
@@ -1526,19 +1526,19 @@ typing-extensions = ">=4.6.0"
[[package]]
name = "azure-mgmt-resource"
version = "23.3.0"
version = "24.0.0"
description = "Microsoft Azure Resource Management Client Library for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_mgmt_resource-23.3.0-py3-none-any.whl", hash = "sha256:ab216ee28e29db6654b989746e0c85a1181f66653929d2cb6e48fba66d9af323"},
{file = "azure_mgmt_resource-23.3.0.tar.gz", hash = "sha256:fc4f1fd8b6aad23f8af4ed1f913df5f5c92df117449dc354fea6802a2829fea4"},
{file = "azure_mgmt_resource-24.0.0-py3-none-any.whl", hash = "sha256:27b32cd223e2784269f5a0db3c282042886ee4072d79cedc638438ece7cd0df4"},
{file = "azure_mgmt_resource-24.0.0.tar.gz", hash = "sha256:cf6b8995fcdd407ac9ff1dd474087129429a1d90dbb1ac77f97c19b96237b265"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
azure-mgmt-core = ">=1.5.0"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
@@ -1822,19 +1822,19 @@ crt = ["awscrt (==0.27.6)"]
[[package]]
name = "cartography"
version = "0.132.0"
version = "0.135.0"
description = "Explore assets and their relationships across your technical infrastructure."
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "cartography-0.132.0-py3-none-any.whl", hash = "sha256:c070aa51d0ab4479cb043cae70b35e7df49f2fb5f1fa95ccf10000bbeb952262"},
{file = "cartography-0.132.0.tar.gz", hash = "sha256:7c6332bc57fd2629d7b83aee7bd95a7b2edb0d51ef746efa0461399e0b66625c"},
{file = "cartography-0.135.0-py3-none-any.whl", hash = "sha256:c62c32a6917b8f23a8b98fe2b6c7c4a918b50f55918482966c4dae1cf5f538e1"},
{file = "cartography-0.135.0.tar.gz", hash = "sha256:3f500cd22c3b392d00e8b49f62acc95fd4dcd559ce514aafe2eb8101133c7a49"},
]
[package.dependencies]
adal = ">=1.2.4"
aioboto3 = ">=13.0.0"
aioboto3 = ">=15.0.0"
azure-cli-core = ">=2.26.0"
azure-identity = ">=1.5.0"
azure-keyvault-certificates = ">=4.0.0"
@@ -1852,9 +1852,9 @@ azure-mgmt-keyvault = ">=10.0.0"
azure-mgmt-logic = ">=10.0.0"
azure-mgmt-monitor = ">=3.0.0"
azure-mgmt-network = ">=25.0.0"
azure-mgmt-resource = ">=10.2.0,<25.0.0"
azure-mgmt-resource = ">=24.0.0,<25"
azure-mgmt-security = ">=5.0.0"
azure-mgmt-sql = ">=3.0.1,<4"
azure-mgmt-sql = ">=3.0.1"
azure-mgmt-storage = ">=16.0.0"
azure-mgmt-synapse = ">=2.0.0"
azure-mgmt-web = ">=7.0.0"
@@ -1862,38 +1862,39 @@ azure-synapse-artifacts = ">=0.17.0"
backoff = ">=2.1.2"
boto3 = ">=1.15.1"
botocore = ">=1.18.1"
cloudflare = ">=4.1.0,<5.0.0"
cloudflare = ">=4.1.0"
crowdstrike-falconpy = ">=0.5.1"
cryptography = "*"
dnspython = ">=1.15.0"
duo-client = "*"
google-api-python-client = ">=1.7.8"
cryptography = ">=45.0.0"
dnspython = ">=2.0.0"
duo-client = ">=5.5.0"
google-api-python-client = ">=2.0.0"
google-auth = ">=2.37.0"
google-cloud-asset = ">=1.0.0"
google-cloud-resource-manager = ">=1.14.2"
httpx = ">=0.24.0"
kubernetes = ">=22.6.0"
marshmallow = ">=3.0.0rc7"
msgraph-sdk = "*"
marshmallow = ">=4.0.0"
msgraph-sdk = ">=1.53.0"
msrestazure = ">=0.6.4"
neo4j = ">=6.0.0"
oci = ">=2.71.0"
okta = "<1.0.0"
packageurl-python = "*"
packaging = "*"
packageurl-python = ">=0.17.0"
packaging = ">=26.0.0"
pagerduty = ">=4.0.1"
policyuniverse = ">=1.1.0.0"
PyJWT = {version = ">=2.0.0", extras = ["crypto"]}
python-dateutil = "*"
python-dateutil = ">=2.9.0"
python-digitalocean = ">=1.16.0"
pyyaml = ">=5.3.1"
requests = ">=2.22.0"
scaleway = ">=2.10.0"
slack-sdk = ">=3.37.0"
statsd = "*"
statsd = ">=4.0.0"
typer = ">=0.9.0"
types-aiobotocore-ecr = "*"
xmltodict = "*"
types-aiobotocore-ecr = ">=3.1.0"
workos = ">=5.44.0"
xmltodict = ">=1.0.0"
[[package]]
name = "celery"
@@ -2503,62 +2504,74 @@ dev = ["bandit", "coverage", "flake8", "pydocstyle", "pylint", "pytest", "pytest
[[package]]
name = "cryptography"
version = "44.0.3"
version = "46.0.6"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
groups = ["main", "dev"]
files = [
{file = "cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01"},
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d"},
{file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904"},
{file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44"},
{file = "cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d"},
{file = "cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d"},
{file = "cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c"},
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f"},
{file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5"},
{file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b"},
{file = "cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028"},
{file = "cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cad399780053fb383dc067475135e41c9fe7d901a97dd5d9c5dfb5611afc0d7d"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:21a83f6f35b9cc656d71b5de8d519f566df01e660ac2578805ab245ffd8523f8"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fc3c9babc1e1faefd62704bb46a69f359a9819eb0292e40df3fb6e3574715cd4"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:e909df4053064a97f1e6565153ff8bb389af12c5c8d29c343308760890560aff"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:dad80b45c22e05b259e33ddd458e9e2ba099c86ccf4e88db7bbab4b747b18d06"},
{file = "cryptography-44.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:479d92908277bed6e1a1c69b277734a7771c2b78633c224445b5c60a9f4bc1d9"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5"},
{file = "cryptography-44.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c"},
{file = "cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053"},
{file = "cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19"},
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738"},
{file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c"},
{file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f"},
{file = "cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2"},
{file = "cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124"},
{file = "cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4"},
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a"},
{file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d"},
{file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736"},
{file = "cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed"},
{file = "cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4"},
{file = "cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa"},
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58"},
{file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb"},
{file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72"},
{file = "cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c"},
{file = "cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a"},
{file = "cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e"},
{file = "cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759"},
]
[package.dependencies]
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""]
pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
nox = ["nox[uv] (>=2024.4.15)"]
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -3740,19 +3753,19 @@ urllib3 = ["packaging", "urllib3"]
[[package]]
name = "google-auth-httplib2"
version = "0.2.1"
version = "0.2.0"
description = "Google Authentication Library: httplib2 transport"
optional = false
python-versions = ">=3.7"
python-versions = "*"
groups = ["main"]
files = [
{file = "google_auth_httplib2-0.2.1-py3-none-any.whl", hash = "sha256:1be94c611db91c01f9703e7f62b0a59bbd5587a95571c7b6fade510d648bc08b"},
{file = "google_auth_httplib2-0.2.1.tar.gz", hash = "sha256:5ef03be3927423c87fb69607b42df23a444e434ddb2555b73b3679793187b7de"},
{file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"},
{file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"},
]
[package.dependencies]
google-auth = ">=1.32.0,<3.0.0"
httplib2 = ">=0.19.0,<1.0.0"
google-auth = "*"
httplib2 = ">=0.19.0"
[[package]]
name = "google-cloud-access-context-manager"
@@ -5181,24 +5194,16 @@ files = [
[[package]]
name = "marshmallow"
version = "3.26.2"
version = "4.3.0"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["main", "dev"]
files = [
{file = "marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73"},
{file = "marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57"},
{file = "marshmallow-4.3.0-py3-none-any.whl", hash = "sha256:46c4fe6984707e3cbd485dfebbf0a59874f58d695aad05c1668d15e8c6e13b46"},
{file = "marshmallow-4.3.0.tar.gz", hash = "sha256:fb43c53b3fe240b8f6af37223d6ef1636f927ad9bea8ab323afad95dff090880"},
]
[package.dependencies]
packaging = ">=17.0"
[package.extras]
dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
tests = ["pytest", "simplejson"]
[[package]]
name = "matplotlib"
version = "3.10.8"
@@ -5492,14 +5497,14 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.23.0"
version = "1.55.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "msgraph_sdk-1.23.0-py3-none-any.whl", hash = "sha256:58e0047b4ca59fd82022c02cd73fec0170a3d84f3b76721e3db2a0314df9a58a"},
{file = "msgraph_sdk-1.23.0.tar.gz", hash = "sha256:6dd1ba9a46f5f0ce8599fd9610133adbd9d1493941438b5d3632fce9e55ed607"},
{file = "msgraph_sdk-1.55.0-py3-none-any.whl", hash = "sha256:c8e68ebc4b88af5111de312e7fa910a4e76ddf48a4534feadb1fb8a411c48cfc"},
{file = "msgraph_sdk-1.55.0.tar.gz", hash = "sha256:6df691a31954a050d26b8a678968017e157d940fb377f2a8a4e17a9741b98756"},
]
[package.dependencies]
@@ -5925,23 +5930,24 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
[[package]]
name = "oci"
version = "2.160.3"
version = "2.169.0"
description = "Oracle Cloud Infrastructure Python SDK"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "oci-2.160.3-py3-none-any.whl", hash = "sha256:858bff3e697098bdda44833d2476bfb4632126f0182178e7dbde4dbd156d71f0"},
{file = "oci-2.160.3.tar.gz", hash = "sha256:57514889be3b713a8385d86e3ba8a33cf46e3563c2a7e29a93027fb30b8a2537"},
{file = "oci-2.169.0-py3-none-any.whl", hash = "sha256:c71bb5143f307791082b3e33cc1545c2490a518cfed85ab1948ef5107c36d30b"},
{file = "oci-2.169.0.tar.gz", hash = "sha256:f3c5fff00b01783b5325ea7b13bf140053ec1e9f41da20bfb9c8a349ee7662fa"},
]
[package.dependencies]
certifi = "*"
circuitbreaker = {version = ">=1.3.1,<3.0.0", markers = "python_version >= \"3.7\""}
cryptography = ">=3.2.1,<46.0.0"
pyOpenSSL = ">=17.5.0,<25.0.0"
cryptography = ">=3.2.1,<47.0.0"
pyOpenSSL = ">=17.5.0,<27.0.0"
python-dateutil = ">=2.5.3,<3.0.0"
pytz = ">=2016.10"
urllib3 = {version = ">=2.6.3", markers = "python_version >= \"3.10.0\""}
[package.extras]
adk = ["docstring-parser (>=0.16) ; python_version >= \"3.10\" and python_version < \"4\"", "mcp (>=1.6.0) ; python_version >= \"3.10\" and python_version < \"4\"", "pydantic (>=2.10.6) ; python_version >= \"3.10\" and python_version < \"4\"", "rich (>=13.9.4) ; python_version >= \"3.10\" and python_version < \"4\""]
@@ -6659,7 +6665,7 @@ files = [
[[package]]
name = "prowler"
version = "5.23.0"
version = "5.25.0"
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
optional = false
python-versions = ">=3.10,<3.13"
@@ -6679,7 +6685,7 @@ alibabacloud-rds20140815 = "12.0.0"
alibabacloud_sas20181203 = "6.1.0"
alibabacloud-sls20201230 = "5.9.0"
alibabacloud_sts20150401 = "1.1.6"
alibabacloud_tea_openapi = "0.4.1"
alibabacloud_tea_openapi = "0.4.4"
alibabacloud_vpc20160428 = "6.13.0"
alive-progress = "3.3.0"
awsipranges = "0.3.3"
@@ -6701,7 +6707,7 @@ azure-mgmt-postgresqlflexibleservers = "1.1.0"
azure-mgmt-rdbms = "10.1.0"
azure-mgmt-recoveryservices = "3.1.0"
azure-mgmt-recoveryservicesbackup = "9.2.0"
azure-mgmt-resource = "23.3.0"
azure-mgmt-resource = "24.0.0"
azure-mgmt-search = "9.1.0"
azure-mgmt-security = "7.0.0"
azure-mgmt-sql = "3.0.1"
@@ -6714,29 +6720,29 @@ boto3 = "1.40.61"
botocore = "1.40.61"
cloudflare = "4.3.1"
colorama = "0.4.6"
cryptography = "44.0.3"
cryptography = "46.0.6"
dash = "3.1.1"
dash-bootstrap-components = "2.0.3"
defusedxml = ">=0.7.1"
defusedxml = "0.7.1"
detect-secrets = "1.5.0"
dulwich = "0.23.0"
google-api-python-client = "2.163.0"
google-auth-httplib2 = ">=0.1,<0.3"
google-auth-httplib2 = "0.2.0"
h2 = "4.3.0"
jsonschema = "4.23.0"
kubernetes = "32.0.1"
markdown = "3.10.2"
microsoft-kiota-abstractions = "1.9.2"
msgraph-sdk = "1.23.0"
msgraph-sdk = "1.55.0"
numpy = "2.0.2"
oci = "2.160.3"
oci = "2.169.0"
openstacksdk = "4.2.0"
pandas = "2.2.3"
py-iam-expand = "0.1.0"
py-ocsf-models = "0.8.1"
pydantic = ">=2.0,<3.0"
pydantic = "2.12.5"
pygithub = "2.8.0"
python-dateutil = ">=2.9.0.post0,<3.0.0"
python-dateutil = "2.9.0.post0"
pytz = "2025.1"
schema = "0.7.5"
shodan = "1.31.0"
@@ -6749,7 +6755,7 @@ uuid6 = "2024.7.10"
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
reference = "master"
resolved_reference = "6ac90eb1b58590b6f2f51645dbef17b9231053f4"
resolved_reference = "ca29e354b622198ff6a70e2ea5eb04e4a44a0903"
[[package]]
name = "psutil"
@@ -6958,11 +6964,11 @@ description = "C parser in Python"
optional = false
python-versions = ">=3.10"
groups = ["main", "dev"]
markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\""
files = [
{file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"},
{file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"},
]
markers = {main = "implementation_name != \"PyPy\" and platform_python_implementation != \"PyPy\"", dev = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\""}
[[package]]
name = "pydantic"
@@ -7288,18 +7294,19 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "
[[package]]
name = "pyopenssl"
version = "24.3.0"
version = "26.0.0"
description = "Python wrapper module around the OpenSSL library"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"},
{file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"},
{file = "pyopenssl-26.0.0-py3-none-any.whl", hash = "sha256:df94d28498848b98cc1c0ffb8ef1e71e40210d3b0a8064c9d29571ed2904bf81"},
{file = "pyopenssl-26.0.0.tar.gz", hash = "sha256:f293934e52936f2e3413b89c6ce36df66a0b34ae1ea3a053b8c5020ff2f513fc"},
]
[package.dependencies]
cryptography = ">=41.0.5,<45"
cryptography = ">=46.0.0,<47"
typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""}
[package.extras]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"]
@@ -8807,6 +8814,23 @@ markupsafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]
[[package]]
name = "workos"
version = "6.0.4"
description = "WorkOS Python Client"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "workos-6.0.4-py3-none-any.whl", hash = "sha256:548668b3702673536f853ba72a7b5bbbc269e467aaf9ac4f477b6e0177df5e21"},
{file = "workos-6.0.4.tar.gz", hash = "sha256:b0bfe8fd212b8567422c4ea3732eb33608794033eb3a69900c6b04db183c32d6"},
]
[package.dependencies]
cryptography = ">=46.0,<47.0"
httpx = ">=0.28,<1.0"
pyjwt = ">=2.12,<3.0"
[[package]]
name = "wrapt"
version = "1.17.3"
@@ -9400,4 +9424,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "077e89853cfe3a6d934841488cfa5a98ff6c92b71f74b817b71387d11559f143"
content-hash = "a3ab982d11a87d951ff15694d2ca7fd51f1f51a451abb0baa067ccf6966367a8"
+2 -3
View File
@@ -38,7 +38,7 @@ dependencies = [
"matplotlib (==3.10.8)",
"reportlab (==4.4.10)",
"neo4j (==6.1.0)",
"cartography (==0.132.0)",
"cartography (==0.135.0)",
"gevent (==25.9.1)",
"werkzeug (==3.1.7)",
"sqlparse (==0.5.5)",
@@ -50,7 +50,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.25.0"
version = "1.26.0"
[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -62,7 +62,6 @@ django-silk = "5.3.2"
docker = "7.1.0"
filelock = "3.20.3"
freezegun = "1.5.1"
marshmallow = "==3.26.2"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "9.0.3"
@@ -0,0 +1,23 @@
from django.db import migrations

# Name of the Celery Beat periodic task whose priority this migration adjusts.
TASK_NAME = "attack-paths-cleanup-stale-scans"


def _update_cleanup_priority(apps, priority):
    """Set the priority of the stale-scans cleanup periodic task, if it exists."""
    periodic_task_model = apps.get_model("django_celery_beat", "PeriodicTask")
    periodic_task_model.objects.filter(name=TASK_NAME).update(priority=priority)


def set_cleanup_priority(apps, schema_editor):
    """Forward step: give the cleanup task priority 0.

    With Redis `queue_order_strategy = priority`, lower values are consumed
    first, so 0 runs the cleanup ahead of default-priority tasks
    (presumably 6 per the worker config — confirm against celery settings).
    """
    _update_cleanup_priority(apps, 0)


def unset_cleanup_priority(apps, schema_editor):
    """Backward step: clear the explicit priority (back to the default)."""
    _update_cleanup_priority(apps, None)


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0089_backfill_finding_group_status_muted"),
    ]

    operations = [
        migrations.RunPython(set_cleanup_priority, unset_cleanup_priority),
    ]
+1 -1
View File
@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.25.0
version: 1.26.0
description: |-
Prowler API specification.
+32 -1
View File
@@ -57,6 +57,7 @@ from api.models import (
ProviderGroupMembership,
ProviderSecret,
Resource,
ResourceFindingMapping,
Role,
RoleProviderGroupRelationship,
SAMLConfiguration,
@@ -15465,7 +15466,7 @@ class TestFindingGroupViewSet:
attrs = data[0]["attributes"]
assert attrs["status"] == "FAIL"
assert attrs["muted"] is True
assert attrs["fail_count"] == 2
assert attrs["fail_count"] == 0
assert attrs["fail_muted_count"] == 2
assert attrs["pass_muted_count"] == 0
assert attrs["manual_muted_count"] == 0
@@ -16030,6 +16031,36 @@ class TestFindingGroupViewSet:
# s3_bucket_public_access has 2 findings with 2 different resources
assert len(data) == 2
def test_resources_id_matches_resource_id_for_mapped_findings(
    self, authenticated_client, finding_groups_fixture
):
    """Findings with a resource expose the resource id as row id (hot path contract)."""
    url = reverse(
        "finding-group-resources", kwargs={"pk": "s3_bucket_public_access"}
    )
    response = authenticated_client.get(url, {"filter[inserted_at]": TODAY})
    assert response.status_code == status.HTTP_200_OK

    rows = response.json()["data"]
    assert rows, "expected resources in response"

    # Ids of resources mapped to findings of this check.
    mapped_resource_ids = {
        str(resource_id)
        for resource_id in ResourceFindingMapping.objects.filter(
            finding__check_id="s3_bucket_public_access",
        ).values_list("resource_id", flat=True)
    }
    # Ids of the findings themselves (must never leak into row ids here).
    check_finding_ids = {
        str(finding_id)
        for finding_id in Finding.objects.filter(
            check_id="s3_bucket_public_access",
        ).values_list("id", flat=True)
    }

    returned_ids = {row["id"] for row in rows}
    assert returned_ids <= mapped_resource_ids
    assert returned_ids.isdisjoint(check_finding_ids)
def test_resources_fields(self, authenticated_client, finding_groups_fixture):
"""Test resource fields (uid, name, service, region, type) have valid values."""
response = authenticated_client.get(
+3 -2
View File
@@ -4225,10 +4225,11 @@ class FindingGroupResourceSerializer(BaseSerializerV1):
Serializer for Finding Group Resources - resources within a finding group.
Returns individual resources with their current status, severity,
and timing information.
and timing information. Orphan findings (without any resource) expose the
finding id as `id` so the row stays identifiable in the UI.
"""
id = serializers.UUIDField(source="resource_id")
id = serializers.UUIDField(source="row_id")
resource = serializers.SerializerMethodField()
provider = serializers.SerializerMethodField()
finding_id = serializers.UUIDField()
+270 -58
View File
@@ -35,11 +35,13 @@ from django.db.models import (
CharField,
Count,
DecimalField,
Exists,
ExpressionWrapper,
F,
IntegerField,
Max,
Min,
OuterRef,
Prefetch,
Q,
QuerySet,
@@ -415,7 +417,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.25.0"
spectacular_settings.VERSION = "1.26.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -7125,17 +7127,16 @@ class FindingGroupViewSet(BaseRLSViewSet):
output_field=IntegerField(),
)
# `pass_count`, `fail_count` and `manual_count` count *every* finding
# for the check (muted or not) so the aggregated `status` reflects the
# underlying check outcome regardless of mute state. Whether the group
# is actionable is signalled by the orthogonal `muted` flag below.
# `pass_count`, `fail_count` and `manual_count` only count non-muted
# findings. Muted findings are tracked separately via the
# `*_muted_count` fields.
return (
queryset.values("check_id")
.annotate(
severity_order=Max(severity_case),
pass_count=Count("id", filter=Q(status="PASS")),
fail_count=Count("id", filter=Q(status="FAIL")),
manual_count=Count("id", filter=Q(status="MANUAL")),
pass_count=Count("id", filter=Q(status="PASS", muted=False)),
fail_count=Count("id", filter=Q(status="FAIL", muted=False)),
manual_count=Count("id", filter=Q(status="MANUAL", muted=False)),
pass_muted_count=Count("id", filter=Q(status="PASS", muted=True)),
fail_muted_count=Count("id", filter=Q(status="FAIL", muted=True)),
manual_muted_count=Count("id", filter=Q(status="MANUAL", muted=True)),
@@ -7280,12 +7281,14 @@ class FindingGroupViewSet(BaseRLSViewSet):
# finding-level aggregation path.
row.pop("nonmuted_count", None)
# Compute aggregated status. Counts are inclusive of muted findings,
# so the underlying check outcome surfaces even when the group is
# fully muted.
if row.get("fail_count", 0) > 0:
# Compute aggregated status from non-muted counts first, then
# fall back to muted counts so fully-muted groups still reflect
# the underlying check outcome.
total_fail = row.get("fail_count", 0) + row.get("fail_muted_count", 0)
total_pass = row.get("pass_count", 0) + row.get("pass_muted_count", 0)
if total_fail > 0:
row["status"] = "FAIL"
elif row.get("pass_count", 0) > 0:
elif total_pass > 0:
row["status"] = "PASS"
else:
row["status"] = "MANUAL"
@@ -7385,9 +7388,12 @@ class FindingGroupViewSet(BaseRLSViewSet):
if computed_params.get("status") or computed_params.getlist("status__in"):
queryset = queryset.annotate(
total_fail=F("fail_count") + F("fail_muted_count"),
total_pass=F("pass_count") + F("pass_muted_count"),
).annotate(
aggregated_status=Case(
When(fail_count__gt=0, then=Value("FAIL")),
When(pass_count__gt=0, then=Value("PASS")),
When(total_fail__gt=0, then=Value("FAIL")),
When(total_pass__gt=0, then=Value("PASS")),
default=Value("MANUAL"),
output_field=CharField(),
)
@@ -7578,6 +7584,53 @@ class FindingGroupViewSet(BaseRLSViewSet):
.order_by(*ordering)
)
def _orphan_findings_queryset(self, filtered_queryset, finding_ids=None):
    """Findings in the filtered set with no ResourceFindingMapping entries."""
    # A finding is an "orphan" when no mapping row points back at it.
    has_mapping = Exists(
        ResourceFindingMapping.objects.filter(finding_id=OuterRef("pk"))
    )
    orphans = filtered_queryset.filter(~has_mapping)
    if finding_ids is None:
        return orphans
    # Optional narrowing to a specific page of finding ids.
    return orphans.filter(id__in=finding_ids)
def _has_orphan_findings(self, filtered_queryset) -> bool:
"""Return True if any finding in the filtered set has no resource mapping."""
return self._orphan_findings_queryset(filtered_queryset).exists()
def _orphan_aggregation_values(self, orphan_queryset):
    """Raw rows for orphan findings; resource payload synthesized from metadata.

    check_metadata is stored with lowercase keys (see
    `prowler.lib.outputs.finding.Finding.get_metadata`) and
    `Finding.resource_groups` is already denormalized at ingest time.
    """
    # Synthesized columns: orphans have no Resource row, so provider and
    # resource-like fields are pulled from the scan and check metadata.
    annotations = {
        "_provider_type": F("scan__provider__provider"),
        "_provider_uid": F("scan__provider__uid"),
        "_provider_alias": F("scan__provider__alias"),
        "_svc": KeyTextTransform("servicename", "check_metadata"),
        "_region": KeyTextTransform("region", "check_metadata"),
        "_rtype": KeyTextTransform("resourcetype", "check_metadata"),
        "_rgroup": F("resource_groups"),
    }
    selected_fields = (
        "id",
        "uid",
        "status",
        "severity",
        "delta",
        "muted",
        "muted_reason",
        "first_seen_at",
        "inserted_at",
        *annotations.keys(),
    )
    return orphan_queryset.annotate(**annotations).values(*selected_fields)
def _post_process_resources(self, resource_data):
"""Convert resource aggregation rows to API output."""
results = []
@@ -7599,9 +7652,13 @@ class FindingGroupViewSet(BaseRLSViewSet):
else:
delta = None
resource_id = row["resource_id"]
finding_id = str(row["finding_id"]) if row.get("finding_id") else None
results.append(
{
"resource_id": row["resource_id"],
"row_id": resource_id,
"resource_id": resource_id,
"resource_uid": row["resource_uid"],
"resource_name": row["resource_name"],
"resource_service": row["resource_service"],
@@ -7620,9 +7677,46 @@ class FindingGroupViewSet(BaseRLSViewSet):
"muted": bool(row.get("muted", False)),
"muted_reason": row.get("muted_reason"),
"resource_group": row.get("resource_group", ""),
"finding_id": (
str(row["finding_id"]) if row.get("finding_id") else None
),
"finding_id": finding_id,
}
)
return results
def _post_process_orphans(self, orphan_rows):
"""Convert orphan finding rows into the same API shape as mapping rows."""
results = []
for row in orphan_rows:
status_val = row["status"]
status = status_val if status_val in ("FAIL", "PASS") else "MANUAL"
muted = bool(row["muted"])
delta_val = row.get("delta")
delta = delta_val if delta_val in ("new", "changed") and not muted else None
finding_id = str(row["id"])
results.append(
{
"row_id": finding_id,
"resource_id": None,
"resource_uid": row["uid"],
"resource_name": row["uid"],
"resource_service": row["_svc"] or "",
"resource_region": row["_region"] or "",
"resource_type": row["_rtype"] or "",
"provider_type": row["_provider_type"],
"provider_uid": row["_provider_uid"],
"provider_alias": row["_provider_alias"],
"status": status,
"severity": row["severity"],
"delta": delta,
"first_seen_at": row["first_seen_at"],
"last_seen_at": row["inserted_at"],
"muted": muted,
"muted_reason": row.get("muted_reason"),
"resource_group": row["_rgroup"] or "",
"finding_id": finding_id,
}
)
@@ -7683,16 +7777,14 @@ class FindingGroupViewSet(BaseRLSViewSet):
sort_param, self._FINDING_GROUP_SORT_MAP
)
if ordering:
# status_order is annotated on demand so groups can be sorted by
# their aggregated status (FAIL > PASS > MANUAL), mirroring the
# priority used in _post_process_aggregation. Counts are
# inclusive of muted findings, so the underlying check outcome
# surfaces even for fully muted groups.
if any(field.lstrip("-") == "status_order" for field in ordering):
aggregated_queryset = aggregated_queryset.annotate(
total_fail_for_sort=F("fail_count") + F("fail_muted_count"),
total_pass_for_sort=F("pass_count") + F("pass_muted_count"),
).annotate(
status_order=Case(
When(fail_count__gt=0, then=Value(3)),
When(pass_count__gt=0, then=Value(2)),
When(total_fail_for_sort__gt=0, then=Value(3)),
When(total_pass_for_sort__gt=0, then=Value(2)),
default=Value(1),
output_field=IntegerField(),
)
@@ -7731,41 +7823,64 @@ class FindingGroupViewSet(BaseRLSViewSet):
def _paginated_resource_response(
self, request, filtered_queryset, resource_ids, tenant_id
):
"""Paginate and return resources.
"""Paginate and return resources, appending orphan findings when present.
Without sort: paginate lightweight resource IDs first, aggregate only the page.
With sort: build a lightweight ordering subquery (resource_id + sort keys),
paginate that, then aggregate full details only for the page.
Hot path (no orphans, or resource filter applied): resources come from
ResourceFindingMapping aggregation. Untouched pre-existing behaviour.
Orphan fallback: findings without a mapping (e.g. IaC) are appended
after mapping rows as synthesised resource-like rows so they remain
visible in the UI without paying the aggregation cost on the hot path.
"""
sort_param = request.query_params.get("sort")
ordering = None
if sort_param:
ordering = self._validate_sort_fields(sort_param, self._RESOURCE_SORT_MAP)
if ordering:
if "resource_id" not in {field.lstrip("-") for field in ordering}:
ordering.append("resource_id")
validated = self._validate_sort_fields(sort_param, self._RESOURCE_SORT_MAP)
ordering = validated if validated else None
# Phase 1: lightweight aggregation with only sort keys, paginate
ordering_qs = self._build_resource_ordering_queryset(
filtered_queryset,
resource_ids=resource_ids,
tenant_id=tenant_id,
ordering=ordering,
)
page = self.paginate_queryset(ordering_qs)
if page is not None:
page_ids = [row["resource_id"] for row in page]
resource_data = self._build_resource_aggregation(
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
)
# Re-sort to match the page ordering
id_order = {rid: idx for idx, rid in enumerate(page_ids)}
results = self._post_process_resources(resource_data)
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
serializer = FindingGroupResourceSerializer(results, many=True)
return self.get_paginated_response(serializer.data)
# Resource filters can only match findings with resources; skip orphan
# detection entirely when they are present.
if resource_ids is not None:
return self._mapping_paginated_response(
request, filtered_queryset, resource_ids, tenant_id, ordering
)
page_ids = [row["resource_id"] for row in ordering_qs]
has_mappings = self._build_resource_mapping_queryset(
filtered_queryset, resource_ids=None, tenant_id=tenant_id
).exists()
if has_mappings:
# Normal or mixed group: serve only resource-mapped rows.
# TODO: Orphan findings in mixed groups are intentionally excluded
# until the ephemeral resources strategy is decided. When resolved,
# route mixed groups to _combined_paginated_response instead.
return self._mapping_paginated_response(
request, filtered_queryset, resource_ids, tenant_id, ordering
)
# Pure orphan group (e.g. IaC): synthesize resource-like rows.
return self._combined_paginated_response(
request, filtered_queryset, tenant_id, ordering
)
def _mapping_paginated_response(
self, request, filtered_queryset, resource_ids, tenant_id, ordering
):
"""Mapping-only paginated response (original fast path)."""
if ordering:
if "resource_id" not in {field.lstrip("-") for field in ordering}:
ordering.append("resource_id")
# Phase 1: lightweight aggregation with only sort keys, paginate
ordering_qs = self._build_resource_ordering_queryset(
filtered_queryset,
resource_ids=resource_ids,
tenant_id=tenant_id,
ordering=ordering,
)
page = self.paginate_queryset(ordering_qs)
if page is not None:
page_ids = [row["resource_id"] for row in page]
resource_data = self._build_resource_aggregation(
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
)
@@ -7773,10 +7888,18 @@ class FindingGroupViewSet(BaseRLSViewSet):
results = self._post_process_resources(resource_data)
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
serializer = FindingGroupResourceSerializer(results, many=True)
return Response(serializer.data)
return self.get_paginated_response(serializer.data)
page_ids = [row["resource_id"] for row in ordering_qs]
resource_data = self._build_resource_aggregation(
filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
)
id_order = {rid: idx for idx, rid in enumerate(page_ids)}
results = self._post_process_resources(resource_data)
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
serializer = FindingGroupResourceSerializer(results, many=True)
return Response(serializer.data)
# No sort (or only empty sort fragments): paginate lightweight resource IDs
# first, aggregate only the page.
mapping_qs = self._build_resource_mapping_queryset(
filtered_queryset, resource_ids=resource_ids, tenant_id=tenant_id
)
@@ -7804,6 +7927,95 @@ class FindingGroupViewSet(BaseRLSViewSet):
serializer = FindingGroupResourceSerializer(results, many=True)
return Response(serializer.data)
def _combined_paginated_response(
    self, request, filtered_queryset, tenant_id, ordering
):
    """Mapping rows + orphan findings appended at end.

    Orphans sit after mapping rows regardless of sort. This keeps the
    mapping-only code path intact for checks that have no orphans (the
    common case) and avoids paying UNION/coalesce costs there.
    """
    # Count distinct mapped resources and collect orphan finding ids
    # (ordered by id so the orphan tail is stable across pages).
    mapping_qs = self._build_resource_mapping_queryset(
        filtered_queryset, resource_ids=None, tenant_id=tenant_id
    )
    mapping_count = mapping_qs.values("resource_id").distinct().count()
    orphan_ids = list(
        self._orphan_findings_queryset(filtered_queryset)
        .order_by("id")
        .values_list("id", flat=True)
    )
    orphan_count = len(orphan_ids)
    total = mapping_count + orphan_count

    # Paginate a simple [0..total) index sequence so DRF produces proper
    # links/meta; then slice mapping / orphan sources accordingly.
    page = self.paginate_queryset(range(total))
    page_indices = list(page) if page is not None else list(range(total))
    # Indices below mapping_count address mapping rows; the rest address
    # positions in the orphan_ids list.
    mapping_indices = [i for i in page_indices if i < mapping_count]
    orphan_positions = [
        i - mapping_count for i in page_indices if i >= mapping_count
    ]

    mapping_results = []
    if mapping_indices:
        # NOTE(review): assumes page_indices is a contiguous range (true for
        # DRF page-number pagination), so [first, last+1) is the exact slice.
        start = mapping_indices[0]
        stop = mapping_indices[-1] + 1
        if ordering:
            # Sorted path: order via the lightweight ordering queryset,
            # appending resource_id as a deterministic tiebreaker.
            ordering_fields = list(ordering)
            if "resource_id" not in {
                field.lstrip("-") for field in ordering_fields
            }:
                ordering_fields.append("resource_id")
            ordered_qs = self._build_resource_ordering_queryset(
                filtered_queryset,
                resource_ids=None,
                tenant_id=tenant_id,
                ordering=ordering_fields,
            )
            slice_rids = [row["resource_id"] for row in ordered_qs[start:stop]]
        else:
            # Unsorted path: stable default ordering by resource_id.
            slice_rids = list(
                mapping_qs.values_list("resource_id", flat=True)
                .distinct()
                .order_by("resource_id")[start:stop]
            )
        if slice_rids:
            # Aggregate full details only for the page's resource ids,
            # then re-sort rows to match the slice order.
            resource_data = self._build_resource_aggregation(
                filtered_queryset,
                resource_ids=slice_rids,
                tenant_id=tenant_id,
            )
            rows_by_rid = {row["resource_id"]: row for row in resource_data}
            ordered_rows = [
                rows_by_rid[rid] for rid in slice_rids if rid in rows_by_rid
            ]
            mapping_results = self._post_process_resources(ordered_rows)

    orphan_results = []
    if orphan_positions:
        # Fetch only the orphan findings landing on this page, preserving
        # the position order established by orphan_ids.
        slice_fids = [orphan_ids[pos] for pos in orphan_positions]
        raw_rows = list(
            self._orphan_aggregation_values(
                self._orphan_findings_queryset(
                    filtered_queryset, finding_ids=slice_fids
                )
            )
        )
        rows_by_fid = {row["id"]: row for row in raw_rows}
        ordered_rows = [
            rows_by_fid[fid] for fid in slice_fids if fid in rows_by_fid
        ]
        orphan_results = self._post_process_orphans(ordered_rows)

    # Mapping rows first, orphan rows appended, serialized as one page.
    results = mapping_results + orphan_results
    serializer = FindingGroupResourceSerializer(results, many=True)
    if page is not None:
        return self.get_paginated_response(serializer.data)
    return Response(serializer.data)
def list(self, request, *args, **kwargs):
"""
List finding groups with aggregation and filtering.
+3 -1
View File
@@ -17,8 +17,10 @@ celery_app.config_from_object("django.conf:settings", namespace="CELERY")
celery_app.conf.update(result_extended=True, result_expires=None)
celery_app.conf.broker_transport_options = {
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT,
"queue_order_strategy": "priority",
}
celery_app.conf.task_default_priority = 6
celery_app.conf.result_backend_transport_options = {
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
+46 -10
View File
@@ -1,6 +1,8 @@
# Portions of this file are based on code from the Cartography project
# (https://github.com/cartography-cncf/cartography), which is licensed under the Apache 2.0 License.
import time
from typing import Any
import aioboto3
@@ -33,7 +35,7 @@ def start_aws_ingestion(
For the scan progress updates:
- The caller of this function (`tasks.jobs.attack_paths.scan.run`) has set it to 2.
- When the control returns to the caller, it will be set to 95.
- When the control returns to the caller, it will be set to 93.
"""
# Initialize variables common to all jobs
@@ -89,34 +91,50 @@ def start_aws_ingestion(
logger.info(
f"Syncing function permission_relationships for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.RESOURCE_FUNCTIONS["permission_relationships"](**sync_args)
logger.info(
f"Synced function permission_relationships for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 88)
if "resourcegroupstaggingapi" in requested_syncs:
logger.info(
f"Syncing function resourcegroupstaggingapi for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.RESOURCE_FUNCTIONS["resourcegroupstaggingapi"](**sync_args)
logger.info(
f"Synced function resourcegroupstaggingapi for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 89)
logger.info(
f"Syncing ec2_iaminstanceprofile scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_ec2_iaminstanceprofile.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced ec2_iaminstanceprofile scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 90)
logger.info(
f"Syncing lambda_ecr analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_analysis_job(
"aws_lambda_ecr.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lambda_ecr analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
if all(
s in requested_syncs
@@ -125,25 +143,34 @@ def start_aws_ingestion(
logger.info(
f"Syncing lb_container_exposure scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_lb_container_exposure.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lb_container_exposure scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
if all(s in requested_syncs for s in ["ec2:network_acls", "ec2:load_balancer_v2"]):
logger.info(
f"Syncing lb_nacl_direct scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_lb_nacl_direct.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lb_nacl_direct scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 91)
logger.info(f"Syncing metadata for AWS account {prowler_api_provider.uid}")
t0 = time.perf_counter()
cartography_aws.merge_module_sync_metadata(
neo4j_session,
group_type="AWSAccount",
@@ -152,24 +179,23 @@ def start_aws_ingestion(
update_tag=cartography_config.update_tag,
stat_handler=cartography_aws.stat_handler,
)
logger.info(
f"Synced metadata for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 92)
# Removing the added extra field
del common_job_parameters["AWS_ID"]
logger.info(f"Syncing cleanup_job for AWS account {prowler_api_provider.uid}")
cartography_aws.run_cleanup_job(
"aws_post_ingestion_principals_cleanup.json",
neo4j_session,
common_job_parameters,
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93)
logger.info(f"Syncing analysis for AWS account {prowler_api_provider.uid}")
t0 = time.perf_counter()
cartography_aws._perform_aws_analysis(
requested_syncs, neo4j_session, common_job_parameters
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 94)
logger.info(
f"Synced analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93)
return failed_syncs
@@ -234,6 +260,8 @@ def sync_aws_account(
)
try:
func_t0 = time.perf_counter()
# `ecr:image_layers` uses `aioboto3_session` instead of `boto3_session`
if func_name == "ecr:image_layers":
cartography_aws.RESOURCE_FUNCTIONS[func_name](
@@ -257,7 +285,15 @@ def sync_aws_account(
else:
cartography_aws.RESOURCE_FUNCTIONS[func_name](**sync_args)
logger.info(
f"Synced function {func_name} for AWS account {prowler_api_provider.uid} in {time.perf_counter() - func_t0:.3f}s"
)
except Exception as e:
logger.info(
f"Synced function {func_name} for AWS account {prowler_api_provider.uid} in {time.perf_counter() - func_t0:.3f}s (FAILED)"
)
exception_message = utils.stringify_exception(
e, f"Exception for AWS sync function: {func_name}"
)
@@ -8,9 +8,9 @@ from tasks.jobs.attack_paths import aws
# Batch size for Neo4j write operations (resource labeling, cleanup)
BATCH_SIZE = env.int("ATTACK_PATHS_BATCH_SIZE", 1000)
# Batch size for Postgres findings fetch (keyset pagination page size)
FINDINGS_BATCH_SIZE = env.int("ATTACK_PATHS_FINDINGS_BATCH_SIZE", 500)
FINDINGS_BATCH_SIZE = env.int("ATTACK_PATHS_FINDINGS_BATCH_SIZE", 1000)
# Batch size for temp-to-tenant graph sync (nodes and relationships per cursor page)
SYNC_BATCH_SIZE = env.int("ATTACK_PATHS_SYNC_BATCH_SIZE", 250)
SYNC_BATCH_SIZE = env.int("ATTACK_PATHS_SYNC_BATCH_SIZE", 1000)
# Neo4j internal labels (Prowler-specific, not provider-specific)
# - `Internet`: Singleton node representing external internet access for exposed-resource queries
@@ -12,6 +12,7 @@ from typing import Any, Generator
from uuid import UUID
import neo4j
from cartography.config import Config as CartographyConfig
from celery.utils.log import get_task_logger
from tasks.jobs.attack_paths.config import (
@@ -86,17 +87,21 @@ def analysis(
prowler_api_provider: Provider,
scan_id: str,
config: CartographyConfig,
) -> None:
) -> tuple[int, int]:
"""
Main entry point for Prowler findings analysis.
Adds resource labels and loads findings.
Returns (labeled_nodes, findings_loaded).
"""
add_resource_label(
total_labeled = add_resource_label(
neo4j_session, prowler_api_provider.provider, str(prowler_api_provider.uid)
)
findings_data = stream_findings_with_resources(prowler_api_provider, scan_id)
load_findings(neo4j_session, findings_data, prowler_api_provider, config)
total_loaded = load_findings(
neo4j_session, findings_data, prowler_api_provider, config
)
return total_labeled, total_loaded
def add_resource_label(
@@ -146,12 +151,11 @@ def load_findings(
findings_batches: Generator[list[dict[str, Any]], None, None],
prowler_api_provider: Provider,
config: CartographyConfig,
) -> None:
) -> int:
"""Load Prowler findings into the graph, linking them to resources."""
query = render_cypher_template(
INSERT_FINDING_TEMPLATE,
{
"__ROOT_NODE_LABEL__": get_root_node_label(prowler_api_provider.provider),
"__NODE_UID_FIELD__": get_node_uid_field(prowler_api_provider.provider),
"__RESOURCE_LABEL__": get_provider_resource_label(
prowler_api_provider.provider
@@ -160,7 +164,6 @@ def load_findings(
)
parameters = {
"provider_uid": str(prowler_api_provider.uid),
"last_updated": config.update_tag,
"prowler_version": ProwlerConfig.prowler_version,
}
@@ -178,6 +181,7 @@ def load_findings(
neo4j_session.run(query, parameters)
logger.info(f"Finished loading {total_records} records in {batch_num} batches")
return total_records
# Findings Streaming (Generator-based)
@@ -248,7 +252,9 @@ def _fetch_findings_batch(
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
# Use `all_objects` to get `Findings` even on soft-deleted `Providers`
# But even the provider is already validated as active in this context
qs = FindingModel.all_objects.filter(scan_id=scan_id).order_by("id")
qs = FindingModel.all_objects.filter(
tenant_id=tenant_id, scan_id=scan_id
).order_by("id")
if after_id is not None:
qs = qs.filter(id__gt=after_id)
@@ -32,17 +32,14 @@ ADD_RESOURCE_LABEL_TEMPLATE = """
"""
INSERT_FINDING_TEMPLATE = f"""
MATCH (account:__ROOT_NODE_LABEL__ {{id: $provider_uid}})
UNWIND $findings_data AS finding_data
OPTIONAL MATCH (account)-->(resource_by_uid:__RESOURCE_LABEL__)
WHERE resource_by_uid.__NODE_UID_FIELD__ = finding_data.resource_uid
WITH account, finding_data, resource_by_uid
OPTIONAL MATCH (resource_by_uid:__RESOURCE_LABEL__ {{__NODE_UID_FIELD__: finding_data.resource_uid}})
WITH finding_data, resource_by_uid
OPTIONAL MATCH (account)-->(resource_by_id:__RESOURCE_LABEL__)
OPTIONAL MATCH (resource_by_id:__RESOURCE_LABEL__ {{id: finding_data.resource_uid}})
WHERE resource_by_uid IS NULL
AND resource_by_id.id = finding_data.resource_uid
WITH account, finding_data, COALESCE(resource_by_uid, resource_by_id) AS resource
WITH finding_data, COALESCE(resource_by_uid, resource_by_id) AS resource
WHERE resource IS NOT NULL
MERGE (finding:{PROWLER_FINDING_LABEL} {{id: finding_data.id}})
+38 -10
View File
@@ -55,6 +55,7 @@ exception propagates to Celery.
import logging
import time
from typing import Any
from cartography.config import Config as CartographyConfig
@@ -144,6 +145,12 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
attack_paths_scan, task_id, tenant_cartography_config
)
scan_t0 = time.perf_counter()
logger.info(
f"Starting Attack Paths scan ({attack_paths_scan.id}) for "
f"{prowler_api_provider.provider.upper()} provider {prowler_api_provider.id}"
)
subgraph_dropped = False
sync_completed = False
provider_gated = False
@@ -169,6 +176,7 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 2)
# The real scan, where iterates over cloud services
t0 = time.perf_counter()
ingestion_exceptions = utils.call_within_event_loop(
cartography_ingestion_function,
tmp_neo4j_session,
@@ -177,19 +185,23 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
prowler_sdk_provider,
attack_paths_scan,
)
logger.info(
f"Cartography ingestion completed in {time.perf_counter() - t0:.3f}s "
f"(failed_syncs={len(ingestion_exceptions)})"
)
# Post-processing: Just keeping it to be more Cartography compliant
logger.info(
f"Syncing Cartography ontology for AWS account {prowler_api_provider.uid}"
)
cartography_ontology.run(tmp_neo4j_session, tmp_cartography_config)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 95)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 94)
logger.info(
f"Syncing Cartography analysis for AWS account {prowler_api_provider.uid}"
)
cartography_analysis.run(tmp_neo4j_session, tmp_cartography_config)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 96)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 95)
# Creating Internet node and CAN_ACCESS relationships
logger.info(
@@ -198,14 +210,20 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
internet.analysis(
tmp_neo4j_session, prowler_api_provider, tmp_cartography_config
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 96)
# Adding Prowler Finding nodes and relationships
logger.info(
f"Syncing Prowler analysis for AWS account {prowler_api_provider.uid}"
)
findings.analysis(
t0 = time.perf_counter()
labeled_nodes, findings_loaded = findings.analysis(
tmp_neo4j_session, prowler_api_provider, scan_id, tmp_cartography_config
)
logger.info(
f"Prowler analysis completed in {time.perf_counter() - t0:.3f}s "
f"(findings={findings_loaded}, labeled_nodes={labeled_nodes})"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 97)
logger.info(
@@ -227,22 +245,33 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
logger.info(f"Deleting existing provider graph in {tenant_database_name}")
db_utils.set_provider_graph_data_ready(attack_paths_scan, False)
provider_gated = True
graph_database.drop_subgraph(
t0 = time.perf_counter()
deleted_nodes = graph_database.drop_subgraph(
database=tenant_database_name,
provider_id=str(prowler_api_provider.id),
)
logger.info(
f"Deleted existing provider graph in {time.perf_counter() - t0:.3f}s "
f"(deleted_nodes={deleted_nodes})"
)
subgraph_dropped = True
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 98)
logger.info(
f"Syncing graph from {tmp_database_name} into {tenant_database_name}"
)
sync.sync_graph(
t0 = time.perf_counter()
sync_result = sync.sync_graph(
source_database=tmp_database_name,
target_database=tenant_database_name,
tenant_id=str(prowler_api_provider.tenant_id),
provider_id=str(prowler_api_provider.id),
)
logger.info(
f"Synced graph in {time.perf_counter() - t0:.3f}s "
f"(nodes={sync_result['nodes']}, relationships={sync_result['relationships']})"
)
sync_completed = True
db_utils.set_graph_data_ready(attack_paths_scan, True)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 99)
@@ -250,17 +279,16 @@ def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]:
logger.info(f"Clearing Neo4j cache for database {tenant_database_name}")
graph_database.clear_cache(tenant_database_name)
logger.info(
f"Completed Cartography ({attack_paths_scan.id}) for "
f"{prowler_api_provider.provider.upper()} provider {prowler_api_provider.id}"
)
logger.info(f"Dropping temporary Neo4j database {tmp_database_name}")
graph_database.drop_database(tmp_database_name)
db_utils.finish_attack_paths_scan(
attack_paths_scan, StateChoices.COMPLETED, ingestion_exceptions
)
logger.info(
f"Attack Paths scan completed in {time.perf_counter() - scan_t0:.3f}s "
f"(state=completed, failed_syncs={len(ingestion_exceptions)})"
)
return ingestion_exceptions
except Exception as e:
@@ -5,6 +5,8 @@ This module handles syncing graph data from temporary scan databases
to the tenant database, adding provider isolation labels and properties.
"""
import time
from collections import defaultdict
from typing import Any
@@ -81,6 +83,7 @@ def sync_nodes(
Source and target sessions are opened sequentially per batch to avoid
holding two Bolt connections simultaneously for the entire sync duration.
"""
t0 = time.perf_counter()
last_id = -1
total_synced = 0
@@ -117,7 +120,7 @@ def sync_nodes(
total_synced += batch_count
logger.info(
f"Synced {total_synced} nodes from {source_database} to {target_database}"
f"Synced {total_synced} nodes from {source_database} to {target_database} in {time.perf_counter() - t0:.3f}s"
)
return total_synced
@@ -136,6 +139,7 @@ def sync_relationships(
Source and target sessions are opened sequentially per batch to avoid
holding two Bolt connections simultaneously for the entire sync duration.
"""
t0 = time.perf_counter()
last_id = -1
total_synced = 0
@@ -166,7 +170,7 @@ def sync_relationships(
total_synced += batch_count
logger.info(
f"Synced {total_synced} relationships from {source_database} to {target_database}"
f"Synced {total_synced} relationships from {source_database} to {target_database} in {time.perf_counter() - t0:.3f}s"
)
return total_synced
+15 -9
View File
@@ -752,11 +752,19 @@ def _process_finding_micro_batch(
)
if mappings_to_create:
ResourceFindingMapping.objects.bulk_create(
created_mappings = ResourceFindingMapping.objects.bulk_create(
mappings_to_create,
batch_size=SCAN_DB_BATCH_SIZE,
ignore_conflicts=True,
unique_fields=["tenant_id", "resource_id", "finding_id"],
)
inserted = sum(1 for m in created_mappings if m.pk)
if inserted != len(mappings_to_create):
logger.error(
f"scan {scan_instance.id}: expected "
f"{len(mappings_to_create)} ResourceFindingMapping rows, "
f"inserted {inserted}. Rolling back micro-batch."
)
# Update finding denormalized arrays
findings_to_update = []
@@ -1804,11 +1812,9 @@ def aggregate_finding_group_summaries(tenant_id: str, scan_id: str):
)
# Aggregate findings by check_id for this scan.
# `pass_count`, `fail_count` and `manual_count` count *every* finding
# in this group, regardless of mute state, so the aggregated `status`
# always reflects the underlying check outcome (FAIL > PASS > MANUAL)
# even when the group is fully muted. The orthogonal `muted` flag is
# what tells whether the group has any actionable (non-muted) findings.
# `pass_count`, `fail_count` and `manual_count` only count non-muted
# findings. Muted findings are tracked separately via the
# `*_muted_count` fields.
aggregated = (
Finding.objects.filter(
tenant_id=tenant_id,
@@ -1817,9 +1823,9 @@ def aggregate_finding_group_summaries(tenant_id: str, scan_id: str):
.values("check_id")
.annotate(
severity_order=Max(severity_case),
pass_count=Count("id", filter=Q(status="PASS")),
fail_count=Count("id", filter=Q(status="FAIL")),
manual_count=Count("id", filter=Q(status="MANUAL")),
pass_count=Count("id", filter=Q(status="PASS", muted=False)),
fail_count=Count("id", filter=Q(status="FAIL", muted=False)),
manual_count=Count("id", filter=Q(status="MANUAL", muted=False)),
pass_muted_count=Count("id", filter=Q(status="PASS", muted=True)),
fail_muted_count=Count("id", filter=Q(status="FAIL", muted=True)),
manual_muted_count=Count("id", filter=Q(status="MANUAL", muted=True)),
@@ -38,11 +38,14 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.finish_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.sync.sync_graph")
@patch("tasks.jobs.attack_paths.scan.graph_database.drop_subgraph")
@patch(
"tasks.jobs.attack_paths.scan.sync.sync_graph",
return_value={"nodes": 0, "relationships": 0},
)
@patch("tasks.jobs.attack_paths.scan.graph_database.drop_subgraph", return_value=0)
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -188,7 +191,7 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -287,7 +290,7 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -390,7 +393,7 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -489,14 +492,17 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.sync.sync_graph")
@patch(
"tasks.jobs.attack_paths.scan.sync.sync_graph",
return_value={"nodes": 0, "relationships": 0},
)
@patch(
"tasks.jobs.attack_paths.scan.graph_database.drop_subgraph",
side_effect=RuntimeError("drop failed"),
)
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -609,7 +615,7 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.graph_database.drop_subgraph")
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -718,11 +724,14 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.sync.sync_graph")
@patch(
"tasks.jobs.attack_paths.scan.sync.sync_graph",
return_value={"nodes": 0, "relationships": 0},
)
@patch("tasks.jobs.attack_paths.scan.graph_database.drop_subgraph")
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -833,14 +842,17 @@ class TestAttackPathsRun:
@patch("tasks.jobs.attack_paths.scan.db_utils.set_provider_graph_data_ready")
@patch("tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress")
@patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan")
@patch("tasks.jobs.attack_paths.scan.sync.sync_graph")
@patch(
"tasks.jobs.attack_paths.scan.sync.sync_graph",
return_value={"nodes": 0, "relationships": 0},
)
@patch(
"tasks.jobs.attack_paths.scan.graph_database.drop_subgraph",
side_effect=RuntimeError("drop failed"),
)
@patch("tasks.jobs.attack_paths.scan.indexes.create_sync_indexes")
@patch("tasks.jobs.attack_paths.scan.internet.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis")
@patch("tasks.jobs.attack_paths.scan.findings.analysis", return_value=(0, 0))
@patch("tasks.jobs.attack_paths.scan.indexes.create_findings_indexes")
@patch("tasks.jobs.attack_paths.scan.cartography_ontology.run")
@patch("tasks.jobs.attack_paths.scan.cartography_analysis.run")
@@ -1274,10 +1286,6 @@ class TestAttackPathsFindingsHelpers:
mock_session = MagicMock()
with (
patch(
"tasks.jobs.attack_paths.findings.get_root_node_label",
return_value="AWSAccount",
),
patch(
"tasks.jobs.attack_paths.findings.get_node_uid_field",
return_value="arn",
@@ -1294,7 +1302,6 @@ class TestAttackPathsFindingsHelpers:
assert mock_session.run.call_count == 2
for call_args in mock_session.run.call_args_list:
params = call_args.args[1]
assert params["provider_uid"] == str(provider.uid)
assert params["last_updated"] == config.update_tag
assert "findings_data" in params
@@ -1673,10 +1680,6 @@ class TestAttackPathsFindingsHelpers:
yield # Make it a generator
with (
patch(
"tasks.jobs.attack_paths.findings.get_root_node_label",
return_value="AWSAccount",
),
patch(
"tasks.jobs.attack_paths.findings.get_node_uid_field",
return_value="arn",
@@ -121,8 +121,8 @@ To update the environment file:
Edit the `.env` file and change version values:
```env
PROWLER_UI_VERSION="5.23.0"
PROWLER_API_VERSION="5.23.0"
PROWLER_UI_VERSION="5.24.0"
PROWLER_API_VERSION="5.24.0"
```
<Note>
Generated
+74 -20
View File
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.3.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.3.4 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -1267,19 +1267,19 @@ typing-extensions = ">=4.6.0"
[[package]]
name = "azure-mgmt-resource"
version = "23.3.0"
version = "24.0.0"
description = "Microsoft Azure Resource Management Client Library for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_mgmt_resource-23.3.0-py3-none-any.whl", hash = "sha256:ab216ee28e29db6654b989746e0c85a1181f66653929d2cb6e48fba66d9af323"},
{file = "azure_mgmt_resource-23.3.0.tar.gz", hash = "sha256:fc4f1fd8b6aad23f8af4ed1f913df5f5c92df117449dc354fea6802a2829fea4"},
{file = "azure_mgmt_resource-24.0.0-py3-none-any.whl", hash = "sha256:27b32cd223e2784269f5a0db3c282042886ee4072d79cedc638438ece7cd0df4"},
{file = "azure_mgmt_resource-24.0.0.tar.gz", hash = "sha256:cf6b8995fcdd407ac9ff1dd474087129429a1d90dbb1ac77f97c19b96237b265"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
azure-mgmt-core = ">=1.5.0"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
@@ -1425,6 +1425,64 @@ typing-extensions = ">=4.6.0"
[package.extras]
aio = ["azure-core[aio] (>=1.30.0)"]
[[package]]
name = "backports-datetime-fromisoformat"
version = "2.0.3"
description = "Backport of Python 3.11's datetime.fromisoformat"
optional = false
python-versions = ">3"
groups = ["dev"]
markers = "python_version == \"3.10\""
files = [
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f681f638f10588fa3c101ee9ae2b63d3734713202ddfcfb6ec6cea0778a29d4"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:cd681460e9142f1249408e5aee6d178c6d89b49e06d44913c8fdfb6defda8d1c"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ee68bc8735ae5058695b76d3bb2aee1d137c052a11c8303f1e966aa23b72b65b"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8273fe7932db65d952a43e238318966eab9e49e8dd546550a41df12175cc2be4"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39d57ea50aa5a524bb239688adc1d1d824c31b6094ebd39aa164d6cadb85de22"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ac6272f87693e78209dc72e84cf9ab58052027733cd0721c55356d3c881791cf"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:44c497a71f80cd2bcfc26faae8857cf8e79388e3d5fbf79d2354b8c360547d58"},
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:6335a4c9e8af329cb1ded5ab41a666e1448116161905a94e054f205aa6d263bc"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2e4b66e017253cdbe5a1de49e0eecff3f66cd72bcb1229d7db6e6b1832c0443"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:43e2d648e150777e13bbc2549cc960373e37bf65bd8a5d2e0cef40e16e5d8dd0"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:4ce6326fd86d5bae37813c7bf1543bae9e4c215ec6f5afe4c518be2635e2e005"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7c8fac333bf860208fd522a5394369ee3c790d0aa4311f515fcc4b6c5ef8d75"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4da5ab3aa0cc293dc0662a0c6d1da1a011dc1edcbc3122a288cfed13a0b45"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58ea11e3bf912bd0a36b0519eae2c5b560b3cb972ea756e66b73fb9be460af01"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8a375c7dbee4734318714a799b6c697223e4bbb57232af37fbfff88fb48a14c6"},
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:ac677b1664c4585c2e014739f6678137c8336815406052349c85898206ec7061"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66ce47ee1ba91e146149cf40565c3d750ea1be94faf660ca733d8601e0848147"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8b7e069910a66b3bba61df35b5f879e5253ff0821a70375b9daf06444d046fa4"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:a3b5d1d04a9e0f7b15aa1e647c750631a873b298cdd1255687bb68779fe8eb35"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1b95986430e789c076610aea704db20874f0781b8624f648ca9fb6ef67c6e1"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffe5f793db59e2f1d45ec35a1cf51404fdd69df9f6952a0c87c3060af4c00e32"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:620e8e73bd2595dfff1b4d256a12b67fce90ece3de87b38e1dde46b910f46f4d"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4cf9c0a985d68476c1cabd6385c691201dda2337d7453fb4da9679ce9f23f4e7"},
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:d144868a73002e6e2e6fef72333e7b0129cecdd121aa8f1edba7107fd067255d"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e81b26497a17c29595bc7df20bc6a872ceea5f8c9d6537283945d4b6396aec10"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:5ba00ead8d9d82fd6123eb4891c566d30a293454e54e32ff7ead7644f5f7e575"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:24d574cb4072e1640b00864e94c4c89858033936ece3fc0e1c6f7179f120d0a8"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9735695a66aad654500b0193525e590c693ab3368478ce07b34b443a1ea5e824"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63d39709e17eb72685d052ac82acf0763e047f57c86af1b791505b1fec96915d"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1ea2cc84224937d6b9b4c07f5cb7c667f2bde28c255645ba27f8a675a7af8234"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4024e6d35a9fdc1b3fd6ac7a673bd16cb176c7e0b952af6428b7129a70f72cce"},
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5e2dcc94dc9c9ab8704409d86fcb5236316e9dcef6feed8162287634e3568f4c"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fa2de871801d824c255fac7e5e7e50f2be6c9c376fd9268b40c54b5e9da91f42"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:1314d4923c1509aa9696712a7bc0c7160d3b7acf72adafbbe6c558d523f5d491"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b750ecba3a8815ad8bc48311552f3f8ab99dd2326d29df7ff670d9c49321f48f"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d5117dce805d8a2f78baeddc8c6127281fa0a5e2c40c6dd992ba6b2b367876"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb35f607bd1cbe37b896379d5f5ed4dc298b536f4b959cb63180e05cacc0539d"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:61c74710900602637d2d145dda9720c94e303380803bf68811b2a151deec75c2"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ece59af54ebf67ecbfbbf3ca9066f5687879e36527ad69d8b6e3ac565d565a62"},
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:d0a7c5f875068efe106f62233bc712d50db4d07c13c7db570175c7857a7b5dbd"},
{file = "backports_datetime_fromisoformat-2.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90e202e72a3d5aae673fcc8c9a4267d56b2f532beeb9173361293625fe4d2039"},
{file = "backports_datetime_fromisoformat-2.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2df98ef1b76f5a58bb493dda552259ba60c3a37557d848e039524203951c9f06"},
{file = "backports_datetime_fromisoformat-2.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7100adcda5e818b5a894ad0626e38118bb896a347f40ebed8981155675b9ba7b"},
{file = "backports_datetime_fromisoformat-2.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e410383f5d6a449a529d074e88af8bc80020bb42b402265f9c02c8358c11da5"},
{file = "backports_datetime_fromisoformat-2.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2797593760da6bcc32c4a13fa825af183cd4bfd333c60b3dbf84711afca26ef"},
{file = "backports_datetime_fromisoformat-2.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35a144fd681a0bea1013ccc4cd3fd4dc758ea17ee23dca019c02b82ec46fc0c4"},
{file = "backports_datetime_fromisoformat-2.0.3.tar.gz", hash = "sha256:b58edc8f517b66b397abc250ecc737969486703a66eb97e01e6d51291b1a139d"},
]
[[package]]
name = "bandit"
version = "1.8.3"
@@ -3350,23 +3408,19 @@ files = [
[[package]]
name = "marshmallow"
version = "3.26.2"
version = "4.3.0"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["dev"]
files = [
{file = "marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73"},
{file = "marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57"},
{file = "marshmallow-4.3.0-py3-none-any.whl", hash = "sha256:46c4fe6984707e3cbd485dfebbf0a59874f58d695aad05c1668d15e8c6e13b46"},
{file = "marshmallow-4.3.0.tar.gz", hash = "sha256:fb43c53b3fe240b8f6af37223d6ef1636f927ad9bea8ab323afad95dff090880"},
]
[package.dependencies]
packaging = ">=17.0"
[package.extras]
dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
tests = ["pytest", "simplejson"]
backports-datetime-fromisoformat = {version = "*", markers = "python_version < \"3.11\""}
typing-extensions = {version = "*", markers = "python_version < \"3.11\""}
[[package]]
name = "mccabe"
@@ -3662,14 +3716,14 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.23.0"
version = "1.55.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "msgraph_sdk-1.23.0-py3-none-any.whl", hash = "sha256:58e0047b4ca59fd82022c02cd73fec0170a3d84f3b76721e3db2a0314df9a58a"},
{file = "msgraph_sdk-1.23.0.tar.gz", hash = "sha256:6dd1ba9a46f5f0ce8599fd9610133adbd9d1493941438b5d3632fce9e55ed607"},
{file = "msgraph_sdk-1.55.0-py3-none-any.whl", hash = "sha256:c8e68ebc4b88af5111de312e7fa910a4e76ddf48a4534feadb1fb8a411c48cfc"},
{file = "msgraph_sdk-1.55.0.tar.gz", hash = "sha256:6df691a31954a050d26b8a678968017e157d940fb377f2a8a4e17a9741b98756"},
]
[package.dependencies]
@@ -6681,4 +6735,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.13"
content-hash = "786921163bb46716defae1d9de1df001af2abf17edd3061165638707bcd28ce4"
content-hash = "09ce4507a464b318702ed8c6a738f3bb1bc4cc6ff5a50a9c2884f560af9ab034"
+21 -1
View File
@@ -2,6 +2,26 @@
All notable changes to the **Prowler SDK** are documented in this file.
## [5.24.1] (Prowler v5.24.1)
### 🚀 Added
- `entra_pim_role_usage_alert_exists` check for m365 provider [(#10799)](https://github.com/prowler-cloud/prowler/pull/10799)
### 🔄 Changed
- `msgraph-sdk` from 1.23.0 to 1.55.0 and `azure-mgmt-resource` from 23.3.0 to 24.0.0, removing `marshmallow` as is a transitively dev dependency [(#10733)](https://github.com/prowler-cloud/prowler/pull/10733)
### 🐞 Fixed
- Cloudflare account-scoped API tokens failing connection test in the App with `CloudflareUserTokenRequiredError` [(#10723)](https://github.com/prowler-cloud/prowler/pull/10723)
- `prowler image --registry` failing with `ImageNoImagesProvidedError` due to registry arguments not being forwarded to `ImageProvider` in `init_global_provider` [(#10470)](https://github.com/prowler-cloud/prowler/pull/10470)
- Google Workspace Calendar checks false FAIL on unconfigured settings with secure Google defaults [(#10726)](https://github.com/prowler-cloud/prowler/pull/10726)
- Google Workspace Drive checks false FAIL on unconfigured settings with secure Google defaults [(#10727)](https://github.com/prowler-cloud/prowler/pull/10727)
- Cloudflare `validate_credentials` can hang in an infinite pagination loop when the SDK repeats accounts, blocking connection tests [(#10771)](https://github.com/prowler-cloud/prowler/pull/10771)
---
## [5.24.0] (Prowler v5.24.0)
### 🚀 Added
@@ -26,6 +46,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
### 🐞 Fixed
- `prowler image --registry-list` crashes with `AttributeError` because `ImageProvider.__init__` returns early before registering the global provider [(#10691)](https://github.com/prowler-cloud/prowler/pull/10691)
- Vercel firewall config handling for team-scoped projects and current API response shapes [(#10695)](https://github.com/prowler-cloud/prowler/pull/10695)
---
@@ -77,7 +98,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Oracle Cloud `kms_key_rotation_enabled` now checks current key version age to avoid false positives on vaults without auto-rotation support [(#10450)](https://github.com/prowler-cloud/prowler/pull/10450)
- OCI filestorage, blockstorage, KMS, and compute services now honor `--region` for scanning outside the tenancy home region [(#10472)](https://github.com/prowler-cloud/prowler/pull/10472)
- OCI provider now supports multi-region filtering via `--region` [(#10473)](https://github.com/prowler-cloud/prowler/pull/10473)
- `prowler image --registry` failing with `ImageNoImagesProvidedError` due to registry arguments not being forwarded to `ImageProvider` in `init_global_provider` [(#10470)](https://github.com/prowler-cloud/prowler/pull/10470)
- OCI multi-region support for identity client configuration in blockstorage, identity, and filestorage services [(#10520)](https://github.com/prowler-cloud/prowler/pull/10520)
- Google Workspace Calendar checks now filter for customer-level policies only, skipping OU and group overrides that could produce incorrect audit results [(#10658)](https://github.com/prowler-cloud/prowler/pull/10658)
+4
View File
@@ -293,6 +293,10 @@ def prowler():
if not args.only_logs:
global_provider.print_credentials()
# --registry-list: listing already printed during provider init, exit
if getattr(global_provider, "_listing_only", False):
sys.exit()
# Skip service and check loading for external-tool providers
if provider not in EXTERNAL_TOOL_PROVIDERS:
# Import custom checks from folder
+3 -1
View File
@@ -1502,7 +1502,9 @@
{
"Id": "5.3.1",
"Description": "Microsoft Entra Privileged Identity Management can be used to audit roles, allow just in time activation of roles and allow for periodic role attestation. Organizations should remove permanent members from privileged Office 365 roles and instead make them eligible, through a JIT activation workflow.",
"Checks": [],
"Checks": [
"entra_pim_role_usage_alert_exists"
],
"Attributes": [
{
"Section": "5 Microsoft Entra admin center",
+3 -1
View File
@@ -1803,7 +1803,9 @@
{
"Id": "5.3.1",
"Description": "Microsoft Entra Privileged Identity Management can be used to audit roles, allow just in time activation of roles and allow for periodic role attestation. Organizations should remove permanent members from privileged Office 365 roles and instead make them eligible, through a JIT activation workflow. Ensure 'Privileged Identity Management' is used to manage roles.",
"Checks": [],
"Checks": [
"entra_pim_role_usage_alert_exists"
],
"Attributes": [
{
"Section": "5 Microsoft Entra admin center",
@@ -281,6 +281,7 @@
"Checks": [
"entra_admin_portals_access_restriction",
"entra_app_registration_no_unused_privileged_permissions",
"entra_pim_role_usage_alert_exists",
"entra_policy_guest_users_access_restrictions",
"sharepoint_external_sharing_managed",
"sharepoint_external_sharing_restricted",
@@ -672,6 +673,7 @@
"entra_admin_users_sign_in_frequency_enabled",
"entra_break_glass_account_fido2_security_key_registered",
"entra_app_registration_no_unused_privileged_permissions",
"entra_pim_role_usage_alert_exists",
"entra_policy_ensure_default_user_cannot_create_tenants",
"entra_policy_guest_invite_only_for_admin_roles",
"entra_seamless_sso_disabled"
+1 -1
View File
@@ -38,7 +38,7 @@ class _MutableTimestamp:
timestamp = _MutableTimestamp(datetime.today())
timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
prowler_version = "5.24.0"
prowler_version = "5.25.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -274,8 +274,12 @@ class CloudflareProvider(Provider):
for account in client.accounts.list():
account_id = getattr(account, "id", None)
# Prevent infinite loop - skip if we've seen this account
# Prevent infinite loop on repeated pages from the SDK paginator
if account_id in seen_account_ids:
logger.warning(
"Detected repeated Cloudflare account ID while listing accounts. "
"Stopping pagination to avoid an infinite loop."
)
break
seen_account_ids.add(account_id)
@@ -332,19 +336,16 @@ class CloudflareProvider(Provider):
return
except PermissionDeniedError as error:
error_str = str(error)
# Check for user-level authentication required (code 9109)
if "9109" in error_str:
logger.error(f"CloudflareUserTokenRequiredError: {error}")
raise CloudflareUserTokenRequiredError(
file=os.path.basename(__file__),
)
# Check for invalid API key or email (code 9103) - comes as 403
if "9103" in error_str or "Unknown X-Auth-Key" in error_str:
logger.error(f"CloudflareInvalidAPIKeyError: {error}")
raise CloudflareInvalidAPIKeyError(
file=os.path.basename(__file__),
)
# For other permission errors, try accounts.list() as fallback
# For permission errors (including 9109 account-scoped tokens),
# try accounts.list() as fallback before failing.
# Error 9109 means the token is account-scoped, not user-level,
# which is valid for scanning — only fail if accounts.list() also fails.
logger.warning(
f"Unable to retrieve Cloudflare user info: {error}. "
"Trying accounts.list() as fallback."
@@ -398,7 +399,20 @@ class CloudflareProvider(Provider):
# Fallback: try accounts.list()
try:
accounts = list(client.accounts.list())
accounts: list = []
seen_account_ids: set = set()
for account in client.accounts.list():
account_id = getattr(account, "id", None)
# Prevent infinite loop on repeated pages from the SDK paginator
if account_id in seen_account_ids:
logger.warning(
"Detected repeated Cloudflare account ID while validating credentials. "
"Stopping pagination to avoid an infinite loop."
)
break
seen_account_ids.add(account_id)
accounts.append(account)
if not accounts:
logger.error("CloudflareNoAccountsError: No accounts found")
raise CloudflareNoAccountsError(
@@ -35,21 +35,20 @@ class calendar_external_invitations_warning(Check):
f"External invitation warnings for Google Calendar are enabled "
f"in domain {calendar_client.provider.identity.domain}."
)
elif warning_enabled is None:
report.status = "PASS"
report.status_extended = (
f"External invitation warnings for Google Calendar use Google's "
f"secure default configuration (enabled) "
f"in domain {calendar_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if warning_enabled is None:
report.status_extended = (
f"External invitation warnings for Google Calendar are not "
f"explicitly configured in domain "
f"{calendar_client.provider.identity.domain}. "
f"Users should be warned when inviting guests outside the organization."
)
else:
report.status_extended = (
f"External invitation warnings for Google Calendar are disabled "
f"in domain {calendar_client.provider.identity.domain}. "
f"Users should be warned when inviting guests outside the organization."
)
report.status_extended = (
f"External invitation warnings for Google Calendar are disabled "
f"in domain {calendar_client.provider.identity.domain}. "
f"Users should be warned when inviting guests outside the organization."
)
findings.append(report)
@@ -36,20 +36,20 @@ class calendar_external_sharing_primary_calendar(Check):
f"{calendar_client.provider.identity.domain} is restricted to "
f"free/busy information only."
)
elif sharing is None:
report.status = "PASS"
report.status_extended = (
f"Primary calendar external sharing uses Google's secure default "
f"configuration (free/busy only) "
f"in domain {calendar_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if sharing is None:
report.status_extended = (
f"Primary calendar external sharing is not explicitly configured "
f"in domain {calendar_client.provider.identity.domain}. "
f"External sharing should be restricted to free/busy information only."
)
else:
report.status_extended = (
f"Primary calendar external sharing in domain "
f"{calendar_client.provider.identity.domain} is set to {sharing}. "
f"External sharing should be restricted to free/busy information only."
)
report.status_extended = (
f"Primary calendar external sharing in domain "
f"{calendar_client.provider.identity.domain} is set to {sharing}. "
f"External sharing should be restricted to free/busy information only."
)
findings.append(report)
@@ -33,21 +33,20 @@ class drive_external_sharing_warn_users(Check):
f"External sharing warnings for Drive and Docs are enabled "
f"in domain {drive_client.provider.identity.domain}."
)
elif warning_enabled is None:
report.status = "PASS"
report.status_extended = (
f"External sharing warnings for Drive and Docs use Google's "
f"secure default configuration (enabled) "
f"in domain {drive_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if warning_enabled is None:
report.status_extended = (
f"External sharing warnings for Drive and Docs are not "
f"explicitly configured in domain "
f"{drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files outside the organization."
)
else:
report.status_extended = (
f"External sharing warnings for Drive and Docs are disabled "
f"in domain {drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files outside the organization."
)
report.status_extended = (
f"External sharing warnings for Drive and Docs are disabled "
f"in domain {drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files outside the organization."
)
findings.append(report)
@@ -35,22 +35,21 @@ class drive_shared_drive_creation_allowed(Check):
f"Users in domain {drive_client.provider.identity.domain} "
f"are allowed to create new shared drives."
)
elif allow_creation is None:
report.status = "PASS"
report.status_extended = (
f"Shared drive creation uses Google's secure default "
f"configuration (allowed) "
f"in domain {drive_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if allow_creation is None:
report.status_extended = (
f"Shared drive creation is not explicitly configured in "
f"domain {drive_client.provider.identity.domain}. "
f"Users should be allowed to create new shared drives to avoid "
f"data loss when accounts are deleted."
)
else:
report.status_extended = (
f"Users in domain {drive_client.provider.identity.domain} "
f"are prevented from creating new shared drives. "
f"Users should be allowed to create new shared drives to avoid "
f"data loss when accounts are deleted."
)
report.status_extended = (
f"Users in domain {drive_client.provider.identity.domain} "
f"are prevented from creating new shared drives. "
f"Users should be allowed to create new shared drives to avoid "
f"data loss when accounts are deleted."
)
findings.append(report)
@@ -35,21 +35,21 @@ class drive_shared_drive_disable_download_print_copy(Check):
f"{drive_client.provider.identity.domain} is restricted to "
f"{allowed}."
)
elif allowed is None:
report.status = "PASS"
report.status_extended = (
f"Download, print, and copy restrictions for shared drives use "
f"Google's secure default configuration (disabled for viewers "
f"and commenters) "
f"in domain {drive_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if allowed is None:
report.status_extended = (
f"Download, print, and copy restrictions for shared drive "
f"viewers and commenters are not explicitly configured in "
f"domain {drive_client.provider.identity.domain}. "
f"These actions should be restricted to editors or managers only."
)
else:
report.status_extended = (
f"Download, print, and copy in shared drives in domain "
f"{drive_client.provider.identity.domain} is set to {allowed}. "
f"These actions should be restricted to editors or managers only."
)
report.status_extended = (
f"Download, print, and copy in shared drives in domain "
f"{drive_client.provider.identity.domain} is set to {allowed}. "
f"These actions should be restricted to editors or managers only."
)
findings.append(report)
@@ -36,21 +36,20 @@ class drive_warn_sharing_with_allowlisted_domains(Check):
f"Users are warned when sharing files with allowlisted "
f"domains in domain {drive_client.provider.identity.domain}."
)
elif warn_enabled is None:
report.status = "PASS"
report.status_extended = (
f"Warning when sharing with allowlisted domains uses Google's "
f"secure default configuration (enabled) "
f"in domain {drive_client.provider.identity.domain}."
)
else:
report.status = "FAIL"
if warn_enabled is None:
report.status_extended = (
f"Warning when sharing with allowlisted domains is not "
f"explicitly configured in domain "
f"{drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files with users in allowlisted domains."
)
else:
report.status_extended = (
f"Warning when sharing with allowlisted domains is disabled "
f"in domain {drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files with users in allowlisted domains."
)
report.status_extended = (
f"Warning when sharing with allowlisted domains is disabled "
f"in domain {drive_client.provider.identity.domain}. "
f"Users should be warned when sharing files with users in allowlisted domains."
)
findings.append(report)
+41 -33
View File
@@ -163,42 +163,50 @@ class ImageProvider(Provider):
# Registry scan mode: enumerate images from registry
if self.registry:
self._enumerate_registry()
if self._listing_only:
return
for image in self.images:
self._validate_image_name(image)
if not self.images:
raise ImageNoImagesProvidedError(
file=__file__,
message="No images provided for scanning.",
)
# Audit Config
if config_content:
self._audit_config = config_content
else:
if not config_path:
config_path = default_config_file_path
self._audit_config = load_and_validate_config_file(self._type, config_path)
# Fixer Config
self._fixer_config = fixer_config if fixer_config is not None else {}
# Mutelist (not needed for Image provider since Trivy has its own logic)
# Safe defaults for listing-only mode (overwritten below in scan mode)
self._audit_config = {}
self._fixer_config = {}
self._mutelist = None
self.audit_metadata = None
self.audit_metadata = Audit_Metadata(
provider=self._type,
account_id=self.audited_account,
account_name="image",
region=self.region,
services_scanned=0,
expected_checks=[],
completed_checks=0,
audit_progress=0,
)
# Skip scan setup for listing-only mode
if not self._listing_only:
for image in self.images:
self._validate_image_name(image)
if not self.images:
raise ImageNoImagesProvidedError(
file=__file__,
message="No images provided for scanning.",
)
# Audit Config
if config_content:
self._audit_config = config_content
else:
if not config_path:
config_path = default_config_file_path
self._audit_config = load_and_validate_config_file(
self._type, config_path
)
# Fixer Config
self._fixer_config = fixer_config if fixer_config is not None else {}
# Mutelist (not needed for Image provider since Trivy has its own logic)
self._mutelist = None
self.audit_metadata = Audit_Metadata(
provider=self._type,
account_id=self.audited_account,
account_name="image",
region=self.region,
services_scanned=0,
expected_checks=[],
completed_checks=0,
audit_progress=0,
)
Provider.set_global_provider(self)
@@ -0,0 +1,38 @@
{
"Provider": "m365",
"CheckID": "entra_pim_role_usage_alert_exists",
"CheckTitle": "PIM alert for unused privileged roles monitors stale role assignments",
"CheckType": [],
"ServiceName": "entra",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "NotDefined",
"ResourceGroup": "IAM",
"Description": "Privileged Identity Management (PIM) can be configured to alert when **privileged roles are not being used**. This alert detects stale role assignments where administrators have not exercised their assigned privileges, helping identify unnecessary access that should be reviewed or removed.",
"Risk": "Without monitoring for unused privileged roles, **stale role assignments** accumulate undetected. Dormant privileged accounts become targets for **credential theft** and **lateral movement**, expanding the attack surface. Attackers who compromise an unused admin account gain elevated access with lower detection risk.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://learn.microsoft.com/en-us/entra/id-governance/privileged-identity-management/pim-how-to-configure-security-alerts",
"https://learn.microsoft.com/en-us/entra/id-governance/privileged-identity-management/pim-security-alerts"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "1. In the Microsoft Entra admin center, go to Identity governance > Privileged Identity Management > Microsoft Entra roles > Alerts.\n2. Locate the alert **Administrators aren't using their privileged roles**.\n3. Click the alert to review its settings and ensure it is enabled.\n4. Configure the alert threshold and notification settings as needed.\n5. Review any flagged stale role assignments and remove unnecessary access.",
"Terraform": ""
},
"Recommendation": {
"Text": "Enable PIM alerts for **unused privileged roles** to detect stale assignments. Apply the **principle of least privilege** by regularly reviewing role assignments and removing access that is no longer needed. Use **time-bound eligible assignments** instead of permanent active roles.",
"Url": "https://hub.prowler.com/check/entra_pim_role_usage_alert_exists"
}
},
"Categories": [
"identity-access",
"e5"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}
@@ -0,0 +1,50 @@
from typing import List
from prowler.lib.check.models import Check, CheckReportM365
from prowler.providers.m365.services.entra.entra_client import entra_client
# The alert definition ID for "Administrators aren't using their privileged roles"
# (also known as the StaleSignInAlert or inactive role assignment alert).
STALE_SIGN_IN_ALERT_DEFINITION_ID = "DirectoryRoleInactiveAlertDefinition"


class entra_pim_role_usage_alert_exists(Check):
    """
    Ensure that the PIM alert for unused privileged roles is configured and active.

    This check verifies that Privileged Identity Management (PIM) is configured
    to alert when administrators are not using their assigned privileged roles,
    helping detect stale or unnecessary role assignments.

    - PASS: The PIM alert for unused privileged roles exists and is active.
    - FAIL: The PIM alert for unused privileged roles does not exist or is not active.
    """

    def execute(self) -> List[CheckReportM365]:
        """Execute the check logic.

        Returns:
            A list of reports containing the result of the check.
        """
        report = CheckReportM365(
            metadata=self.metadata(),
            resource={},
            resource_name="PIM Role Usage Alert",
            resource_id="pimRoleUsageAlert",
        )

        # Tenant-wide check: PASS as soon as any active alert matches the
        # stale-role alert definition; otherwise the single report stays FAIL.
        alert_is_configured = any(
            STALE_SIGN_IN_ALERT_DEFINITION_ID in alert.alert_definition_id
            and alert.is_active
            for alert in entra_client.pim_alerts
        )

        if alert_is_configured:
            report.status = "PASS"
            report.status_extended = "PIM alert for unused privileged roles exists and is active."
        else:
            report.status = "FAIL"
            report.status_extended = "PIM alert for unused privileged roles does not exist or is not active."

        return [report]
@@ -36,6 +36,7 @@ class Entra(M365Service):
user_accounts_status (dict): Dictionary of user account statuses.
oauth_apps (dict): Dictionary of OAuth applications from Defender XDR.
authentication_method_configurations (dict): Dictionary of authentication method configurations.
pim_alerts (list): List of PIM alerts configured in the tenant.
"""
def __init__(self, provider: M365Provider):
@@ -83,6 +84,7 @@ class Entra(M365Service):
self._get_oauth_apps(),
self._get_directory_sync_settings(),
self._get_authentication_method_configurations(),
self._get_pim_alerts(),
)
)
@@ -98,6 +100,7 @@ class Entra(M365Service):
self.authentication_method_configurations: Dict[
str, AuthenticationMethodConfiguration
] = attributes[9]
self.pim_alerts: List[PIMAlert] = attributes[10]
self.user_accounts_status = {}
if created_loop:
@@ -1019,6 +1022,45 @@ OAuthAppInfo
return oauth_apps
async def _get_pim_alerts(self):
    """Retrieve PIM (Privileged Identity Management) alerts from Microsoft Entra.

    Fetches PIM alerts from the identity governance API to determine which
    alert policies are configured and active in the tenant, including
    alerts for unused privileged roles.

    Returns:
        list[PIMAlert]: A list of PIM alerts configured in the tenant,
            or an empty list if retrieval fails.
    """
    logger.info("Entra - Getting PIM alerts...")
    collected_alerts = []
    try:
        response = (
            await self.client.identity_governance.role_management_alerts.alerts.get()
        )
        # The SDK response exposes the alert list on `.value`; guard against
        # a missing or None attribute so iteration is always safe.
        raw_alerts = getattr(response, "value", []) or []
        for raw_alert in raw_alerts:
            # Normalize every field with a safe fallback so a partially
            # populated SDK object still maps cleanly onto the model.
            pim_alert = PIMAlert(
                id=getattr(raw_alert, "id", ""),
                alert_definition_id=getattr(raw_alert, "alert_definition_id", ""),
                scope_id=getattr(raw_alert, "scope_id", "") or "",
                scope_type=getattr(raw_alert, "scope_type", "") or "",
                is_active=getattr(raw_alert, "is_active", False) or False,
                number_of_affected_items=getattr(
                    raw_alert, "number_of_affected_items", 0
                )
                or 0,
            )
            collected_alerts.append(pim_alert)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
    return collected_alerts
async def _get_authentication_method_configurations(self):
"""Retrieve authentication method configurations from Microsoft Entra.
@@ -1481,3 +1523,23 @@ class OAuthApp(BaseModel):
is_admin_consented: bool = False
last_used_time: Optional[str] = None
app_origin: str = ""
class PIMAlert(BaseModel):
    """Model representing a PIM (Privileged Identity Management) alert.

    Attributes:
        id: The unique identifier for the alert.
        alert_definition_id: The identifier of the alert definition type.
        scope_id: The scope ID (typically the tenant ID).
        scope_type: The scope type (e.g., 'DirectoryRole').
        is_active: Whether the alert is currently active/enabled.
        number_of_affected_items: The number of items affected by the alert.
    """

    # Required fields — Entra._get_pim_alerts always supplies them (with ""
    # fallbacks when absent on the SDK object).
    id: str
    alert_definition_id: str
    # Optional metadata describing the alert's scope and current impact.
    scope_id: str = ""
    scope_type: str = ""
    is_active: bool = False
    number_of_affected_items: int = 0
+3 -4
View File
@@ -30,7 +30,7 @@ dependencies = [
"azure-mgmt-postgresqlflexibleservers==1.1.0",
"azure-mgmt-recoveryservices==3.1.0",
"azure-mgmt-recoveryservicesbackup==9.2.0",
"azure-mgmt-resource==23.3.0",
"azure-mgmt-resource==24.0.0",
"azure-mgmt-search==9.1.0",
"azure-mgmt-security==7.0.0",
"azure-mgmt-sql==3.0.1",
@@ -57,7 +57,7 @@ dependencies = [
"kubernetes==32.0.1",
"markdown==3.10.2",
"microsoft-kiota-abstractions==1.9.2",
"msgraph-sdk==1.23.0",
"msgraph-sdk==1.55.0",
"numpy==2.0.2",
"openstacksdk==4.2.0",
"pandas==2.2.3",
@@ -95,7 +95,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">=3.10,<3.13"
version = "5.24.0"
version = "5.25.0"
[project.scripts]
prowler = "prowler.__main__:prowler"
@@ -121,7 +121,6 @@ docker = "7.1.0"
filelock = "3.20.3"
flake8 = "7.1.2"
freezegun = "1.5.1"
marshmallow = "==3.26.2"
mock = "5.2.0"
moto = {extras = ["all"], version = "5.1.11"}
openapi-schema-validator = "0.6.3"
@@ -433,6 +433,29 @@ class TestCloudflareValidateCredentials:
with pytest.raises(CloudflareNoAccountsError):
CloudflareProvider.validate_credentials(session)
def test_validate_credentials_breaks_on_repeated_account_ids(self):
    """Pagination must stop when the SDK repeats account IDs to avoid infinite loops."""

    def endless_duplicate_accounts():
        # Simulate a broken SDK paginator that re-yields the same account forever.
        duplicate = MagicMock()
        duplicate.id = ACCOUNT_ID
        while True:
            yield duplicate

    client = MagicMock()
    client.user.get.side_effect = Exception("Some other error")
    client.accounts.list.return_value = endless_duplicate_accounts()

    session = CloudflareSession(
        client=client,
        api_token=API_TOKEN,
        api_key=None,
        api_email=None,
    )

    # Must return without hanging; repeated IDs break the loop.
    CloudflareProvider.validate_credentials(session)
class TestCloudflareTestConnection:
"""Tests for test_connection method."""
@@ -73,8 +73,8 @@ class TestCalendarExternalInvitationsWarning:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure (enabled)"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -100,8 +100,8 @@ class TestCalendarExternalInvitationsWarning:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -104,8 +104,8 @@ class TestCalendarExternalSharingPrimaryCalendar:
assert findings[0].status == "FAIL"
assert "EXTERNAL_ALL_INFO_READ_WRITE" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure (free/busy only)"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -131,8 +131,8 @@ class TestCalendarExternalSharingPrimaryCalendar:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -67,8 +67,8 @@ class TestDriveExternalSharingWarnUsers:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -92,8 +92,8 @@ class TestDriveExternalSharingWarnUsers:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -69,8 +69,8 @@ class TestDriveSharedDriveCreationAllowed:
assert findings[0].status == "FAIL"
assert "prevented" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -94,8 +94,8 @@ class TestDriveSharedDriveCreationAllowed:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -101,8 +101,8 @@ class TestDriveSharedDriveDisableDownloadPrintCopy:
assert findings[0].status == "FAIL"
assert "ALL" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -128,8 +128,8 @@ class TestDriveSharedDriveDisableDownloadPrintCopy:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -71,8 +71,8 @@ class TestDriveWarnSharingWithAllowlistedDomains:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
def test_fail_no_policy_set(self):
"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
def test_pass_using_default(self):
"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -98,8 +98,8 @@ class TestDriveWarnSharingWithAllowlistedDomains:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -1185,3 +1185,58 @@ class TestInitGlobalProviderRegistryEnumeration:
# The "other/lib" repo should be filtered out by --image-filter
assert not any("other/lib" in img for img in provider.images)
assert len(provider.images) == 3
class TestRegistryListMode:
    """Regression test: `prowler image --registry <url> --registry-list` crashes.

    Before the fix: when --registry-list is passed, ImageProvider._enumerate_registry
    sets _listing_only = True and __init__ returns early before calling
    Provider.set_global_provider(self). The caller in __main__.py then calls
    global_provider.print_credentials() on a None reference, raising
    AttributeError: 'NoneType' object has no attribute 'print_credentials'.
    With the fix, the provider registers itself even in listing-only mode,
    so the sequence below must complete without raising.
    """

    @patch("prowler.providers.image.image_provider.create_registry_adapter")
    @patch("prowler.providers.common.provider.load_and_validate_config_file")
    def test_registry_list_does_not_crash(self, mock_load_config, mock_adapter_factory):
        """Reproduce the --registry-list crash by running the same sequence
        as __main__.py: init_global_provider, get_global_provider,
        then print_credentials."""
        mock_load_config.return_value = {}
        # Fake registry adapter: one repository with two tags is enough to
        # drive the listing path.
        adapter = MagicMock()
        adapter.list_repositories.return_value = ["myorg/app"]
        adapter.list_tags.return_value = ["v1.0", "latest"]
        mock_adapter_factory.return_value = adapter

        # Minimal CLI namespace for `prowler image --registry ... --registry-list`.
        arguments = Namespace(
            provider="image",
            config_file=None,
            fixer_config=None,
            images=None,
            image_list_file=None,
            scanners=["vuln"],
            image_config_scanners=None,
            trivy_severity=None,
            ignore_unfixed=False,
            timeout="5m",
            registry="myregistry.io",
            image_filter=None,
            tag_filter=None,
            max_images=0,
            registry_insecure=False,
            registry_list_images=True,
        )

        # Reproduce the exact crash sequence from __main__.py lines 289-294:
        #     Provider.init_global_provider(args)
        #     global_provider = Provider.get_global_provider()
        #     global_provider.print_credentials()
        # Isolate the global provider slot so other tests are unaffected.
        with mock.patch.object(Provider, "_global", None):
            Provider.init_global_provider(arguments)
            global_provider = Provider.get_global_provider()

            # Pre-fix this line crashed (global_provider was None); post-fix
            # it must succeed.
            global_provider.print_credentials()
@@ -0,0 +1,220 @@
from unittest import mock
from prowler.providers.m365.services.entra.entra_service import PIMAlert
from tests.providers.m365.m365_fixtures import DOMAIN, set_mocked_m365_provider
class Test_entra_pim_role_usage_alert_exists:
def test_no_pim_alerts(self):
    """Test when no PIM alerts exist - should FAIL since the alert is always evaluated."""
    # Use a MagicMock *instance* (not the bare `mock.MagicMock` class):
    # assigning attributes on the class object would leak state onto
    # every MagicMock created afterwards in the test session.
    entra_client = mock.MagicMock()
    entra_client.audited_tenant = "audited_tenant"
    entra_client.audited_domain = DOMAIN

    with (
        mock.patch(
            "prowler.providers.common.provider.Provider.get_global_provider",
            return_value=set_mocked_m365_provider(),
        ),
        mock.patch(
            "prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists.entra_client",
            new=entra_client,
        ),
    ):
        from prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists import (
            entra_pim_role_usage_alert_exists,
        )

        # An empty alert list must yield a single FAIL finding.
        entra_client.pim_alerts = []

        check = entra_pim_role_usage_alert_exists()
        result = check.execute()

        assert len(result) == 1
        assert result[0].status == "FAIL"
        assert (
            result[0].status_extended
            == "PIM alert for unused privileged roles does not exist or is not active."
        )
        assert result[0].resource_name == "PIM Role Usage Alert"
        assert result[0].resource_id == "pimRoleUsageAlert"
def test_entra_pim_role_usage_alert_exists_pass(self):
"""Test when the PIM alert for unused privileged roles exists and is active."""
entra_client = mock.MagicMock
entra_client.audited_tenant = "audited_tenant"
entra_client.audited_domain = DOMAIN
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_m365_provider(),
),
mock.patch(
"prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists.entra_client",
new=entra_client,
),
):
from prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists import (
entra_pim_role_usage_alert_exists,
)
entra_client.pim_alerts = [
PIMAlert(
id="alert-1",
alert_definition_id="DirectoryRoleInactiveAlertDefinition",
scope_id="tenant-id",
scope_type="DirectoryRole",
is_active=True,
number_of_affected_items=3,
),
]
check = entra_pim_role_usage_alert_exists()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "PIM alert for unused privileged roles exists and is active."
)
assert result[0].resource_name == "PIM Role Usage Alert"
assert result[0].resource_id == "pimRoleUsageAlert"
def test_entra_pim_role_usage_alert_exists_fail_not_active(self):
"""Test when the PIM alert for unused privileged roles exists but is not active."""
entra_client = mock.MagicMock
entra_client.audited_tenant = "audited_tenant"
entra_client.audited_domain = DOMAIN
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_m365_provider(),
),
mock.patch(
"prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists.entra_client",
new=entra_client,
),
):
from prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists import (
entra_pim_role_usage_alert_exists,
)
entra_client.pim_alerts = [
PIMAlert(
id="alert-1",
alert_definition_id="DirectoryRoleInactiveAlertDefinition",
scope_id="tenant-id",
scope_type="DirectoryRole",
is_active=False,
number_of_affected_items=0,
),
]
check = entra_pim_role_usage_alert_exists()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "PIM alert for unused privileged roles does not exist or is not active."
)
assert result[0].resource_name == "PIM Role Usage Alert"
assert result[0].resource_id == "pimRoleUsageAlert"
def test_entra_pim_role_usage_alert_exists_fail_different_alert(self):
"""Test when PIM alerts exist but none match the expected definition ID."""
entra_client = mock.MagicMock
entra_client.audited_tenant = "audited_tenant"
entra_client.audited_domain = DOMAIN
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_m365_provider(),
),
mock.patch(
"prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists.entra_client",
new=entra_client,
),
):
from prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists import (
entra_pim_role_usage_alert_exists,
)
entra_client.pim_alerts = [
PIMAlert(
id="alert-1",
alert_definition_id="SomeOtherAlertDefinition",
scope_id="tenant-id",
scope_type="DirectoryRole",
is_active=True,
number_of_affected_items=1,
),
]
check = entra_pim_role_usage_alert_exists()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "PIM alert for unused privileged roles does not exist or is not active."
)
assert result[0].resource_name == "PIM Role Usage Alert"
assert result[0].resource_id == "pimRoleUsageAlert"
def test_entra_pim_role_usage_alert_exists_pass_among_multiple_alerts(self):
"""Test when multiple PIM alerts exist and the correct one is active."""
entra_client = mock.MagicMock
entra_client.audited_tenant = "audited_tenant"
entra_client.audited_domain = DOMAIN
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_m365_provider(),
),
mock.patch(
"prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists.entra_client",
new=entra_client,
),
):
from prowler.providers.m365.services.entra.entra_pim_role_usage_alert_exists.entra_pim_role_usage_alert_exists import (
entra_pim_role_usage_alert_exists,
)
entra_client.pim_alerts = [
PIMAlert(
id="alert-1",
alert_definition_id="SomeOtherAlertDefinition",
scope_id="tenant-id",
scope_type="DirectoryRole",
is_active=True,
number_of_affected_items=1,
),
PIMAlert(
id="alert-2",
alert_definition_id="DirectoryRoleInactiveAlertDefinition",
scope_id="tenant-id",
scope_type="DirectoryRole",
is_active=True,
number_of_affected_items=5,
),
]
check = entra_pim_role_usage_alert_exists()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "PIM alert for unused privileged roles exists and is active."
)
assert result[0].resource_name == "PIM Role Usage Alert"
assert result[0].resource_id == "pimRoleUsageAlert"
+17
View File
@@ -2,6 +2,23 @@
All notable changes to the **Prowler UI** are documented in this file.
## [1.24.1] (Prowler v5.24.1)
### 🐞 Fixed
- Findings and filter UX fixes: exclude muted findings by default in the resource detail drawer and finding group resource views, show the category context label (for example `Status: FAIL`) on MultiSelect triggers instead of hiding the placeholder, and add a `wide` width option for filter dropdowns, applied to the findings Scan filter to prevent label truncation [(#10734)](https://github.com/prowler-cloud/prowler/pull/10734)
- Findings grouped view now handles zero-resource IaC counters, refines drawer loading states, and adds provider indicators to finding groups [(#10736)](https://github.com/prowler-cloud/prowler/pull/10736)
- Other Findings for this resource: ordering by `severity` [(#10778)](https://github.com/prowler-cloud/prowler/pull/10778)
- Other Findings for this resource: show `delta` indicator [(#10778)](https://github.com/prowler-cloud/prowler/pull/10778)
- Compliance: requirement findings do not show muted findings [(#10778)](https://github.com/prowler-cloud/prowler/pull/10778)
- Latest new findings: link to finding groups order by `-severity,-last_seen_at` [(#10778)](https://github.com/prowler-cloud/prowler/pull/10778)
### 🔒 Security
- Upgrade React to 19.2.5 and Next.js to 16.2.3 to mitigate CVE-2026-23869 (React2DoS), a high-severity unauthenticated remote DoS vulnerability in the React Flight Protocol's Server Function deserialization [(#10754)](https://github.com/prowler-cloud/prowler/pull/10754)
---
## [1.24.0] (Prowler v5.24.0)
### 🚀 Added
@@ -115,4 +115,35 @@ describe("adaptFindingsByResourceResponse — malformed input", () => {
expect(result[0].id).toBe("finding-1");
expect(result[0].checkId).toBe("s3_check");
});
it("should normalize a single finding response into a one-item drawer array", () => {
  // Given — getFindingById returns a single JSON:API resource object
  const singleFinding = {
    id: "finding-1",
    attributes: {
      uid: "uid-1",
      check_id: "s3_check",
      status: "FAIL",
      severity: "critical",
      check_metadata: {
        checktitle: "S3 Check",
      },
    },
    relationships: {
      resources: { data: [] },
      scan: { data: null },
    },
  };

  // When — the adapter receives `data` as a single object, not an array
  const result = adaptFindingsByResourceResponse({
    data: singleFinding,
    included: [],
  });

  // Then — the single resource is wrapped into a one-item array
  expect(result).toHaveLength(1);
  expect(result[0].id).toBe("finding-1");
  expect(result[0].checkTitle).toBe("S3 Check");
});
});
@@ -165,16 +165,18 @@ type IncludedDict = Record<string, IncludedItem>;
* then resolves each finding's resource and provider relationships.
*/
interface JsonApiResponse {
data: FindingApiItem[];
data: FindingApiItem | FindingApiItem[];
included?: Record<string, unknown>[];
}
/**
 * Type guard for a JSON:API response whose `data` member is either a single
 * resource object (e.g. a `getFindingById` payload) or an array of resource
 * objects (list endpoints). Rejects null/primitive payloads and `data: null`.
 *
 * Note: the original span contained leftover diff residue — the stale
 * `Array.isArray((value as { data: unknown }).data)` line next to the new
 * combined condition with no joining operator, which does not parse. This is
 * the reconstructed, coherent guard.
 */
function isJsonApiResponse(value: unknown): value is JsonApiResponse {
  const data = (value as { data?: unknown })?.data;
  return (
    value !== null &&
    typeof value === "object" &&
    "data" in value &&
    (Array.isArray(data) || (data !== null && typeof data === "object"))
  );
}
@@ -188,8 +190,11 @@ export function adaptFindingsByResourceResponse(
const resourcesDict = createDict("resources", apiResponse) as IncludedDict;
const scansDict = createDict("scans", apiResponse) as IncludedDict;
const providersDict = createDict("providers", apiResponse) as IncludedDict;
const findings = Array.isArray(apiResponse.data)
? apiResponse.data
: [apiResponse.data];
return apiResponse.data.map((item) => {
return findings.map((item) => {
const attrs = item.attributes;
const meta = (attrs.check_metadata || {}) as Record<string, unknown>;
const remediationRaw = meta.remediation as
@@ -43,6 +43,7 @@ vi.mock("@/actions/finding-groups", () => ({
}));
import {
getLatestFindingsByResourceUid,
resolveFindingIdsByCheckIds,
resolveFindingIdsByVisibleGroupResources,
} from "./findings-by-resource";
@@ -262,3 +263,46 @@ describe("resolveFindingIdsByVisibleGroupResources", () => {
expect(fetchMock).not.toHaveBeenCalled();
});
});
describe("getLatestFindingsByResourceUid", () => {
  beforeEach(() => {
    vi.clearAllMocks();
    vi.stubGlobal("fetch", fetchMock);
    getAuthHeadersMock.mockResolvedValue({ Authorization: "Bearer token" });
    handleApiResponseMock.mockResolvedValue({ data: [] });
  });

  // Parses the URL the stubbed fetch was last invoked with, so each test can
  // assert on its pathname and query parameters.
  const requestedUrl = () => new URL(fetchMock.mock.calls[0][0]);

  it("should restrict to FAIL, exclude muted findings, and apply severity/time sorting by default", async () => {
    fetchMock.mockResolvedValue(new Response("", { status: 200 }));

    await getLatestFindingsByResourceUid({ resourceUid: "resource-1" });

    const url = requestedUrl();
    expect(url.pathname).toBe("/api/v1/findings/latest");
    expect(url.searchParams.get("filter[resource_uid]")).toBe("resource-1");
    // Status filter is applied server-side so the page[size]=50 window
    // always holds FAIL rows — guards against PASS-heavy resources
    // starving FAILs out of the result.
    expect(url.searchParams.get("filter[status]")).toBe("FAIL");
    expect(url.searchParams.get("filter[muted]")).toBe("false");
    expect(url.searchParams.get("sort")).toBe("severity,-updated_at");
  });

  it("should include muted findings only when explicitly requested", async () => {
    fetchMock.mockResolvedValue(new Response("", { status: 200 }));

    await getLatestFindingsByResourceUid({
      resourceUid: "resource-1",
      includeMuted: true,
    });

    const url = requestedUrl();
    expect(url.searchParams.get("filter[status]")).toBe("FAIL");
    expect(url.searchParams.get("filter[muted]")).toBe("include");
    expect(url.searchParams.get("sort")).toBe("severity,-updated_at");
  });
});
+5 -2
View File
@@ -250,10 +250,12 @@ export const getLatestFindingsByResourceUid = async ({
resourceUid,
page = 1,
pageSize = 50,
includeMuted = false,
}: {
resourceUid: string;
page?: number;
pageSize?: number;
includeMuted?: boolean;
}) => {
const headers = await getAuthHeaders({ contentType: false });
@@ -262,8 +264,9 @@ export const getLatestFindingsByResourceUid = async ({
);
url.searchParams.append("filter[resource_uid]", resourceUid);
url.searchParams.append("filter[muted]", "include");
url.searchParams.append("sort", "-severity,-updated_at");
url.searchParams.append("filter[status]", "FAIL");
url.searchParams.append("filter[muted]", includeMuted ? "include" : "false");
url.searchParams.append("sort", "severity,-updated_at");
if (page) url.searchParams.append("page[number]", page.toString());
if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
+9 -1
View File
@@ -141,7 +141,15 @@ export const getLatestMetadataInfo = async ({
}
};
export const getFindingById = async (findingId: string, include = "") => {
interface GetFindingByIdOptions {
source?: "resource-detail-drawer";
}
export const getFindingById = async (
findingId: string,
include = "",
_options?: GetFindingByIdOptions,
) => {
const headers = await getAuthHeaders({ contentType: false });
const url = new URL(`${apiBaseUrl}/findings/${findingId}`);
@@ -4,6 +4,7 @@ import { getLatestFindings } from "@/actions/findings/findings";
import { LighthouseBanner } from "@/components/lighthouse/banner";
import { LinkToFindings } from "@/components/overview";
import { ColumnLatestFindings } from "@/components/overview/new-findings-table/table";
import { CardTitle } from "@/components/shadcn";
import { DataTable } from "@/components/ui/table";
import { createDict } from "@/lib/helper";
import { FindingProps, SearchParamsProps } from "@/types";
@@ -57,24 +58,23 @@ export async function FindingsViewSSR({ searchParams }: FindingsViewSSRProps) {
};
return (
<div className="flex w-full flex-col gap-6">
<div className="flex w-full flex-col">
<LighthouseBanner />
<div className="relative w-full flex-col justify-between md:flex-row">
<div className="flex w-full flex-col items-start gap-2 md:flex-row md:items-center">
<h3 className="text-sm font-bold text-nowrap whitespace-nowrap uppercase">
Latest new failing findings
</h3>
<p className="text-text-neutral-tertiary text-xs whitespace-nowrap">
Showing the latest 10 new failing findings by severity.
</p>
<LinkToFindings />
</div>
</div>
<DataTable
key={`dashboard-findings-${Date.now()}`}
columns={ColumnLatestFindings}
data={(expandedResponse?.data || []) as FindingProps[]}
header={
<div className="flex w-full items-center justify-between gap-4">
<div className="flex flex-col gap-0.5">
<CardTitle>Latest New Failed Findings</CardTitle>
<p className="text-text-neutral-tertiary text-xs">
Showing the latest 10 sorted by severity
</p>
</div>
<LinkToFindings />
</div>
}
/>
</div>
);
@@ -1,6 +1,7 @@
import { Skeleton } from "@heroui/skeleton";
import { Suspense } from "react";
import { SkeletonTableNewFindings } from "@/components/overview/new-findings-table/table";
import { SearchParamsProps } from "@/types";
import { GraphsTabsClient } from "./_components/graphs-tabs-client";
@@ -18,6 +19,10 @@ const LoadingFallback = () => (
</div>
);
const TAB_FALLBACKS: Partial<Record<TabId, React.ReactNode>> = {
findings: <SkeletonTableNewFindings />,
};
type GraphComponent = React.ComponentType<{ searchParams: SearchParamsProps }>;
const GRAPH_COMPONENTS: Record<TabId, GraphComponent> = {
@@ -38,9 +43,10 @@ export const GraphsTabsWrapper = async ({
const tabsContent = Object.fromEntries(
GRAPH_TABS.map((tab) => {
const Component = GRAPH_COMPONENTS[tab.id];
const fallback = TAB_FALLBACKS[tab.id] ?? <LoadingFallback />;
return [
tab.id,
<Suspense key={tab.id} fallback={<LoadingFallback />}>
<Suspense key={tab.id} fallback={fallback}>
<Component searchParams={searchParams} />
</Suspense>,
];
+4 -2
View File
@@ -25,8 +25,10 @@ describe("findings page", () => {
expect(source).toContain("resolveFindingScanDateFilters");
});
it("uses getLatestFindingGroups for non-date/scan queries and getFindingGroups for historical", () => {
expect(source).toContain("hasDateOrScan");
it("uses resolved filters to choose getFindingGroups for historical queries and getLatestFindingGroups otherwise", () => {
expect(source).toContain("hasHistoricalData");
expect(source).toContain("hasDateOrScanFilter(filtersWithScanDates)");
expect(source).toContain("hasDateOrScanFilter(filters)");
expect(source).toContain("getFindingGroups");
expect(source).toContain("getLatestFindingGroups");
});
+6 -8
View File
@@ -34,9 +34,6 @@ export default async function Findings({
const { encodedSort } = extractSortAndKey(resolvedSearchParams);
const { filters, query } = extractFiltersAndQuery(resolvedSearchParams);
// Check if the searchParams contain any date or scan filter
const hasDateOrScan = hasDateOrScanFilter(resolvedSearchParams);
const [providersData, scansData] = await Promise.all([
getProviders({ pageSize: 50 }),
getScans({ pageSize: 50 }),
@@ -51,8 +48,10 @@ export default async function Findings({
},
});
const hasHistoricalData = hasDateOrScanFilter(filtersWithScanDates);
const metadataInfoData = await (
hasDateOrScan ? getMetadataInfo : getLatestMetadataInfo
hasHistoricalData ? getMetadataInfo : getLatestMetadataInfo
)({
query,
sort: encodedSort,
@@ -119,10 +118,9 @@ const SSRDataTable = async ({
const pageSize = parseInt(searchParams.pageSize?.toString() || "10", 10);
const { encodedSort } = extractSortAndKey(searchParams);
// Check if the searchParams contain any date or scan filter
const hasDateOrScan = hasDateOrScanFilter(searchParams);
const hasHistoricalData = hasDateOrScanFilter(filters);
const fetchFindingGroups = hasDateOrScan
const fetchFindingGroups = hasHistoricalData
? getFindingGroups
: getLatestFindingGroups;
@@ -151,7 +149,7 @@ const SSRDataTable = async ({
data={groups}
metadata={findingGroupsData?.meta}
resolvedFilters={filters}
hasHistoricalData={hasDateOrScan}
hasHistoricalData={hasHistoricalData}
/>
</>
);
@@ -62,6 +62,7 @@ export const ClientAccordionContent = ({
filters: {
"filter[check_id__in]": checkIds.join(","),
"filter[scan]": scanId,
"filter[muted]": "false",
...(region && { "filter[region__in]": region }),
},
page: parseInt(pageNumber, 10),
+3 -5
View File
@@ -12,6 +12,7 @@ import {
PopoverTrigger,
} from "@/components/shadcn/popover";
import { useUrlFilters } from "@/hooks/use-url-filters";
import { toLocalDateString } from "@/lib/date-utils";
import { cn } from "@/lib/utils";
/** Batch mode: caller controls both the pending date value and the notification callback (all-or-nothing). */
@@ -67,17 +68,14 @@ export const CustomDatePicker = ({
const applyDateFilter = (selectedDate: Date | undefined) => {
if (onBatchChange) {
// Batch mode: notify caller instead of updating URL
onBatchChange(
"inserted_at",
selectedDate ? format(selectedDate, "yyyy-MM-dd") : "",
);
onBatchChange("inserted_at", toLocalDateString(selectedDate) ?? "");
return;
}
// Instant mode (default): push to URL immediately
if (selectedDate) {
// Format as YYYY-MM-DD for the API
updateFilter("inserted_at", format(selectedDate, "yyyy-MM-dd"));
updateFilter("inserted_at", toLocalDateString(selectedDate) ?? "");
} else {
updateFilter("inserted_at", null);
}
+9 -45
View File
@@ -20,10 +20,13 @@ import { DataTableFilterCustom } from "@/components/ui/table";
import { useFilterBatch } from "@/hooks/use-filter-batch";
import { getCategoryLabel, getGroupLabel } from "@/lib/categories";
import { FilterType, ScanEntity } from "@/types";
import { DATA_TABLE_FILTER_MODE, FilterParam } from "@/types/filters";
import { DATA_TABLE_FILTER_MODE } from "@/types/filters";
import { ProviderProps } from "@/types/providers";
import { getFindingsFilterDisplayValue } from "./findings-filters.utils";
import {
buildFindingsFilterChips,
getFindingsFilterDisplayValue,
} from "./findings-filters.utils";
interface FindingsFiltersProps {
/** Provider data for ProviderTypeSelector and AccountsSelector */
@@ -37,30 +40,6 @@ interface FindingsFiltersProps {
uniqueGroups: string[];
}
/**
* Maps raw filter param keys (e.g. "filter[severity__in]") to human-readable labels.
* Used to render chips in the FilterSummaryStrip.
* Typed as Record<FilterParam, string> so TypeScript enforces exhaustiveness any
* addition to FilterParam will cause a compile error here if the label is missing.
*/
const FILTER_KEY_LABELS: Record<FilterParam, string> = {
"filter[provider_type__in]": "Provider",
"filter[provider_id__in]": "Account",
"filter[severity__in]": "Severity",
"filter[status__in]": "Status",
"filter[delta__in]": "Delta",
"filter[region__in]": "Region",
"filter[service__in]": "Service",
"filter[resource_type__in]": "Resource Type",
"filter[category__in]": "Category",
"filter[resource_groups__in]": "Resource Group",
"filter[scan__in]": "Scan",
"filter[scan_id]": "Scan",
"filter[scan_id__in]": "Scan",
"filter[inserted_at]": "Date",
"filter[muted]": "Muted",
};
export const FindingsFilters = ({
providers,
completedScanIds,
@@ -132,6 +111,7 @@ export const FindingsFilters = ({
key: FilterType.SCAN,
labelCheckboxGroup: "Scan ID",
values: completedScanIds,
width: "wide" as const,
valueLabelMapping: scanDetails,
labelFormatter: (value: string) =>
getFindingsFilterDisplayValue(`filter[${FilterType.SCAN}]`, value, {
@@ -144,25 +124,9 @@ export const FindingsFilters = ({
const hasCustomFilters = customFilters.length > 0;
// Build FilterChip[] from pendingFilters — one chip per individual value, not per key.
// Skip filter[muted]="false" — it is the silent default and should not appear as a chip.
const filterChips: FilterChip[] = [];
Object.entries(pendingFilters).forEach(([key, values]) => {
if (!values || values.length === 0) return;
const label = FILTER_KEY_LABELS[key as FilterParam] ?? key;
values.forEach((value) => {
// Do not show a chip for the default muted=false state
if (key === "filter[muted]" && value === "false") return;
filterChips.push({
key,
label,
value,
displayValue: getFindingsFilterDisplayValue(key, value, {
providers,
scans: scanDetails,
}),
});
});
const filterChips: FilterChip[] = buildFindingsFilterChips(pendingFilters, {
providers,
scans: scanDetails,
});
// Handler for removing a single chip: update the pending filter to remove that value.
@@ -3,7 +3,10 @@ import { describe, expect, it } from "vitest";
import { ProviderProps } from "@/types/providers";
import { ScanEntity } from "@/types/scans";
import { getFindingsFilterDisplayValue } from "./findings-filters.utils";
import {
buildFindingsFilterChips,
getFindingsFilterDisplayValue,
} from "./findings-filters.utils";
function makeProvider(
overrides: Partial<ProviderProps> & { id: string },
@@ -98,7 +101,7 @@ describe("getFindingsFilterDisplayValue", () => {
it("shows the resolved scan badge label for scan filters instead of formatting the raw scan id", () => {
expect(
getFindingsFilterDisplayValue("filter[scan__in]", "scan-1", { scans }),
).toBe("Nightly scan");
).toBe("AWS - Nightly scan");
});
it("normalizes finding statuses for display", () => {
@@ -119,7 +122,17 @@ describe("getFindingsFilterDisplayValue", () => {
);
});
it("falls back to the scan provider uid when the alias is missing", () => {
it("formats the singular delta filter the same as delta__in", () => {
// The API registers the filter as `filter[delta]` (exact), not `delta__in`.
// Both shapes must resolve to the same human label so chips don't show
// the raw "new" going through formatLabel ("NEW" via the 3-letter acronym heuristic).
expect(getFindingsFilterDisplayValue("filter[delta]", "new")).toBe("New");
expect(getFindingsFilterDisplayValue("filter[delta]", "changed")).toBe(
"Changed",
);
});
it("uses the provider display name regardless of account alias/uid", () => {
expect(
getFindingsFilterDisplayValue("filter[scan__in]", "scan-2", {
scans: [
@@ -133,17 +146,17 @@ describe("getFindingsFilterDisplayValue", () => {
}),
],
}),
).toBe("Weekly scan");
).toBe("AWS - Weekly scan");
});
it("falls back to the provider alias when the scan name is missing", () => {
it("returns only the provider name when the scan name is missing", () => {
expect(
getFindingsFilterDisplayValue("filter[scan__in]", "scan-3", {
scans: [
...scans,
makeScanMap("scan-3", {
providerInfo: {
provider: "aws",
provider: "gcp",
alias: "Fallback Account",
uid: "333333333333",
},
@@ -154,7 +167,7 @@ describe("getFindingsFilterDisplayValue", () => {
}),
],
}),
).toBe("Fallback Account");
).toBe("Google Cloud");
});
it("keeps the raw scan value when the scan cannot be resolved", () => {
@@ -185,3 +198,85 @@ describe("getFindingsFilterDisplayValue", () => {
).toBe("2026-04-07");
});
});
// Covers chip construction from a pendingFilters map: per-value expansion,
// label/displayValue normalization, the silent muted=false default, and the
// raw-key fallback for unmapped params.
describe("buildFindingsFilterChips", () => {
  it("creates one chip per value with normalized labels", () => {
    // Given — this is the exact pending state derived from the LinkToFindings URL:
    // /findings?sort=...&filter[status__in]=FAIL&filter[delta]=new
    const pendingFilters = {
      "filter[status__in]": ["FAIL"],
      "filter[delta]": ["new"],
    };

    // When
    const chips = buildFindingsFilterChips(pendingFilters);

    // Then — both chips must appear; the delta chip must use "Delta" as label
    // (not the raw "filter[delta]") and "New" as displayValue (not "NEW" via
    // the short-word acronym heuristic in formatLabel).
    expect(chips).toEqual([
      {
        key: "filter[status__in]",
        label: "Status",
        value: "FAIL",
        displayValue: "Fail",
      },
      {
        key: "filter[delta]",
        label: "Delta",
        value: "new",
        displayValue: "New",
      },
    ]);
  });

  it("treats filter[delta] and filter[delta__in] identically", () => {
    // Given — the same values routed through the singular and plural key shapes
    const chipsSingular = buildFindingsFilterChips({
      "filter[delta]": ["new", "changed"],
    });
    const chipsPlural = buildFindingsFilterChips({
      "filter[delta__in]": ["new", "changed"],
    });

    // Then — both shapes produce the same human labels and display values
    expect(
      chipsSingular.map((c) => ({ label: c.label, v: c.displayValue })),
    ).toEqual([
      { label: "Delta", v: "New" },
      { label: "Delta", v: "Changed" },
    ]);
    expect(
      chipsPlural.map((c) => ({ label: c.label, v: c.displayValue })),
    ).toEqual([
      { label: "Delta", v: "New" },
      { label: "Delta", v: "Changed" },
    ]);
  });

  it("skips the silent default filter[muted]=false", () => {
    const chips = buildFindingsFilterChips({
      "filter[muted]": ["false"],
      "filter[delta]": ["new"],
    });

    // Only the delta chip — the default muted=false should not surface
    expect(chips).toHaveLength(1);
    expect(chips[0].key).toBe("filter[delta]");
  });

  it("surfaces unmapped keys using the raw key as label (fallback)", () => {
    const chips = buildFindingsFilterChips({
      "filter[unknown_future_key]": ["value"],
    });

    // The raw key becomes the label so an unexpected param is still visible
    expect(chips).toEqual([
      {
        key: "filter[unknown_future_key]",
        label: "filter[unknown_future_key]",
        value: "value",
        displayValue: "Value",
      },
    ]);
  });
});
@@ -1,5 +1,8 @@
import type { FilterChip } from "@/components/filters/filter-summary-strip";
import { formatLabel, getCategoryLabel, getGroupLabel } from "@/lib/categories";
import { getScanEntityLabel } from "@/lib/helper-filters";
import { FINDING_STATUS_DISPLAY_NAMES } from "@/types";
import { FilterParam } from "@/types/filters";
import { getProviderDisplayName, ProviderProps } from "@/types/providers";
import { ScanEntity } from "@/types/scans";
import { SEVERITY_DISPLAY_NAMES } from "@/types/severities";
@@ -35,12 +38,7 @@ function getScanDisplayValue(
return scanId;
}
return (
scan.attributes.name ||
scan.providerInfo.alias ||
scan.providerInfo.uid ||
scanId
);
return getScanEntityLabel(scan) || scanId;
}
export function getFindingsFilterDisplayValue(
@@ -55,7 +53,7 @@ export function getFindingsFilterDisplayValue(
if (filterKey === "filter[provider_id__in]") {
return getProviderAccountDisplayValue(value, options.providers || []);
}
if (filterKey === "filter[scan__in]") {
if (filterKey === "filter[scan__in]" || filterKey === "filter[scan]") {
return getScanDisplayValue(value, options.scans || []);
}
if (filterKey === "filter[severity__in]") {
@@ -72,7 +70,7 @@ export function getFindingsFilterDisplayValue(
] ?? formatLabel(value)
);
}
if (filterKey === "filter[delta__in]") {
if (filterKey === "filter[delta__in]" || filterKey === "filter[delta]") {
return (
FINDING_DELTA_DISPLAY_NAMES[value.toLowerCase()] ?? formatLabel(value)
);
@@ -93,3 +91,67 @@ export function getFindingsFilterDisplayValue(
return formatLabel(value);
}
/**
 * Maps raw filter param keys (e.g. "filter[severity__in]") to human-readable labels.
 * Used to render chips in the FilterSummaryStrip.
 * Typed as Record<FilterParam, string> so TypeScript enforces exhaustiveness: any
 * addition to FilterParam will cause a compile error here if the label is missing.
 */
export const FILTER_KEY_LABELS: Record<FilterParam, string> = {
  "filter[provider_type__in]": "Provider",
  "filter[provider_id__in]": "Account",
  "filter[severity__in]": "Severity",
  "filter[status__in]": "Status",
  "filter[delta__in]": "Delta",
  "filter[delta]": "Delta",
  "filter[region__in]": "Region",
  "filter[service__in]": "Service",
  "filter[resource_type__in]": "Resource Type",
  "filter[category__in]": "Category",
  "filter[resource_groups__in]": "Resource Group",
  // The API exposes several scan filter shapes; all render as "Scan".
  "filter[scan]": "Scan",
  "filter[scan__in]": "Scan",
  "filter[scan_id]": "Scan",
  "filter[scan_id__in]": "Scan",
  "filter[inserted_at]": "Date",
  "filter[muted]": "Muted",
};
/** Optional lookup data used to resolve human-readable chip display values. */
interface BuildFindingsFilterChipsOptions {
  // Providers used to resolve account display names.
  providers?: ProviderProps[];
  // Scan-id → ScanEntity maps used to resolve scan display labels.
  scans?: Array<{ [scanId: string]: ScanEntity }>;
}
/**
 * Builds the chips displayed in the FilterSummaryStrip from a pendingFilters map.
 *
 * - One chip per individual value (not one per key), so a multi-select filter
 *   produces multiple chips.
 * - Silently skips the default `filter[muted]=false` so it doesn't appear as a
 *   user-applied filter.
 * - Falls back to the raw key as label for unmapped keys, so an unexpected
 *   param still surfaces instead of disappearing.
 */
export function buildFindingsFilterChips(
  pendingFilters: Record<string, string[]>,
  options: BuildFindingsFilterChipsOptions = {},
): FilterChip[] {
  const result: FilterChip[] = [];

  for (const [key, values] of Object.entries(pendingFilters)) {
    if (!values || values.length === 0) {
      continue;
    }

    // Unmapped keys keep their raw form as the label (visible fallback).
    const label = FILTER_KEY_LABELS[key as FilterParam] ?? key;

    for (const value of values) {
      // The default muted=false state is not a user-applied filter.
      if (key === "filter[muted]" && value === "false") {
        continue;
      }

      result.push({
        key,
        label,
        value,
        displayValue: getFindingsFilterDisplayValue(key, value, options),
      });
    }
  }

  return result;
}
@@ -78,6 +78,18 @@ vi.mock("./notification-indicator", () => ({
},
}));
vi.mock("@/components/shadcn/tooltip", () => ({
Tooltip: ({ children }: { children: ReactNode }) => <>{children}</>,
TooltipContent: ({ children }: { children: ReactNode }) => <>{children}</>,
TooltipTrigger: ({ children }: { children: ReactNode }) => <>{children}</>,
}));
vi.mock("./provider-icon-cell", () => ({
ProviderIconCell: ({ provider }: { provider: string }) => (
<span data-testid={`provider-icon-${provider}`}>{provider}</span>
),
}));
// ---------------------------------------------------------------------------
// Import after mocks
// ---------------------------------------------------------------------------
@@ -148,6 +160,26 @@ function renderFindingCell(
render(<div>{CellComponent({ row: { original: group } })}</div>);
}
function renderFindingGroupTitleCell(overrides?: Partial<FindingGroupRow>) {
const columns = getColumnFindingGroups({
rowSelection: {},
selectableRowCount: 1,
onDrillDown: vi.fn(),
});
const findingColumn = columns.find(
(col) => (col as { accessorKey?: string }).accessorKey === "finding",
);
if (!findingColumn?.cell) throw new Error("finding column not found");
const group = makeGroup(overrides);
const CellComponent = findingColumn.cell as (props: {
row: { original: FindingGroupRow };
}) => ReactNode;
render(<div>{CellComponent({ row: { original: group } })}</div>);
}
function renderImpactedResourcesCell(overrides?: Partial<FindingGroupRow>) {
const columns = getColumnFindingGroups({
rowSelection: {},
@@ -171,11 +203,13 @@ function renderImpactedResourcesCell(overrides?: Partial<FindingGroupRow>) {
}
function renderSelectCell(overrides?: Partial<FindingGroupRow>) {
const onDrillDown =
vi.fn<(checkId: string, group: FindingGroupRow) => void>();
const toggleSelected = vi.fn();
const columns = getColumnFindingGroups({
rowSelection: {},
selectableRowCount: 1,
onDrillDown: vi.fn(),
onDrillDown,
});
const selectColumn = columns.find(
@@ -206,7 +240,7 @@ function renderSelectCell(overrides?: Partial<FindingGroupRow>) {
</div>,
);
return { toggleSelected };
return { onDrillDown, toggleSelected };
}
// ---------------------------------------------------------------------------
@@ -231,6 +265,15 @@ describe("column-finding-groups — accessibility of check title cell", () => {
expect(impactedProvidersColumn).toBeUndefined();
});
it("should render the first provider icon with its provider name", () => {
// Given
renderFindingGroupTitleCell({ providers: ["iac"] });
// Then
expect(screen.getByTestId("provider-icon-iac")).toBeInTheDocument();
expect(screen.getByText("Infrastructure as Code")).toBeInTheDocument();
});
it("should render the check title as a button element (not a <p>)", () => {
// Given
const onDrillDown =
@@ -332,6 +375,47 @@ describe("column-finding-groups — accessibility of check title cell", () => {
}),
);
});
it("should keep zero-resource fallback groups non-clickable even when fallback counts are present", () => {
// Given
const onDrillDown =
vi.fn<(checkId: string, group: FindingGroupRow) => void>();
renderFindingCell("Fallback IaC Check", onDrillDown, {
resourcesTotal: 0,
resourcesFail: 0,
failCount: 0,
passCount: 2,
manualCount: 1,
});
// Then
expect(
screen.queryByRole("button", { name: "Fallback IaC Check" }),
).not.toBeInTheDocument();
expect(screen.getByText("Fallback IaC Check")).toBeInTheDocument();
expect(onDrillDown).not.toHaveBeenCalled();
});
it("should keep fallback groups non-clickable when the displayed total is zero", () => {
// Given
const onDrillDown =
vi.fn<(checkId: string, group: FindingGroupRow) => void>();
// When
renderFindingCell("No failing findings", onDrillDown, {
resourcesTotal: 0,
resourcesFail: 0,
failCount: 0,
passCount: 0,
});
// Then
expect(
screen.queryByRole("button", { name: "No failing findings" }),
).not.toBeInTheDocument();
expect(screen.getByText("No failing findings")).toBeInTheDocument();
});
});
describe("column-finding-groups — impacted resources count", () => {
@@ -345,6 +429,36 @@ describe("column-finding-groups — impacted resources count", () => {
// Then
expect(screen.getByText("3/5")).toBeInTheDocument();
});
it("should fall back to finding counts when resources total is zero", () => {
// Given/When
renderImpactedResourcesCell({
resourcesTotal: 0,
resourcesFail: 0,
failCount: 3,
passCount: 2,
muted: false,
});
// Then
expect(screen.getByText("3/5")).toBeInTheDocument();
});
it("should include muted findings in the denominator when the row is muted", () => {
// Given/When
renderImpactedResourcesCell({
resourcesTotal: 0,
resourcesFail: 0,
failCount: 3,
passCount: 2,
failMutedCount: 4,
passMutedCount: 1,
muted: true,
});
// Then
expect(screen.getByText("3/10")).toBeInTheDocument();
});
});
describe("column-finding-groups — group selection", () => {
@@ -357,6 +471,42 @@ describe("column-finding-groups — group selection", () => {
expect(screen.getByRole("checkbox", { name: "Select row" })).toBeDisabled();
});
it("should hide the chevron for zero-resource fallback groups even when fallback counts are present", () => {
// Given
const { onDrillDown } = renderSelectCell({
resourcesTotal: 0,
resourcesFail: 0,
failCount: 0,
passCount: 2,
manualCount: 1,
});
// Then
expect(
screen.queryByRole("button", {
name: "Expand S3 Bucket Public Access",
}),
).not.toBeInTheDocument();
expect(onDrillDown).not.toHaveBeenCalled();
});
it("should hide the chevron for zero-resource groups when the displayed total is zero", () => {
// Given/When
renderSelectCell({
resourcesTotal: 0,
resourcesFail: 0,
failCount: 0,
passCount: 0,
});
// Then
expect(
screen.queryByRole("button", {
name: "Expand S3 Bucket Public Access",
}),
).not.toBeInTheDocument();
});
});
describe("column-finding-groups — indicators", () => {
@@ -4,6 +4,11 @@ import { ColumnDef, RowSelectionState } from "@tanstack/react-table";
import { ChevronRight } from "lucide-react";
import { Checkbox } from "@/components/shadcn";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from "@/components/shadcn/tooltip";
import {
DataTableColumnHeader,
SeverityBadge,
@@ -11,15 +16,19 @@ import {
} from "@/components/ui/table";
import { cn } from "@/lib";
import {
canDrillDownFindingGroup,
getFilteredFindingGroupDelta,
getFindingGroupImpactedCounts,
isFindingGroupMuted,
} from "@/lib/findings-groups";
import { FindingGroupRow } from "@/types";
import { getProviderDisplayName } from "@/types/providers";
import { DataTableRowActions } from "./data-table-row-actions";
import { canMuteFindingGroup } from "./finding-group-selection";
import { ImpactedResourcesCell } from "./impacted-resources-cell";
import { DeltaValues, NotificationIndicator } from "./notification-indicator";
import { NotificationIndicator } from "./notification-indicator";
import { ProviderIconCell } from "./provider-icon-cell";
interface GetColumnFindingGroupsOptions {
rowSelection: RowSelectionState;
@@ -83,14 +92,7 @@ export function getColumnFindingGroups({
const allMuted = isFindingGroupMuted(group);
const isExpanded = expandedCheckId === group.checkId;
const deltaKey = getFilteredFindingGroupDelta(group, filters);
const delta =
deltaKey === "new"
? DeltaValues.NEW
: deltaKey === "changed"
? DeltaValues.CHANGED
: DeltaValues.NONE;
const canExpand = group.resourcesTotal > 0;
const canExpand = canDrillDownFindingGroup(group);
const canSelect = canMuteFindingGroup({
resourcesFail: group.resourcesFail,
resourcesTotal: group.resourcesTotal,
@@ -101,7 +103,7 @@ export function getColumnFindingGroups({
return (
<div className="flex items-center gap-2">
<NotificationIndicator
delta={delta}
delta={deltaKey}
isMuted={allMuted}
showDeltaWhenMuted
/>
@@ -175,23 +177,43 @@ export function getColumnFindingGroups({
),
cell: ({ row }) => {
const group = row.original;
const canExpand = group.resourcesTotal > 0;
const canExpand = canDrillDownFindingGroup(group);
const provider = group.providers[0];
const providerName = provider
? getProviderDisplayName(provider)
: undefined;
return (
<div>
{canExpand ? (
<button
type="button"
className="text-text-neutral-primary hover:text-button-tertiary w-full cursor-pointer border-none bg-transparent p-0 text-left text-sm break-words whitespace-normal hover:underline"
onClick={() => onDrillDown(group.checkId, group)}
>
{group.checkTitle}
</button>
) : (
<span className="text-text-neutral-primary w-full text-left text-sm break-words whitespace-normal">
{group.checkTitle}
</span>
)}
<div className="flex items-center gap-2">
{provider && providerName ? (
<Tooltip>
<TooltipTrigger asChild>
<div className="shrink-0">
<ProviderIconCell
provider={provider}
size={20}
className="size-5 rounded-none bg-transparent"
/>
</div>
</TooltipTrigger>
<TooltipContent side="top">{providerName}</TooltipContent>
</Tooltip>
) : null}
<div>
{canExpand ? (
<button
type="button"
className="text-text-neutral-primary hover:text-button-tertiary w-full cursor-pointer border-none bg-transparent p-0 text-left text-sm break-words whitespace-normal hover:underline"
onClick={() => onDrillDown(group.checkId, group)}
>
{group.checkTitle}
</button>
) : (
<span className="text-text-neutral-primary w-full text-left text-sm break-words whitespace-normal">
{group.checkTitle}
</span>
)}
</div>
</div>
);
},
@@ -216,10 +238,11 @@ export function getColumnFindingGroups({
),
cell: ({ row }) => {
const group = row.original;
const counts = getFindingGroupImpactedCounts(group);
return (
<ImpactedResourcesCell
impacted={group.resourcesFail}
total={group.resourcesTotal}
impacted={counts.impacted}
total={counts.total}
/>
);
},
@@ -1,15 +1,15 @@
"use client";
import { ColumnDef } from "@tanstack/react-table";
import { Database } from "lucide-react";
import { Container } from "lucide-react";
import { CodeSnippet } from "@/components/ui/code-snippet/code-snippet";
import { DateWithTime } from "@/components/ui/entities";
import { DateWithTime, EntityInfo } from "@/components/ui/entities";
import {
DataTableColumnHeader,
SeverityBadge,
StatusFindingBadge,
} from "@/components/ui/table";
import { getRegionFlag } from "@/lib/region-flags";
import { FindingProps, ProviderType } from "@/types";
import { FindingDetailDrawer } from "./finding-detail-drawer";
@@ -126,18 +126,25 @@ export function getStandaloneFindingColumns({
<DataTableColumnHeader column={column} title="Resource name" />
),
cell: ({ row }) => {
const resourceName = getResourceData(row, "name");
if (resourceName === "-") {
return <p className="text-text-neutral-primary text-sm">-</p>;
}
const name = getResourceData(row, "name");
const uid = getResourceData(row, "uid");
const entityAlias =
typeof name === "string" && name.trim().length > 0 && name !== "-"
? name
: undefined;
const entityId =
typeof uid === "string" && uid.trim().length > 0 && uid !== "-"
? uid
: undefined;
return (
<CodeSnippet
value={resourceName as string}
formatter={(value: string) => `...${value.slice(-10)}`}
icon={<Database size={16} />}
/>
<div className="max-w-[240px]">
<EntityInfo
nameIcon={<Container className="size-4" />}
entityAlias={entityAlias}
entityId={entityId}
/>
</div>
);
},
enableSorting: false,
@@ -161,12 +168,17 @@ export function getStandaloneFindingColumns({
{
accessorKey: "provider",
header: ({ column }) => (
<DataTableColumnHeader column={column} title="Provider" />
<DataTableColumnHeader column={column} title="Cloud Provider" />
),
cell: ({ row }) => {
const provider = getProviderData(row, "provider");
return <ProviderIconCell provider={provider as ProviderType} />;
return (
<ProviderIconCell
provider={provider as ProviderType}
className="size-8"
/>
);
},
enableSorting: false,
},
@@ -193,10 +205,17 @@ export function getStandaloneFindingColumns({
cell: ({ row }) => {
const region = getResourceData(row, "region");
const regionText = typeof region === "string" ? region : "-";
const regionFlag =
typeof region === "string" ? getRegionFlag(region) : "";
return (
<p className="text-text-neutral-primary max-w-[120px] truncate text-sm">
{regionText}
</p>
<span className="text-text-neutral-primary flex max-w-[140px] items-center gap-1.5 truncate text-sm">
{regionFlag && (
<span className="translate-y-px text-base leading-none">
{regionFlag}
</span>
)}
<span className="truncate">{regionText}</span>
</span>
);
},
enableSorting: false,
@@ -30,7 +30,6 @@ export function FindingDetailDrawer({
}: FindingDetailDrawerProps) {
const drawer = useResourceDetailDrawer({
resources: [findingToFindingResourceRow(finding)],
checkId: finding.attributes.check_id,
totalResourceCount: 1,
initialIndex: defaultOpen || inline ? 0 : null,
});
@@ -63,6 +62,7 @@ export function FindingDetailDrawer({
checkMeta={drawer.checkMeta}
currentIndex={drawer.currentIndex}
totalResources={drawer.totalResources}
currentResource={drawer.currentResource}
currentFinding={drawer.currentFinding}
otherFindings={drawer.otherFindings}
onNavigatePrev={drawer.navigatePrev}
@@ -87,6 +87,7 @@ export function FindingDetailDrawer({
checkMeta={drawer.checkMeta}
currentIndex={drawer.currentIndex}
totalResources={drawer.totalResources}
currentResource={drawer.currentResource}
currentFinding={drawer.currentFinding}
otherFindings={drawer.otherFindings}
onNavigatePrev={drawer.navigatePrev}
@@ -22,6 +22,7 @@ import { useFindingGroupResourceState } from "@/hooks/use-finding-group-resource
import { cn, hasHistoricalFindingFilter } from "@/lib";
import {
getFilteredFindingGroupDelta,
getFindingGroupImpactedCounts,
isFindingGroupMuted,
} from "@/lib/findings-groups";
import { FindingGroupRow } from "@/types";
@@ -30,7 +31,8 @@ import { FloatingMuteButton } from "../floating-mute-button";
import { getColumnFindingResources } from "./column-finding-resources";
import { FindingsSelectionContext } from "./findings-selection-context";
import { ImpactedResourcesCell } from "./impacted-resources-cell";
import { DeltaValues, NotificationIndicator } from "./notification-indicator";
import { getFindingGroupEmptyStateMessage } from "./inline-resource-container.utils";
import { NotificationIndicator } from "./notification-indicator";
import { ResourceDetailDrawer } from "./resource-detail-drawer";
interface FindingsGroupDrillDownProps {
@@ -96,14 +98,8 @@ export function FindingsGroupDrillDown({
// Delta for the sticky header
const deltaKey = getFilteredFindingGroupDelta(group, filters);
const delta =
deltaKey === "new"
? DeltaValues.NEW
: deltaKey === "changed"
? DeltaValues.CHANGED
: DeltaValues.NONE;
const allMuted = isFindingGroupMuted(group);
const impactedCounts = getFindingGroupImpactedCounts(group);
const rows = table.getRowModel().rows;
@@ -139,7 +135,7 @@ export function FindingsGroupDrillDown({
{/* Notification indicator */}
<NotificationIndicator
delta={delta}
delta={deltaKey}
isMuted={allMuted}
showDeltaWhenMuted
/>
@@ -159,8 +155,8 @@ export function FindingsGroupDrillDown({
{/* Impacted resources count */}
<ImpactedResourcesCell
impacted={group.resourcesFail}
total={group.resourcesTotal}
impacted={impactedCounts.impacted}
total={impactedCounts.total}
/>
</div>
</div>
@@ -209,9 +205,7 @@ export function FindingsGroupDrillDown({
colSpan={columns.length}
className="h-24 text-center"
>
{Object.keys(filters).length > 0
? "No resources found for the selected filters."
: "No resources found."}
{getFindingGroupEmptyStateMessage(group, filters)}
</TableCell>
</TableRow>
) : null}
@@ -248,8 +242,10 @@ export function FindingsGroupDrillDown({
checkMeta={drawer.checkMeta}
currentIndex={drawer.currentIndex}
totalResources={drawer.totalResources}
currentResource={drawer.currentResource}
currentFinding={drawer.currentFinding}
otherFindings={drawer.otherFindings}
showSyntheticResourceHint={group.resourcesTotal === 0}
onNavigatePrev={drawer.navigatePrev}
onNavigateNext={drawer.navigateNext}
onMuteComplete={handleDrawerMuteComplete}
@@ -6,6 +6,7 @@ import { useRef, useState } from "react";
import { resolveFindingIdsByVisibleGroupResources } from "@/actions/findings/findings-by-resource";
import { DataTable } from "@/components/ui/table";
import { canDrillDownFindingGroup } from "@/lib/findings-groups";
import { FindingGroupRow, MetaDataProps } from "@/types";
import { FloatingMuteButton } from "../floating-mute-button";
@@ -140,7 +141,7 @@ export function FindingsGroupTable({
const handleDrillDown = (checkId: string, group: FindingGroupRow) => {
// No resources in the group → nothing to show, skip drill-down
if (group.resourcesTotal === 0) return;
if (!canDrillDownFindingGroup(group)) return;
// Toggle: same group = collapse, different = switch
if (expandedCheckId === checkId) {
@@ -20,6 +20,7 @@ import { getColumnFindingResources } from "./column-finding-resources";
import { FindingsSelectionContext } from "./findings-selection-context";
import {
getFilteredFindingGroupResourceCount,
getFindingGroupEmptyStateMessage,
getFindingGroupSkeletonCount,
} from "./inline-resource-container.utils";
import { ResourceDetailDrawer } from "./resource-detail-drawer";
@@ -278,9 +279,7 @@ export function InlineResourceContainer({
colSpan={columns.length}
className="h-24 text-center"
>
{Object.keys(filters).length > 0
? "No resources found for the selected filters."
: "No resources found."}
{getFindingGroupEmptyStateMessage(group, filters)}
</TableCell>
</TableRow>
)}
@@ -334,8 +333,10 @@ export function InlineResourceContainer({
checkMeta={drawer.checkMeta}
currentIndex={drawer.currentIndex}
totalResources={drawer.totalResources}
currentResource={drawer.currentResource}
currentFinding={drawer.currentFinding}
otherFindings={drawer.otherFindings}
showSyntheticResourceHint={group.resourcesTotal === 0}
onNavigatePrev={drawer.navigatePrev}
onNavigateNext={drawer.navigateNext}
onMuteComplete={handleDrawerMuteComplete}
@@ -4,6 +4,7 @@ import type { FindingGroupRow } from "@/types";
import {
getFilteredFindingGroupResourceCount,
getFindingGroupEmptyStateMessage,
getFindingGroupSkeletonCount,
isFailOnlyStatusFilter,
} from "./inline-resource-container.utils";
@@ -99,3 +100,47 @@ describe("getFindingGroupSkeletonCount", () => {
).toBe(1);
});
});
// Covers the three empty-state messages: the "Include muted" hint, the
// generic filtered message, and the unfiltered fallback.
describe("getFindingGroupEmptyStateMessage", () => {
  it("returns the muted hint when muted findings are excluded and no visible resources remain", () => {
    // Muted findings exist (mutedCount: 1) but the filters exclude them,
    // so the helper should suggest enabling "Include muted".
    expect(
      getFindingGroupEmptyStateMessage(
        makeGroup({
          resourcesTotal: 0,
          resourcesFail: 0,
          mutedCount: 1,
          failCount: 0,
          passCount: 0,
        }),
        {
          "filter[status]": "FAIL",
          "filter[muted]": "false",
        },
      ),
    ).toBe(
      "No resources match the current filters. Try enabling Include muted to view muted findings.",
    );
  });

  it("keeps the generic filtered empty state when muted findings are already included", () => {
    // With filter[muted]=include there is nothing extra to suggest, so the
    // generic filtered message is expected even though mutedCount > 0.
    expect(
      getFindingGroupEmptyStateMessage(
        makeGroup({
          resourcesTotal: 0,
          resourcesFail: 0,
          mutedCount: 1,
        }),
        {
          "filter[status]": "FAIL",
          "filter[muted]": "include",
        },
      ),
    ).toBe("No resources found for the selected filters.");
  });

  it("keeps the generic empty state when no filters are active", () => {
    // An empty filters object short-circuits to the plain message.
    expect(getFindingGroupEmptyStateMessage(makeGroup(), {})).toBe(
      "No resources found.",
    );
  });
});
@@ -33,6 +33,18 @@ export function isFailOnlyStatusFilter(
return multiStatusValues.length === 1 && multiStatusValues[0] === "FAIL";
}
// True when the active filters explicitly include muted findings, i.e.
// filter[muted] is "include" (either as a plain value or within an array).
function includesMutedFindings(
  filters: Record<string, string | string[] | undefined>,
): boolean {
  const mutedValue = filters["filter[muted]"];
  const candidates = Array.isArray(mutedValue) ? mutedValue : [mutedValue];
  return candidates.some((entry) => entry === "include");
}
export function getFilteredFindingGroupResourceCount(
group: FindingGroupRow,
filters: Record<string, string | string[] | undefined>,
@@ -53,3 +65,24 @@ export function getFindingGroupSkeletonCount(
// empty state ("No resources found") replaces the skeleton.
return Math.max(1, Math.min(filteredTotal, maxSkeletonRows));
}
/**
 * Picks the empty-state message for a drill-down table with no visible rows.
 *
 * - No active filters → plain "No resources found.".
 * - Filters exclude muted findings, the group has muted findings, and the
 *   filtered count is zero → hint the user to enable "Include muted".
 * - Otherwise → generic filtered empty state.
 */
export function getFindingGroupEmptyStateMessage(
  group: FindingGroupRow,
  filters: Record<string, string | string[] | undefined>,
): string {
  if (Object.keys(filters).length === 0) {
    return "No resources found.";
  }
  const shouldSuggestIncludeMuted =
    !includesMutedFindings(filters) &&
    (group.mutedCount ?? 0) > 0 &&
    getFilteredFindingGroupResourceCount(group, filters) === 0;
  return shouldSuggestIncludeMuted
    ? "No resources match the current filters. Try enabling Include muted to view muted findings."
    : "No resources found for the selected filters.";
}
@@ -17,14 +17,11 @@ import {
} from "@/components/shadcn/tooltip";
import { DOCS_URLS } from "@/lib/external-urls";
import { cn } from "@/lib/utils";
import { FINDING_DELTA, type FindingDelta } from "@/types";
export const DeltaValues = {
NEW: "new",
CHANGED: "changed",
NONE: "none",
} as const;
export const DeltaValues = FINDING_DELTA;
export type DeltaType = (typeof DeltaValues)[keyof typeof DeltaValues];
export type DeltaType = Exclude<FindingDelta, null>;
interface NotificationIndicatorProps {
delta?: DeltaType;
@@ -124,12 +121,12 @@ function MutedIndicator({ mutedReason }: { mutedReason?: string }) {
<PopoverTrigger asChild>
<button
type="button"
className="flex w-4 shrink-0 cursor-pointer items-center justify-center bg-transparent p-0"
className="flex w-5 shrink-0 cursor-pointer items-center justify-center bg-transparent p-0"
onClick={(e) => e.stopPropagation()}
onMouseEnter={() => setOpen(true)}
onMouseLeave={() => setOpen(false)}
>
<MutedIcon className="text-bg-data-muted size-2" />
<MutedIcon className="text-bg-data-muted size-3" />
</button>
</PopoverTrigger>
<PopoverContent
@@ -14,12 +14,14 @@ const {
mockWindowOpen,
mockClipboardWriteText,
mockSearchParamsState,
mockNotificationIndicator,
} = vi.hoisted(() => ({
mockGetComplianceIcon: vi.fn((_: string) => null as string | null),
mockGetCompliancesOverview: vi.fn(),
mockWindowOpen: vi.fn(),
mockClipboardWriteText: vi.fn(),
mockSearchParamsState: { value: "" },
mockNotificationIndicator: vi.fn(),
}));
vi.mock("next/navigation", () => ({
@@ -134,7 +136,12 @@ vi.mock("@/components/shadcn/dropdown", () => ({
}));
vi.mock("@/components/shadcn/skeleton/skeleton", () => ({
Skeleton: () => <div />,
Skeleton: ({
className,
...props
}: HTMLAttributes<HTMLDivElement> & { className?: string }) => (
<div data-testid="inline-skeleton" className={className} {...props} />
),
}));
vi.mock("@/components/shadcn/spinner/spinner", () => ({
@@ -293,7 +300,11 @@ vi.mock("../delta-indicator", () => ({
}));
vi.mock("../notification-indicator", () => ({
NotificationIndicator: () => null,
NotificationIndicator: (props: Record<string, unknown>) => {
mockNotificationIndicator(props);
return null;
},
DeltaValues: { NEW: "new", CHANGED: "changed", NONE: "none" } as const,
}));
vi.mock("./resource-detail-skeleton", () => ({
@@ -309,6 +320,7 @@ vi.mock("../../muted", () => ({
// ---------------------------------------------------------------------------
import type { ResourceDrawerFinding } from "@/actions/findings";
import type { FindingResourceRow } from "@/types";
import { ResourceDetailDrawerContent } from "./resource-detail-drawer-content";
import type { CheckMeta } from "./use-resource-detail-drawer";
@@ -374,6 +386,29 @@ const mockFinding: ResourceDrawerFinding = {
scan: null,
};
const mockResourceRow: FindingResourceRow = {
id: "row-1",
rowType: "resource",
findingId: "finding-1",
checkId: "s3_check",
providerType: "aws",
providerAlias: "prod",
providerUid: "123456789",
resourceName: "my-bucket",
resourceType: "Bucket",
resourceGroup: "default",
resourceUid: "arn:aws:s3:::bucket",
service: "s3",
region: "us-east-1",
severity: "critical",
status: "FAIL",
delta: null,
isMuted: false,
mutedReason: undefined,
firstSeenAt: null,
lastSeenAt: null,
};
// ---------------------------------------------------------------------------
// Fix 1: Lighthouse AI button text change
// ---------------------------------------------------------------------------
@@ -937,3 +972,461 @@ describe("ResourceDetailDrawerContent — other findings mute refresh", () => {
expect(onMuteComplete).not.toHaveBeenCalled();
});
});
describe("ResourceDetailDrawerContent — synthetic resource empty state", () => {
it("should explain that simulated IaC resources never have other findings", () => {
// Given/When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating={false}
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={1}
currentFinding={mockFinding}
otherFindings={[]}
showSyntheticResourceHint
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(
screen.getByText(
"No other findings are available for this IaC resource.",
),
).toBeInTheDocument();
});
});
describe("ResourceDetailDrawerContent — current resource row display", () => {
it("should render resource card fields from the current resource row instead of the fetched finding", () => {
// Given
const currentResource: FindingResourceRow = {
...mockResourceRow,
providerAlias: "row-account",
providerUid: "row-provider-uid",
resourceName: "row-resource-name",
resourceUid: "row-resource-uid",
service: "row-service",
region: "eu-west-1",
resourceType: "row-type",
resourceGroup: "row-group",
severity: "low",
status: "PASS",
};
const fetchedFinding: ResourceDrawerFinding = {
...mockFinding,
providerAlias: "finding-account",
providerUid: "finding-provider-uid",
resourceName: "finding-resource-name",
resourceUid: "finding-resource-uid",
resourceService: "finding-service",
resourceRegion: "ap-south-1",
resourceType: "finding-type",
resourceGroup: "finding-group",
severity: "critical",
status: "FAIL",
};
// When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating={false}
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={1}
currentResource={currentResource}
currentFinding={fetchedFinding}
otherFindings={[]}
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(screen.getByText("row-service")).toBeInTheDocument();
expect(screen.getByText("eu-west-1")).toBeInTheDocument();
expect(screen.getByText("row-group")).toBeInTheDocument();
expect(screen.getByText("row-type")).toBeInTheDocument();
expect(screen.getByText("FAIL")).toBeInTheDocument();
expect(screen.getByText("critical")).toBeInTheDocument();
expect(screen.queryByText("finding-service")).not.toBeInTheDocument();
expect(screen.queryByText("ap-south-1")).not.toBeInTheDocument();
expect(screen.queryByText("finding-group")).not.toBeInTheDocument();
expect(screen.queryByText("finding-type")).not.toBeInTheDocument();
});
it("should prefer the fetched finding status and severity in the header when the current row is stale", () => {
// Given
const currentResource: FindingResourceRow = {
...mockResourceRow,
severity: "critical",
status: "FAIL",
isMuted: false,
};
const fetchedFinding: ResourceDrawerFinding = {
...mockFinding,
severity: "low",
status: "PASS",
isMuted: true,
mutedReason: "Muted after refresh",
};
// When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating={false}
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={1}
currentResource={currentResource}
currentFinding={fetchedFinding}
otherFindings={[]}
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(screen.getByText("PASS")).toBeInTheDocument();
expect(screen.getByText("low")).toBeInTheDocument();
expect(screen.queryByText("FAIL")).not.toBeInTheDocument();
expect(screen.queryByText("critical")).not.toBeInTheDocument();
});
});
describe("ResourceDetailDrawerContent — header skeleton while navigating", () => {
it("should keep row-backed navigation chrome visible while hiding stale finding details during carousel navigation", () => {
// Given
const currentResource: FindingResourceRow = {
...mockResourceRow,
checkId: mockCheckMeta.checkId,
resourceName: "next-bucket",
resourceUid: "next-resource-uid",
service: "ec2",
region: "eu-west-1",
resourceType: "Instance",
resourceGroup: "row-group",
severity: "low",
status: "PASS",
findingId: "finding-2",
};
// When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={2}
currentResource={currentResource}
currentFinding={mockFinding}
otherFindings={[]}
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(screen.getByText("PASS")).toBeInTheDocument();
expect(screen.getByText("low")).toBeInTheDocument();
expect(screen.getByText("ec2")).toBeInTheDocument();
expect(screen.getByText("eu-west-1")).toBeInTheDocument();
expect(screen.getByText("row-group")).toBeInTheDocument();
expect(
screen.getByRole("button", { name: "Finding Overview" }),
).toBeInTheDocument();
expect(
screen.getByRole("button", { name: "Other Findings For This Resource" }),
).toBeInTheDocument();
expect(screen.queryByText("uid-1")).not.toBeInTheDocument();
expect(screen.queryByText("Status extended")).not.toBeInTheDocument();
expect(screen.queryByText("FAIL")).not.toBeInTheDocument();
expect(screen.queryByText("critical")).not.toBeInTheDocument();
});
it("should skeletonize stale check-level header content when navigating to a different check", () => {
// Given
const currentResource: FindingResourceRow = {
...mockResourceRow,
checkId: "ec2_check",
findingId: "finding-2",
severity: "low",
status: "PASS",
};
// When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={2}
currentResource={currentResource}
currentFinding={mockFinding}
otherFindings={[]}
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(screen.getByTestId("drawer-header-skeleton")).toBeInTheDocument();
expect(screen.queryByText("S3 Check")).not.toBeInTheDocument();
expect(screen.queryByText("PCI-DSS")).not.toBeInTheDocument();
expect(screen.getByText("PASS")).toBeInTheDocument();
expect(screen.getByText("low")).toBeInTheDocument();
});
it("should keep same-check overview sections visible while hiding stale finding-specific details during navigation", () => {
// Given/When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={2}
currentResource={mockResourceRow}
currentFinding={mockFinding}
otherFindings={[]}
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(screen.getByText("Risk:")).toBeInTheDocument();
expect(screen.getByText("Description:")).toBeInTheDocument();
expect(screen.getByText("Remediation:")).toBeInTheDocument();
expect(screen.getByText("security")).toBeInTheDocument();
expect(screen.queryByText("Status Extended:")).not.toBeInTheDocument();
expect(screen.queryByText("uid-1")).not.toBeInTheDocument();
expect(
screen.queryByRole("link", {
name: "Analyze This Finding With Lighthouse AI",
}),
).not.toBeInTheDocument();
});
it("should keep the overview tab shell visible with section skeletons when navigating to a different check", () => {
// Given
const currentResource: FindingResourceRow = {
...mockResourceRow,
checkId: "ec2_check",
findingId: "finding-2",
severity: "low",
status: "PASS",
};
// When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={2}
currentResource={currentResource}
currentFinding={mockFinding}
otherFindings={[]}
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(
screen.getByTestId("overview-navigation-skeleton"),
).toBeInTheDocument();
expect(screen.queryByText("Risk:")).not.toBeInTheDocument();
expect(screen.queryByText("Description:")).not.toBeInTheDocument();
expect(screen.queryByText("Remediation:")).not.toBeInTheDocument();
expect(
screen.getByRole("button", { name: "Finding Overview" }),
).toBeInTheDocument();
expect(
screen.getByRole("button", { name: "Other Findings For This Resource" }),
).toBeInTheDocument();
});
it("should keep other findings table headers visible while skeletonizing only the rows during navigation", () => {
// Given/When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={2}
currentResource={mockResourceRow}
currentFinding={mockFinding}
otherFindings={[]}
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(screen.getByText("Status")).toBeInTheDocument();
expect(screen.getByText("Finding")).toBeInTheDocument();
expect(screen.getByText("Severity")).toBeInTheDocument();
expect(screen.getByText("Time")).toBeInTheDocument();
expect(
screen.getByTestId("other-findings-total-entries-skeleton"),
).toBeInTheDocument();
expect(
screen.getByTestId("other-findings-navigation-skeleton"),
).toBeInTheDocument();
});
it("should keep scans labels visible while skeletonizing only the scan values during navigation", () => {
// Given/When
render(
<ResourceDetailDrawerContent
isLoading={false}
isNavigating
checkMeta={mockCheckMeta}
currentIndex={0}
totalResources={2}
currentResource={mockResourceRow}
currentFinding={mockFinding}
otherFindings={[]}
onNavigatePrev={vi.fn()}
onNavigateNext={vi.fn()}
onMuteComplete={vi.fn()}
/>,
);
// Then
expect(
screen.getByText("Showing the latest scan that evaluated this finding"),
).toBeInTheDocument();
expect(screen.getByText("Scan Name")).toBeInTheDocument();
expect(screen.getByText("Resources Scanned")).toBeInTheDocument();
expect(screen.getByText("Progress")).toBeInTheDocument();
expect(screen.getByText("Trigger")).toBeInTheDocument();
expect(screen.getByText("State")).toBeInTheDocument();
expect(screen.getByText("Duration")).toBeInTheDocument();
expect(screen.getByText("Started At")).toBeInTheDocument();
expect(screen.getByText("Completed At")).toBeInTheDocument();
expect(screen.getByText("Launched At")).toBeInTheDocument();
expect(screen.getByText("Scheduled At")).toBeInTheDocument();
expect(screen.getByTestId("scans-navigation-skeleton")).toBeInTheDocument();
});
it("should keep the events tab shell visible while showing timeline row skeletons during navigation", () => {
  // Given — a fully loaded drawer that is mid-navigation.
  const navigationProps = {
    isLoading: false,
    isNavigating: true,
    checkMeta: mockCheckMeta,
    currentIndex: 0,
    totalResources: 2,
    currentResource: mockResourceRow,
    currentFinding: mockFinding,
    otherFindings: [],
    onNavigatePrev: vi.fn(),
    onNavigateNext: vi.fn(),
    onMuteComplete: vi.fn(),
  };

  // When
  render(<ResourceDetailDrawerContent {...navigationProps} />);

  // Then — the "Events" tab trigger stays rendered; only the timeline
  // rows are swapped for the navigation skeleton.
  expect(screen.getByRole("button", { name: "Events" })).toBeInTheDocument();
  expect(
    screen.getByTestId("events-navigation-skeleton"),
  ).toBeInTheDocument();
});
});
describe("ResourceDetailDrawerContent — other findings delta/muted indicator", () => {
  // Renders the drawer with exactly one "other" finding, built from the
  // shared mock finding plus the per-test overrides.
  const renderWithOtherFinding = (
    overrides: Partial<ResourceDrawerFinding>,
  ) => {
    const otherFinding: ResourceDrawerFinding = {
      ...mockFinding,
      id: "finding-2",
      uid: "uid-2",
      checkId: "ec2_check",
      checkTitle: "EC2 Check",
      ...overrides,
    };
    render(
      <ResourceDetailDrawerContent
        isLoading={false}
        isNavigating={false}
        checkMeta={mockCheckMeta}
        currentIndex={0}
        totalResources={1}
        currentFinding={mockFinding}
        otherFindings={[otherFinding]}
        onNavigatePrev={vi.fn()}
        onNavigateNext={vi.fn()}
        onMuteComplete={vi.fn()}
      />,
    );
  };

  const lastNotificationIndicatorPropsForOtherRow = () => {
    const { calls } = mockNotificationIndicator.mock;
    // The current-finding row renders first, so the final recorded call
    // belongs to the other-finding row.
    return calls[calls.length - 1][0];
  };

  it("should forward delta='new' to the NotificationIndicator for a new other finding", () => {
    renderWithOtherFinding({ delta: "new" });

    const props = lastNotificationIndicatorPropsForOtherRow();
    expect(props).toMatchObject({
      delta: "new",
      isMuted: false,
      showDeltaWhenMuted: true,
    });
  });

  it("should forward delta='changed' to the NotificationIndicator for a changed other finding", () => {
    renderWithOtherFinding({ delta: "changed" });

    const props = lastNotificationIndicatorPropsForOtherRow();
    expect(props).toMatchObject({
      delta: "changed",
    });
  });

  it("should pass delta=undefined when the finding has delta='none'", () => {
    renderWithOtherFinding({ delta: "none" });

    const props = lastNotificationIndicatorPropsForOtherRow();
    expect(props).toMatchObject({
      delta: undefined,
    });
  });

  it("should forward mutedReason and keep delta when a muted other finding is also new", () => {
    renderWithOtherFinding({
      delta: "new",
      isMuted: true,
      mutedReason: "False positive",
    });

    const props = lastNotificationIndicatorPropsForOtherRow();
    expect(props).toMatchObject({
      delta: "new",
      isMuted: true,
      mutedReason: "False positive",
      showDeltaWhenMuted: true,
    });
  });
});
File diff suppressed because it is too large. Load the diff to view it.
@@ -11,6 +11,7 @@ import {
DrawerHeader,
DrawerTitle,
} from "@/components/shadcn";
import type { FindingResourceRow } from "@/types";
import { ResourceDetailDrawerContent } from "./resource-detail-drawer-content";
import type { CheckMeta } from "./use-resource-detail-drawer";
@@ -23,8 +24,10 @@ interface ResourceDetailDrawerProps {
checkMeta: CheckMeta | null;
currentIndex: number;
totalResources: number;
currentResource: FindingResourceRow | null;
currentFinding: ResourceDrawerFinding | null;
otherFindings: ResourceDrawerFinding[];
showSyntheticResourceHint?: boolean;
onNavigatePrev: () => void;
onNavigateNext: () => void;
onMuteComplete: () => void;
@@ -38,8 +41,10 @@ export function ResourceDetailDrawer({
checkMeta,
currentIndex,
totalResources,
currentResource,
currentFinding,
otherFindings,
showSyntheticResourceHint = false,
onNavigatePrev,
onNavigateNext,
onMuteComplete,
@@ -64,8 +69,10 @@ export function ResourceDetailDrawer({
checkMeta={checkMeta}
currentIndex={currentIndex}
totalResources={totalResources}
currentResource={currentResource}
currentFinding={currentFinding}
otherFindings={otherFindings}
showSyntheticResourceHint={showSyntheticResourceHint}
onNavigatePrev={onNavigatePrev}
onNavigateNext={onNavigateNext}
onMuteComplete={onMuteComplete}
@@ -6,14 +6,17 @@ import { beforeEach, describe, expect, it, vi } from "vitest";
// ---------------------------------------------------------------------------
const {
getFindingByIdMock,
getLatestFindingsByResourceUidMock,
adaptFindingsByResourceResponseMock,
} = vi.hoisted(() => ({
getFindingByIdMock: vi.fn(),
getLatestFindingsByResourceUidMock: vi.fn(),
adaptFindingsByResourceResponseMock: vi.fn(),
}));
vi.mock("@/actions/findings", () => ({
getFindingById: getFindingByIdMock,
getLatestFindingsByResourceUid: getLatestFindingsByResourceUidMock,
adaptFindingsByResourceResponse: adaptFindingsByResourceResponseMock,
}));
@@ -109,6 +112,7 @@ describe("useResourceDetailDrawer — unmount cleanup", () => {
beforeEach(() => {
vi.clearAllMocks();
vi.restoreAllMocks();
getLatestFindingsByResourceUidMock.mockResolvedValue({ data: [] });
});
it("should abort the in-flight fetch controller when the hook unmounts", async () => {
@@ -116,9 +120,7 @@ describe("useResourceDetailDrawer — unmount cleanup", () => {
const abortSpy = vi.spyOn(AbortController.prototype, "abort");
// never-resolving fetch to simulate in-flight request
getLatestFindingsByResourceUidMock.mockImplementation(
() => new Promise(() => {}),
);
getFindingByIdMock.mockImplementation(() => new Promise(() => {}));
adaptFindingsByResourceResponseMock.mockReturnValue([]);
const resources = [makeResource()];
@@ -126,7 +128,6 @@ describe("useResourceDetailDrawer — unmount cleanup", () => {
const { result, unmount } = renderHook(() =>
useResourceDetailDrawer({
resources,
checkId: "s3_check",
}),
);
@@ -136,7 +137,7 @@ describe("useResourceDetailDrawer — unmount cleanup", () => {
});
// Verify a fetch was started
expect(getLatestFindingsByResourceUidMock).toHaveBeenCalledTimes(1);
expect(getFindingByIdMock).toHaveBeenCalledTimes(1);
// Reset spy count to detect only the unmount abort
abortSpy.mockClear();
@@ -158,7 +159,6 @@ describe("useResourceDetailDrawer — unmount cleanup", () => {
const { unmount } = renderHook(() =>
useResourceDetailDrawer({
resources,
checkId: "s3_check",
}),
);
@@ -173,40 +173,59 @@ describe("useResourceDetailDrawer — unmount cleanup", () => {
describe("useResourceDetailDrawer — other findings filtering", () => {
beforeEach(() => {
vi.clearAllMocks();
getLatestFindingsByResourceUidMock.mockResolvedValue({ data: [] });
});
it("should exclude the current finding from otherFindings and preserve API order", async () => {
it("should load other findings from the current resource uid and exclude only the current finding (status is filtered server-side)", async () => {
const resources = [makeResource()];
getLatestFindingsByResourceUidMock.mockResolvedValue({ data: [] });
adaptFindingsByResourceResponseMock.mockReturnValue([
makeDrawerFinding({
id: "current",
checkId: "s3_check",
checkTitle: "Current",
status: "FAIL",
severity: "critical",
}),
makeDrawerFinding({
id: "other-1",
checkId: "check-other-1",
checkTitle: "Other 1",
status: "FAIL",
severity: "critical",
}),
makeDrawerFinding({
id: "other-2",
checkId: "check-other-2",
checkTitle: "Other 2",
status: "FAIL",
severity: "medium",
}),
]);
// Given — the API call applies filter[status]=FAIL server-side, so the
// mock returns only FAIL rows. The hook's only client-side job is to
// drop the row already shown above the table.
getFindingByIdMock.mockResolvedValue({ data: ["detail"] });
getLatestFindingsByResourceUidMock.mockResolvedValue({
data: ["resource"],
});
adaptFindingsByResourceResponseMock.mockImplementation(
(response: { data: string[] }) => {
if (response.data[0] === "detail") {
return [
makeDrawerFinding({
id: "finding-1",
checkId: "s3_check",
checkTitle: "Current",
status: "FAIL",
severity: "informational",
}),
];
}
return [
makeDrawerFinding({
id: "finding-3",
checkTitle: "First other finding",
status: "FAIL",
severity: "high",
}),
makeDrawerFinding({
id: "finding-1",
checkTitle: "Current finding duplicate from resource fetch",
status: "FAIL",
severity: "critical",
}),
makeDrawerFinding({
id: "finding-5",
checkTitle: "Second other finding",
status: "FAIL",
severity: "medium",
}),
];
},
);
const { result } = renderHook(() =>
useResourceDetailDrawer({
resources,
checkId: "s3_check",
}),
);
@@ -215,47 +234,77 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
await Promise.resolve();
});
// Then
expect(getFindingByIdMock).toHaveBeenCalledWith(
"finding-1",
"resources,scan.provider",
{ source: "resource-detail-drawer" },
);
expect(getLatestFindingsByResourceUidMock).toHaveBeenCalledWith({
resourceUid: "arn:aws:s3:::my-bucket",
pageSize: 50,
includeMuted: false,
});
expect(result.current.currentFinding?.id).toBe("finding-1");
expect(result.current.otherFindings.map((finding) => finding.id)).toEqual([
"other-1",
"other-2",
"finding-3",
"finding-5",
]);
});
it("should exclude non-FAIL findings from otherFindings", async () => {
const resources = [makeResource()];
it("should skip loading other findings for synthetic IaC resources and keep the current detail on findingId", async () => {
const resources = [
makeResource({
findingId: "synthetic-finding",
resourceUid: "synthetic://iac-resource",
}),
];
getLatestFindingsByResourceUidMock.mockResolvedValue({ data: [] });
// Given
getFindingByIdMock.mockResolvedValue({ data: ["detail"] });
adaptFindingsByResourceResponseMock.mockReturnValue([
makeDrawerFinding({
id: "current",
id: "synthetic-finding",
checkId: "s3_check",
status: "MANUAL",
severity: "informational",
}),
makeDrawerFinding({
id: "other-pass",
checkId: "check-pass",
status: "PASS",
severity: "low",
}),
makeDrawerFinding({
id: "other-manual",
checkId: "check-manual",
status: "MANUAL",
severity: "low",
}),
makeDrawerFinding({
id: "other-fail",
checkId: "check-fail",
status: "FAIL",
severity: "high",
}),
]);
const { result } = renderHook(() =>
useResourceDetailDrawer({
resources,
checkId: "s3_check",
canLoadOtherFindings: false,
}),
);
await act(async () => {
// When
result.current.openDrawer(0);
await Promise.resolve();
});
// Then
expect(getFindingByIdMock).toHaveBeenCalledWith(
"synthetic-finding",
"resources,scan.provider",
{ source: "resource-detail-drawer" },
);
expect(getLatestFindingsByResourceUidMock).not.toHaveBeenCalled();
expect(result.current.currentFinding?.id).toBe("synthetic-finding");
expect(result.current.otherFindings).toEqual([]);
});
it("should request muted findings only when explicitly enabled", async () => {
const resources = [makeResource()];
getLatestFindingsByResourceUidMock.mockResolvedValue({ data: [] });
adaptFindingsByResourceResponseMock.mockReturnValue([makeDrawerFinding()]);
const { result } = renderHook(() =>
useResourceDetailDrawer({
resources,
includeMutedInOtherFindings: true,
}),
);
@@ -264,10 +313,11 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
await Promise.resolve();
});
expect(result.current.currentFinding?.id).toBe("current");
expect(result.current.otherFindings.map((f) => f.id)).toEqual([
"other-fail",
]);
expect(getLatestFindingsByResourceUidMock).toHaveBeenCalledWith({
resourceUid: "arn:aws:s3:::my-bucket",
pageSize: 50,
includeMuted: true,
});
});
it("should keep isNavigating true for a cached resource long enough to render skeletons", async () => {
@@ -288,19 +338,19 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
}),
];
getLatestFindingsByResourceUidMock.mockImplementation(
async ({ resourceUid }: { resourceUid: string }) => ({
data: [resourceUid],
}),
);
getFindingByIdMock.mockImplementation(async (findingId: string) => ({
data: [findingId],
}));
adaptFindingsByResourceResponseMock.mockImplementation(
(response: { data: string[] }) => [
makeDrawerFinding({
id: response.data[0].includes("first") ? "finding-1" : "finding-2",
resourceUid: response.data[0],
resourceName: response.data[0].includes("first")
? "first-bucket"
: "second-bucket",
id: response.data[0],
resourceUid:
response.data[0] === "finding-1"
? "arn:aws:s3:::first-bucket"
: "arn:aws:s3:::second-bucket",
resourceName:
response.data[0] === "finding-1" ? "first-bucket" : "second-bucket",
}),
],
);
@@ -308,7 +358,6 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
const { result } = renderHook(() =>
useResourceDetailDrawer({
resources,
checkId: "s3_check",
}),
);
@@ -333,6 +382,8 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
expect(result.current.isNavigating).toBe(true);
await act(async () => {
await Promise.resolve();
await Promise.resolve();
vi.runAllTimers();
await Promise.resolve();
});
@@ -362,19 +413,19 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
}),
];
getLatestFindingsByResourceUidMock.mockImplementation(
async ({ resourceUid }: { resourceUid: string }) => ({
data: [resourceUid],
}),
);
getFindingByIdMock.mockImplementation(async (findingId: string) => ({
data: [findingId],
}));
adaptFindingsByResourceResponseMock.mockImplementation(
(response: { data: string[] }) => [
makeDrawerFinding({
id: response.data[0].includes("first") ? "finding-1" : "finding-2",
resourceUid: response.data[0],
resourceName: response.data[0].includes("first")
? "first-bucket"
: "second-bucket",
id: response.data[0],
resourceUid:
response.data[0] === "finding-1"
? "arn:aws:s3:::first-bucket"
: "arn:aws:s3:::second-bucket",
resourceName:
response.data[0] === "finding-1" ? "first-bucket" : "second-bucket",
}),
],
);
@@ -382,7 +433,6 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
const { result } = renderHook(() =>
useResourceDetailDrawer({
resources,
checkId: "s3_check",
}),
);
@@ -427,6 +477,154 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
vi.useRealTimers();
});
it("should update checkMeta when navigating to a resource with a different check", async () => {
  // Given — two drawer rows that belong to different checks (s3 vs ec2).
  const resources = [
    makeResource({
      id: "row-1",
      findingId: "finding-1",
      checkId: "s3_check",
    }),
    makeResource({
      id: "row-2",
      findingId: "finding-2",
      checkId: "ec2_check",
      resourceUid: "arn:aws:ec2:::instance/i-123",
      resourceName: "instance-1",
      service: "ec2",
    }),
  ];
  // The detail fetch echoes the requested finding id back as its payload…
  getFindingByIdMock.mockImplementation(async (findingId: string) => ({
    data: [findingId],
  }));
  getLatestFindingsByResourceUidMock.mockResolvedValue({ data: [] });
  // …and the adapter maps each id to check-specific metadata, so each
  // navigation step yields a different checkTitle/description.
  adaptFindingsByResourceResponseMock.mockImplementation(
    (response: { data: string[] }) => [
      response.data[0] === "finding-1"
        ? makeDrawerFinding({
            id: "finding-1",
            checkId: "s3_check",
            checkTitle: "S3 Check",
            description: "s3 description",
          })
        : makeDrawerFinding({
            id: "finding-2",
            checkId: "ec2_check",
            checkTitle: "EC2 Check",
            description: "ec2 description",
          }),
    ],
  );
  const { result } = renderHook(() =>
    useResourceDetailDrawer({
      resources,
    }),
  );
  // When — open on the first row, then navigate to the second.
  await act(async () => {
    result.current.openDrawer(0);
    await Promise.resolve();
  });
  expect(result.current.checkMeta?.checkTitle).toBe("S3 Check");
  await act(async () => {
    result.current.navigateNext();
    await Promise.resolve();
  });
  // Then — checkMeta is refreshed from the newly fetched finding.
  expect(result.current.checkMeta?.checkTitle).toBe("EC2 Check");
  expect(result.current.checkMeta?.description).toBe("ec2 description");
});
it("should keep the previous check metadata cached while reopening until the new finding arrives", async () => {
  // Given — two rows on different checks.
  const resources = [
    makeResource({
      id: "row-1",
      findingId: "finding-1",
      checkId: "s3_check",
    }),
    makeResource({
      id: "row-2",
      findingId: "finding-2",
      checkId: "ec2_check",
      resourceUid: "arn:aws:ec2:::instance/i-123",
      resourceName: "instance-1",
      service: "ec2",
    }),
  ];
  // Capture the resolver so the second finding's fetch can be held
  // in-flight and completed manually later in the test.
  let resolveSecondFinding: ((value: { data: string[] }) => void) | null =
    null;
  getFindingByIdMock.mockImplementation((findingId: string) => {
    if (findingId === "finding-2") {
      return new Promise((resolve) => {
        resolveSecondFinding = resolve;
      });
    }
    return Promise.resolve({ data: [findingId] });
  });
  getLatestFindingsByResourceUidMock.mockResolvedValue({ data: [] });
  adaptFindingsByResourceResponseMock.mockImplementation(
    (response: { data: string[] }) => [
      response.data[0] === "finding-1"
        ? makeDrawerFinding({
            id: "finding-1",
            checkId: "s3_check",
            checkTitle: "S3 Check",
            description: "s3 description",
          })
        : makeDrawerFinding({
            id: "finding-2",
            checkId: "ec2_check",
            checkTitle: "EC2 Check",
            description: "ec2 description",
          }),
    ],
  );
  const { result } = renderHook(() =>
    useResourceDetailDrawer({
      resources,
    }),
  );
  await act(async () => {
    result.current.openDrawer(0);
    await Promise.resolve();
  });
  expect(result.current.checkMeta?.checkTitle).toBe("S3 Check");
  // When — close and immediately reopen on the second row while its
  // finding fetch is still pending.
  act(() => {
    result.current.closeDrawer();
    result.current.openDrawer(1);
  });
  // Then — the drawer reopens with no current finding yet, and the
  // previous check metadata is kept as a placeholder.
  expect(result.current.isOpen).toBe(true);
  expect(result.current.currentIndex).toBe(1);
  expect(result.current.currentFinding).toBeNull();
  expect(result.current.checkMeta?.checkTitle).toBe("S3 Check");
  // Once the pending fetch resolves, checkMeta switches to the new check.
  await act(async () => {
    resolveSecondFinding?.({ data: ["finding-2"] });
    await Promise.resolve();
    await Promise.resolve();
  });
  expect(result.current.checkMeta?.checkTitle).toBe("EC2 Check");
  expect(result.current.checkMeta?.description).toBe("ec2 description");
});
it("should clear the previous resource findings when navigation to the next resource fails", async () => {
// Given
const resources = [
@@ -444,24 +642,24 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
}),
];
getLatestFindingsByResourceUidMock.mockImplementation(
async ({ resourceUid }: { resourceUid: string }) => {
if (resourceUid.includes("second")) {
throw new Error("Fetch failed");
}
getFindingByIdMock.mockImplementation(async (findingId: string) => {
if (findingId === "finding-2") {
throw new Error("Fetch failed");
}
return { data: [resourceUid] };
},
);
return { data: [findingId] };
});
adaptFindingsByResourceResponseMock.mockImplementation(
(response: { data: string[] }) => [
makeDrawerFinding({
id: response.data[0].includes("first") ? "finding-1" : "finding-2",
resourceUid: response.data[0],
resourceName: response.data[0].includes("first")
? "first-bucket"
: "second-bucket",
id: response.data[0],
resourceUid:
response.data[0] === "finding-1"
? "arn:aws:s3:::first-bucket"
: "arn:aws:s3:::second-bucket",
resourceName:
response.data[0] === "finding-1" ? "first-bucket" : "second-bucket",
}),
],
);
@@ -469,7 +667,6 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
const { result } = renderHook(() =>
useResourceDetailDrawer({
resources,
checkId: "s3_check",
}),
);
@@ -481,6 +678,7 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
expect(result.current.currentFinding?.resourceUid).toBe(
"arn:aws:s3:::first-bucket",
);
expect(result.current.checkMeta?.checkTitle).toBe("S3 Check");
// When
await act(async () => {
@@ -492,5 +690,123 @@ describe("useResourceDetailDrawer — other findings filtering", () => {
expect(result.current.currentIndex).toBe(1);
expect(result.current.currentFinding).toBeNull();
expect(result.current.otherFindings).toEqual([]);
expect(result.current.checkMeta).toBeNull();
});
it("should clear other findings immediately while the next resource is loading", async () => {
  // Given — two rows backed by distinct resource UIDs.
  const resources = [
    makeResource({
      id: "row-1",
      findingId: "finding-1",
      resourceUid: "arn:aws:s3:::first-bucket",
      resourceName: "first-bucket",
    }),
    makeResource({
      id: "row-2",
      findingId: "finding-2",
      resourceUid: "arn:aws:s3:::second-bucket",
      resourceName: "second-bucket",
    }),
  ];
  // Hold both of the second row's requests (finding detail + resource
  // findings) in-flight so the mid-navigation state can be asserted.
  let resolveSecondFinding: ((value: { data: string[] }) => void) | null =
    null;
  let resolveSecondResource: ((value: { data: string[] }) => void) | null =
    null;
  getFindingByIdMock.mockImplementation((findingId: string) => {
    if (findingId === "finding-2") {
      return new Promise((resolve) => {
        resolveSecondFinding = resolve;
      });
    }
    return Promise.resolve({ data: [findingId] });
  });
  getLatestFindingsByResourceUidMock.mockImplementation(
    ({ resourceUid }: { resourceUid: string }) => {
      if (resourceUid === "arn:aws:s3:::second-bucket") {
        return new Promise((resolve) => {
          resolveSecondResource = resolve;
        });
      }
      return Promise.resolve({ data: ["resource-1"] });
    },
  );
  // The adapter routes each payload sentinel to the matching drawer
  // finding: detail payloads map to the current finding, resource
  // payloads map to the "other findings" list.
  adaptFindingsByResourceResponseMock.mockImplementation(
    (response: { data: string[] }) => {
      if (response.data[0] === "finding-1") {
        return [makeDrawerFinding({ id: "finding-1" })];
      }
      if (response.data[0] === "finding-2") {
        return [
          makeDrawerFinding({
            id: "finding-2",
            resourceUid: "arn:aws:s3:::second-bucket",
            resourceName: "second-bucket",
          }),
        ];
      }
      if (response.data[0] === "resource-1") {
        return [
          makeDrawerFinding({
            id: "finding-3",
            checkTitle: "First bucket other finding",
            resourceUid: "arn:aws:s3:::first-bucket",
          }),
        ];
      }
      return [
        makeDrawerFinding({
          id: "finding-4",
          checkTitle: "Second bucket other finding",
          resourceUid: "arn:aws:s3:::second-bucket",
        }),
      ];
    },
  );
  const { result } = renderHook(() =>
    useResourceDetailDrawer({
      resources,
    }),
  );
  await act(async () => {
    result.current.openDrawer(0);
    await Promise.resolve();
  });
  expect(result.current.otherFindings.map((finding) => finding.id)).toEqual([
    "finding-3",
  ]);
  // When — navigate while the second row's fetches are still pending.
  act(() => {
    result.current.navigateNext();
  });
  // Then — stale first-bucket findings are cleared synchronously, before
  // any of the second row's data has arrived.
  expect(result.current.currentIndex).toBe(1);
  expect(result.current.currentFinding).toBeNull();
  expect(result.current.otherFindings).toEqual([]);
  // Completing the held requests populates the second bucket's list.
  await act(async () => {
    resolveSecondFinding?.({ data: ["finding-2"] });
    resolveSecondResource?.({ data: ["resource-2"] });
    await Promise.resolve();
    await Promise.resolve();
  });
  expect(result.current.otherFindings.map((finding) => finding.id)).toEqual([
    "finding-4",
  ]);
});
});
@@ -4,6 +4,7 @@ import { useEffect, useRef, useState } from "react";
import {
adaptFindingsByResourceResponse,
getFindingById,
getLatestFindingsByResourceUid,
type ResourceDrawerFinding,
} from "@/actions/findings";
@@ -43,10 +44,11 @@ function extractCheckMeta(finding: ResourceDrawerFinding): CheckMeta {
interface UseResourceDetailDrawerOptions {
resources: FindingResourceRow[];
checkId: string;
totalResourceCount?: number;
onRequestMoreResources?: () => void;
initialIndex?: number | null;
canLoadOtherFindings?: boolean;
includeMutedInOtherFindings?: boolean;
}
interface UseResourceDetailDrawerReturn {
@@ -56,9 +58,9 @@ interface UseResourceDetailDrawerReturn {
checkMeta: CheckMeta | null;
currentIndex: number;
totalResources: number;
currentResource: FindingResourceRow | null;
currentFinding: ResourceDrawerFinding | null;
otherFindings: ResourceDrawerFinding[];
allFindings: ResourceDrawerFinding[];
openDrawer: (index: number) => void;
closeDrawer: () => void;
navigatePrev: () => void;
@@ -70,23 +72,33 @@ interface UseResourceDetailDrawerReturn {
/**
* Manages the resource detail drawer state, fetching, and navigation.
*
* Caches findings per resourceUid in a Map ref so navigating prev/next
* Caches findings per findingId in a Map ref so navigating prev/next
* doesn't re-fetch already-visited resources.
*/
export function useResourceDetailDrawer({
resources,
checkId,
totalResourceCount,
onRequestMoreResources,
initialIndex = null,
canLoadOtherFindings = true,
includeMutedInOtherFindings = false,
}: UseResourceDetailDrawerOptions): UseResourceDetailDrawerReturn {
const [isOpen, setIsOpen] = useState(initialIndex !== null);
const [isLoading, setIsLoading] = useState(false);
const [currentIndex, setCurrentIndex] = useState(initialIndex ?? 0);
const [findings, setFindings] = useState<ResourceDrawerFinding[]>([]);
const [currentFinding, setCurrentFinding] =
useState<ResourceDrawerFinding | null>(null);
const [otherFindings, setOtherFindings] = useState<ResourceDrawerFinding[]>(
[],
);
const [isNavigating, setIsNavigating] = useState(false);
const cacheRef = useRef<Map<string, ResourceDrawerFinding[]>>(new Map());
const currentFindingCacheRef = useRef<
Map<string, ResourceDrawerFinding | null>
>(new Map());
const otherFindingsCacheRef = useRef<Map<string, ResourceDrawerFinding[]>>(
new Map(),
);
const checkMetaRef = useRef<CheckMeta | null>(null);
const fetchControllerRef = useRef<AbortController | null>(null);
const navigationTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(
@@ -134,6 +146,11 @@ export function useResourceDetailDrawer({
setIsNavigating(true);
};
const resetCurrentResourceState = () => {
setCurrentFinding(null);
setOtherFindings([]);
};
// Abort any in-flight request on unmount to prevent state updates
// on an already-unmounted component.
useEffect(() => {
@@ -144,46 +161,83 @@ export function useResourceDetailDrawer({
};
}, []);
const fetchFindings = async (resourceUid: string) => {
const fetchFindings = async (resource: FindingResourceRow) => {
// Abort any in-flight request to prevent stale data from out-of-order responses
fetchControllerRef.current?.abort();
clearNavigationTimeout();
const controller = new AbortController();
fetchControllerRef.current = controller;
// Check cache first
const cached = cacheRef.current.get(resourceUid);
if (cached) {
if (!checkMetaRef.current) {
const main = cached.find((f) => f.checkId === checkId) ?? cached[0];
if (main) checkMetaRef.current = extractCheckMeta(main);
const { findingId, resourceUid } = resource;
const fetchCurrentFinding = async () => {
const cached = currentFindingCacheRef.current.get(findingId);
if (cached !== undefined) {
return cached;
}
setFindings(cached);
finishNavigation();
return;
}
const response = await getFindingById(
findingId,
"resources,scan.provider",
{ source: "resource-detail-drawer" },
);
const adapted = adaptFindingsByResourceResponse(response);
const finding =
adapted.find((item) => item.id === findingId) ?? adapted[0] ?? null;
currentFindingCacheRef.current.set(findingId, finding);
return finding;
};
const fetchOtherFindings = async () => {
if (!canLoadOtherFindings || !resourceUid) {
return [];
}
const cached = otherFindingsCacheRef.current.get(resourceUid);
if (cached) {
return cached;
}
const response = await getLatestFindingsByResourceUid({
resourceUid,
pageSize: 50,
includeMuted: includeMutedInOtherFindings,
});
const adapted = adaptFindingsByResourceResponse(response);
otherFindingsCacheRef.current.set(resourceUid, adapted);
return adapted;
};
setIsLoading(true);
try {
const response = await getLatestFindingsByResourceUid({ resourceUid });
const [nextCurrentFinding, nextOtherFindings] = await Promise.all([
fetchCurrentFinding(),
fetchOtherFindings(),
]);
// Discard stale response if a newer request was started
if (controller.signal.aborted) return;
const adapted = adaptFindingsByResourceResponse(response);
cacheRef.current.set(resourceUid, adapted);
checkMetaRef.current = nextCurrentFinding
? extractCheckMeta(nextCurrentFinding)
: null;
// Extract check-level metadata once (stable across all resources)
if (!checkMetaRef.current) {
const main = adapted.find((f) => f.checkId === checkId) ?? adapted[0];
if (main) checkMetaRef.current = extractCheckMeta(main);
}
setFindings(adapted);
} catch (error) {
setCurrentFinding(nextCurrentFinding);
// The API already filters to status=FAIL (see getLatestFindingsByResourceUid).
// Only need to drop the current finding from the list.
setOtherFindings(
nextOtherFindings.filter((finding) => finding.id !== findingId),
);
} catch (_error) {
if (!controller.signal.aborted) {
console.error("Error fetching findings for resource:", error);
setFindings([]);
checkMetaRef.current = null;
setCurrentFinding(null);
setOtherFindings([]);
}
} finally {
if (!controller.signal.aborted) {
@@ -202,7 +256,7 @@ export function useResourceDetailDrawer({
return;
}
fetchFindings(resource.resourceUid);
fetchFindings(resource);
// Only initialize once on mount for deep-link/inline entry points.
// User-driven navigations use openDrawer/navigateTo afterwards.
// eslint-disable-next-line react-hooks/exhaustive-deps
@@ -212,13 +266,11 @@ export function useResourceDetailDrawer({
const resource = resources[index];
if (!resource) return;
clearNavigationTimeout();
navigationStartedAtRef.current = null;
setCurrentIndex(index);
setIsOpen(true);
setIsNavigating(false);
setFindings([]);
fetchFindings(resource.resourceUid);
startNavigation();
resetCurrentResourceState();
fetchFindings(resource);
};
const closeDrawer = () => {
@@ -228,10 +280,11 @@ export function useResourceDetailDrawer({
const refetchCurrent = () => {
const resource = resources[currentIndex];
if (!resource) return;
cacheRef.current.delete(resource.resourceUid);
currentFindingCacheRef.current.delete(resource.findingId);
otherFindingsCacheRef.current.delete(resource.resourceUid);
startNavigation();
setFindings([]);
fetchFindings(resource.resourceUid);
resetCurrentResourceState();
fetchFindings(resource);
};
const navigateTo = (index: number) => {
@@ -240,8 +293,8 @@ export function useResourceDetailDrawer({
setCurrentIndex(index);
startNavigation();
setFindings([]);
fetchFindings(resource.resourceUid);
resetCurrentResourceState();
fetchFindings(resource);
};
const navigatePrev = () => {
@@ -265,17 +318,7 @@ export function useResourceDetailDrawer({
}
};
// The finding whose checkId matches the drill-down's checkId
const currentFinding =
findings.find((f) => f.checkId === checkId) ?? findings[0] ?? null;
// "Other Findings For This Resource" intentionally shows only FAIL entries,
// while currentFinding (the drilled-down one) can be any status (FAIL, MANUAL, PASS…).
const otherFindings = (
currentFinding
? findings.filter((f) => f.id !== currentFinding.id)
: findings
).filter((f) => f.status === "FAIL");
const currentResource = resources[currentIndex];
return {
isOpen,
@@ -284,9 +327,9 @@ export function useResourceDetailDrawer({
checkMeta: checkMetaRef.current,
currentIndex,
totalResources: totalResourceCount ?? resources.length,
currentResource: currentResource ?? null,
currentFinding,
otherFindings,
allFindings: findings,
openDrawer,
closeDrawer,
navigatePrev,
@@ -0,0 +1,48 @@
import { render, screen } from "@testing-library/react";
import type { ReactNode } from "react";
import { describe, expect, it, vi } from "vitest";
// Stub next/link with a plain anchor so the tests can inspect href,
// aria-label, and className without Next.js routing machinery.
vi.mock("next/link", () => ({
  default: ({
    children,
    href,
    ...rest
  }: {
    children: ReactNode;
    href: string;
    "aria-label"?: string;
    className?: string;
  }) => (
    <a href={href} {...rest}>
      {children}
    </a>
  ),
}));
import { LinkToFindings } from "./link-to-findings";
describe("LinkToFindings", () => {
  // Renders the component and returns its single accessible link.
  const renderAndGetLink = () => {
    render(<LinkToFindings />);
    return screen.getByRole("link", { name: "Go to Findings page" });
  };

  it("should link to findings sorted by severity (desc) then last_seen_at (desc), filtered to FAIL + new delta", () => {
    const link = renderAndGetLink();

    const href = link.getAttribute("href") ?? "";
    const query = href.split("?")[1] ?? "";
    const params = new URLSearchParams(query);

    expect(params.get("sort")).toBe("-severity,-last_seen_at");
    expect(params.get("filter[status__in]")).toBe("FAIL");
    // filter[delta] must be singular — the finding-groups filter does not
    // register `delta__in`, so the plural form is silently dropped by the API.
    expect(params.get("filter[delta]")).toBe("new");
    expect(params.has("filter[delta__in]")).toBe(false);
  });

  it("should render as a tertiary text link (not a solid button) to match the overview Card pattern", () => {
    const link = renderAndGetLink();

    expect(link.className).toContain("text-button-tertiary");
    expect(link.className).toContain("hover:text-button-tertiary-hover");
  });
});
@@ -1,20 +1,13 @@
"use client";
import Link from "next/link";
import { Button } from "@/components/shadcn/button/button";
export const LinkToFindings = () => {
return (
<div className="mt-4 flex w-full items-center justify-end">
<Button asChild variant="default" size="sm">
<Link
href="/findings?sort=severity,-inserted_at&filter[status__in]=FAIL&filter[delta__in]=new"
aria-label="Go to Findings page"
>
Check out on Findings
</Link>
</Button>
</div>
<Link
href="/findings?sort=-severity,-last_seen_at&filter[status__in]=FAIL&filter[delta]=new"
aria-label="Go to Findings page"
className="text-button-tertiary hover:text-button-tertiary-hover text-sm font-medium transition-colors"
>
Check out on Findings
</Link>
);
};
@@ -1,39 +1,114 @@
import React from "react";
import { Card } from "@/components/shadcn/card/card";
import { Skeleton } from "@/components/shadcn/skeleton/skeleton";
export const SkeletonTableNewFindings = () => {
const columns = 7;
const rows = 3;
const SkeletonTableRow = () => {
return (
<Card variant="base" padding="md" className="flex flex-col gap-4">
{/* Table headers */}
<div className="flex gap-4">
{Array.from({ length: columns }).map((_, index) => (
<Skeleton
key={`header-${index}`}
className="h-8"
style={{ width: `${100 / columns}%` }}
/>
))}
</div>
{/* Table body */}
<div className="flex flex-col gap-3">
{Array.from({ length: rows }).map((_, rowIndex) => (
<div key={`row-${rowIndex}`} className="flex gap-4">
{Array.from({ length: columns }).map((_, colIndex) => (
<Skeleton
key={`cell-${rowIndex}-${colIndex}`}
className="h-12"
style={{ width: `${100 / columns}%` }}
/>
))}
</div>
))}
</div>
</Card>
<tr className="border-border-neutral-secondary border-b last:border-b-0">
{/* Notification dot */}
<td className="px-3 py-4">
<Skeleton className="size-2 rounded-full" />
</td>
{/* Status badge */}
<td className="px-3 py-4">
<Skeleton className="h-6 w-14 rounded-full" />
</td>
{/* Finding title */}
<td className="px-3 py-4">
<Skeleton className="h-4 w-56 rounded" />
</td>
{/* Resource name */}
<td className="px-3 py-4">
<div className="flex items-center gap-2">
<Skeleton className="size-4 rounded" />
<Skeleton className="h-4 w-24 rounded" />
</div>
</td>
{/* Severity badge */}
<td className="px-3 py-4">
<Skeleton className="h-6 w-16 rounded-full" />
</td>
{/* Provider icon */}
<td className="px-3 py-4">
<Skeleton className="size-8 rounded-md" />
</td>
{/* Service */}
<td className="px-3 py-4">
<Skeleton className="h-4 w-16 rounded" />
</td>
{/* Region — flag + name */}
<td className="px-3 py-4">
<div className="flex items-center gap-1.5">
<Skeleton className="size-4 rounded" />
<Skeleton className="h-4 w-20 rounded" />
</div>
</td>
{/* Time */}
<td className="px-3 py-4">
<Skeleton className="h-4 w-24 rounded" />
</td>
</tr>
);
};
export const SkeletonTableNewFindings = () => {
const rows = 10;
return (
<div className="rounded-large shadow-small border-border-neutral-secondary bg-bg-neutral-secondary flex w-full flex-col gap-4 overflow-hidden border p-4">
{/* Header: title + description on the left, link on the right */}
<div className="flex w-full items-center justify-between gap-4">
<div className="flex flex-col gap-1">
<Skeleton className="h-5 w-64 rounded" />
<Skeleton className="h-3 w-80 rounded" />
</div>
<Skeleton className="h-4 w-40 rounded" />
</div>
{/* Table */}
<table className="w-full">
<thead>
<tr className="border-border-neutral-secondary border-b">
{/* Notification header (no text) */}
<th className="w-8 py-3" />
{/* Status */}
<th className="px-3 py-3 text-left">
<Skeleton className="h-4 w-14 rounded" />
</th>
{/* Finding */}
<th className="px-3 py-3 text-left">
<Skeleton className="h-4 w-16 rounded" />
</th>
{/* Resource name */}
<th className="px-3 py-3 text-left">
<Skeleton className="h-4 w-28 rounded" />
</th>
{/* Severity */}
<th className="px-3 py-3 text-left">
<Skeleton className="h-4 w-16 rounded" />
</th>
{/* Cloud Provider */}
<th className="px-3 py-3 text-left">
<Skeleton className="h-4 w-24 rounded" />
</th>
{/* Service */}
<th className="px-3 py-3 text-left">
<Skeleton className="h-4 w-14 rounded" />
</th>
{/* Region */}
<th className="px-3 py-3 text-left">
<Skeleton className="h-4 w-14 rounded" />
</th>
{/* Time */}
<th className="px-3 py-3 text-left">
<Skeleton className="h-4 w-12 rounded" />
</th>
</tr>
</thead>
<tbody>
{Array.from({ length: rows }).map((_, i) => (
<SkeletonTableRow key={i} />
))}
</tbody>
</table>
</div>
);
};
@@ -125,7 +125,10 @@ export const ProvidersFilters = ({
placeholder={`All ${filter.labelCheckboxGroup}`}
/>
</MultiSelectTrigger>
<MultiSelectContent search={false}>
<MultiSelectContent
search={false}
width={filter.width ?? "default"}
>
<MultiSelectSelectAll>Select All</MultiSelectSelectAll>
<MultiSelectSeparator />
{filter.values.map((value) => {
@@ -0,0 +1,22 @@
import { readFileSync } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { describe, expect, it } from "vitest";
describe("column-get-scans", () => {
const currentDir = path.dirname(fileURLToPath(import.meta.url));
const filePath = path.join(currentDir, "column-get-scans.tsx");
const source = readFileSync(filePath, "utf8");
it("links scan findings to the historical finding-groups filters", () => {
expect(source).toContain("filter[scan]=");
expect(source).toContain("filter[inserted_at]=");
expect(source).not.toContain("filter[scan__in]");
});
it("links the findings filter against the scan's completed_at (what the backend expects)", () => {
expect(source).toMatch(/attributes:\s*{\s*completed_at\s*}/);
expect(source).toMatch(/toLocalDateString\(completed_at\)/);
});
});
@@ -8,10 +8,10 @@ import { TableLink } from "@/components/ui/custom";
import { DateWithTime, EntityInfo } from "@/components/ui/entities";
import { TriggerSheet } from "@/components/ui/sheet";
import { DataTableColumnHeader, StatusBadge } from "@/components/ui/table";
import { toLocalDateString } from "@/lib/date-utils";
import { ProviderType, ScanProps } from "@/types";
import { TriggerIcon } from "../../trigger-icon";
import { DataTableDownloadDetails } from "./data-table-download-details";
import { DataTableRowActions } from "./data-table-row-actions";
import { DataTableRowDetails } from "./data-table-row-details";
@@ -97,24 +97,6 @@ export const ColumnGetScans: ColumnDef<ScanProps>[] = [
enableSorting: false,
},
{
accessorKey: "started_at",
header: ({ column }) => (
<DataTableColumnHeader column={column} title="Started at" />
),
cell: ({ row }) => {
const {
attributes: { started_at },
} = getScanData(row);
return (
<div className="w-[100px]">
<DateWithTime dateTime={started_at} />
</div>
);
},
enableSorting: false,
},
{
accessorKey: "status",
header: ({ column }) => (
@@ -141,12 +123,22 @@ export const ColumnGetScans: ColumnDef<ScanProps>[] = [
<DataTableColumnHeader column={column} title="Findings" />
),
cell: ({ row }) => {
const { id } = getScanData(row);
const {
id,
attributes: { completed_at },
} = getScanData(row);
const scanState = row.original.attributes?.state;
// Source is `completed_at` (scan finish time) because findings are
// persisted when the scan ends — that's when their `inserted_at` is
// written. The URL key stays `filter[inserted_at]` because the findings
// table is partitioned by the finding's `inserted_at` date; this filter
// is the partition hint the backend uses to avoid scanning every
// partition. Names differ by design: scan.completed_at ≈ finding.inserted_at.
const scanDate = toLocalDateString(completed_at);
return (
<TableLink
href={`/findings?filter[scan__in]=${id}&filter[status__in]=FAIL`}
isDisabled={scanState !== "completed"}
href={`/findings?filter[scan]=${id}&filter[inserted_at]=${scanDate}&filter[status__in]=FAIL`}
isDisabled={scanState !== "completed" || !scanDate}
label="See Findings"
/>
);
@@ -171,24 +163,10 @@ export const ColumnGetScans: ColumnDef<ScanProps>[] = [
},
enableSorting: false,
},
{
id: "download",
header: ({ column }) => (
<DataTableColumnHeader column={column} title="Download" />
),
cell: ({ row }) => {
return (
<div className="mx-auto w-fit">
<DataTableDownloadDetails row={row} />
</div>
);
},
enableSorting: false,
},
{
accessorKey: "resources",
header: ({ column }) => (
<DataTableColumnHeader column={column} title="Resources" />
<DataTableColumnHeader column={column} title="Impacted Resources" />
),
cell: ({ row }) => {
const {
@@ -202,6 +180,24 @@ export const ColumnGetScans: ColumnDef<ScanProps>[] = [
},
enableSorting: false,
},
{
accessorKey: "started_at",
header: ({ column }) => (
<DataTableColumnHeader column={column} title="Started at" />
),
cell: ({ row }) => {
const {
attributes: { started_at },
} = getScanData(row);
return (
<div className="w-[100px]">
<DateWithTime dateTime={started_at} />
</div>
);
},
enableSorting: false,
},
{
accessorKey: "scheduled_at",
header: ({ column }) => (
@@ -1,34 +0,0 @@
import { Row } from "@tanstack/react-table";
import { useState } from "react";
import { DownloadIconButton, useToast } from "@/components/ui";
import { downloadScanZip } from "@/lib";
interface DataTableDownloadDetailsProps<ScanProps> {
row: Row<ScanProps>;
}
export function DataTableDownloadDetails<ScanProps>({
row,
}: DataTableDownloadDetailsProps<ScanProps>) {
const { toast } = useToast();
const [isDownloading, setIsDownloading] = useState(false);
const scanId = (row.original as { id: string }).id;
const scanState = (row.original as any).attributes?.state;
const handleDownload = async () => {
setIsDownloading(true);
await downloadScanZip(scanId, toast);
setIsDownloading(false);
};
return (
<DownloadIconButton
paramId={scanId}
onDownload={handleDownload}
isDownloading={isDownloading}
isDisabled={scanState !== "completed"}
/>
);
}
@@ -47,6 +47,33 @@ describe("MultiSelect", () => {
expect(
within(screen.getByRole("combobox")).getByText("Production AWS"),
).toBeInTheDocument();
expect(
within(screen.getByRole("combobox")).queryByText("Select accounts"),
).not.toBeInTheDocument();
});
it("keeps the filter label context when a value is selected", () => {
render(
<MultiSelect values={["FAIL"]} onValuesChange={() => {}}>
<MultiSelectTrigger>
<MultiSelectValue placeholder="All Status" />
</MultiSelectTrigger>
<MultiSelectContent search={false}>
<MultiSelectItem value="FAIL">FAIL</MultiSelectItem>
<MultiSelectItem value="PASS">PASS</MultiSelectItem>
</MultiSelectContent>
</MultiSelect>,
);
expect(
within(screen.getByRole("combobox")).getByText("Status"),
).toBeInTheDocument();
expect(
within(screen.getByRole("combobox")).getByText("FAIL"),
).toBeInTheDocument();
expect(
within(screen.getByRole("combobox")).queryByText("All Status"),
).not.toBeInTheDocument();
});
it("filters items without crashing when search is enabled", async () => {
+10 -1
View File
@@ -163,6 +163,10 @@ export function MultiSelectValue({
const shouldWrap =
overflowBehavior === "wrap" ||
(overflowBehavior === "wrap-when-open" && open);
const selectedContextLabel =
placeholder && /^All\s+/i.test(placeholder) && selectedValues.size > 0
? placeholder.replace(/^All\s+/i, "").trim()
: "";
const checkOverflow = useCallback(() => {
if (valueRef.current === null) return;
@@ -222,11 +226,16 @@ export function MultiSelectValue({
className,
)}
>
{placeholder && (
{placeholder && selectedValues.size === 0 && (
<span className="text-bg-button-secondary shrink-0 font-normal">
{placeholder}
</span>
)}
{selectedContextLabel && (
<span className="text-bg-button-secondary shrink-0 font-normal">
{selectedContextLabel}
</span>
)}
{Array.from(selectedValues)
.filter((value) => items.has(value))
.map((value) => (
@@ -62,8 +62,16 @@ vi.mock("@/components/shadcn/select/multiselect", () => ({
MultiSelectValue: ({ placeholder }: { placeholder: string }) => (
<span>{placeholder}</span>
),
MultiSelectContent: ({ children }: { children: React.ReactNode }) => (
<>{children}</>
MultiSelectContent: ({
children,
width,
}: {
children: React.ReactNode;
width?: string;
}) => (
<div data-testid="multiselect-content" data-width={width ?? "default"}>
{children}
</div>
),
MultiSelectSelectAll: ({ children }: { children: React.ReactNode }) => (
<button type="button">{children}</button>
@@ -114,6 +122,13 @@ const severityFilter: FilterOption = {
values: ["critical", "high"],
};
const scanFilter: FilterOption = {
key: "filter[scan__in]",
labelCheckboxGroup: "Scan ID",
values: ["scan-1"],
width: "wide",
};
describe("DataTableFilterCustom — batch vs instant mode", () => {
beforeEach(() => {
vi.clearAllMocks();
@@ -275,4 +290,15 @@ describe("DataTableFilterCustom — batch vs instant mode", () => {
expect(screen.getByRole("button", { name: "Clear" })).toBeInTheDocument();
});
});
describe("dropdown width", () => {
it("should propagate the filter width to the dropdown content", () => {
render(<DataTableFilterCustom filters={[scanFilter]} />);
expect(screen.getByTestId("multiselect-content")).toHaveAttribute(
"data-width",
"wide",
);
});
});
});
@@ -15,7 +15,12 @@ import {
} from "@/components/shadcn/select/multiselect";
import { EntityInfo } from "@/components/ui/entities/entity-info";
import { useUrlFilters } from "@/hooks/use-url-filters";
import { isConnectionStatus, isScanEntity } from "@/lib/helper-filters";
import {
getScanEntityLabel,
isConnectionStatus,
isScanEntity,
} from "@/lib/helper-filters";
import { cn } from "@/lib/utils";
import {
FilterEntity,
FilterOption,
@@ -29,6 +34,8 @@ export interface DataTableFilterCustomProps {
filters: FilterOption[];
/** Optional element to render at the start of the filters grid */
prependElement?: React.ReactNode;
/** Optional className override for the filters grid layout */
gridClassName?: string;
/** Hide the clear filters button and active badges (useful when parent manages this) */
hideClearButton?: boolean;
/**
@@ -54,6 +61,7 @@ export interface DataTableFilterCustomProps {
export const DataTableFilterCustom = ({
filters,
prependElement,
gridClassName,
hideClearButton = false,
mode = DATA_TABLE_FILTER_MODE.INSTANT,
onBatchChange,
@@ -80,10 +88,11 @@ export const DataTableFilterCustom = ({
if (!entity) return value;
if (isScanEntity(entity as ScanEntity)) {
const scanEntity = entity as ScanEntity;
return (
scanEntity.providerInfo?.alias || scanEntity.providerInfo?.uid || value
);
// Match the summary-strip chip: "Scan: {provider} - {name}". Without the
// "Scan:" prefix, the trigger badge would just say "AWS Prod - Nightly",
// which reads as a generic account tag and hides that it's a scan filter.
const label = getScanEntityLabel(entity as ScanEntity);
return label ? `Scan: ${label}` : value;
}
if (isConnectionStatus(entity)) {
const connectionStatus = entity as ProviderConnectionStatus;
@@ -173,7 +182,12 @@ export const DataTableFilterCustom = ({
};
return (
<div className="grid grid-cols-1 gap-4 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 2xl:grid-cols-5">
<div
className={cn(
"grid grid-cols-1 gap-4 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 2xl:grid-cols-5",
gridClassName,
)}
>
{prependElement}
{sortedFilters().map((filter) => {
const selectedValues = getSelectedValues(filter);
@@ -189,7 +203,10 @@ export const DataTableFilterCustom = ({
placeholder={`All ${filter.labelCheckboxGroup}`}
/>
</MultiSelectTrigger>
<MultiSelectContent search={false}>
<MultiSelectContent
search={false}
width={filter.width ?? "default"}
>
<MultiSelectSelectAll>Select All</MultiSelectSelectAll>
<MultiSelectSeparator />
{filter.values.map((value) => {
+4
View File
@@ -110,6 +110,8 @@ interface DataTableProviderProps<TData, TValue> {
searchBadge?: { label: string; onDismiss: () => void };
/** Optional click handler for top-level rows. */
onRowClick?: (row: Row<TData>) => void;
/** Optional header rendered inside the table container, above the toolbar. */
header?: ReactNode;
}
export function DataTable<TData, TValue>({
@@ -140,6 +142,7 @@ export function DataTable<TData, TValue>({
renderAfterRow,
searchBadge,
onRowClick,
header,
}: DataTableProviderProps<TData, TValue>) {
const [sorting, setSorting] = useState<SortingState>([]);
const [columnFilters, setColumnFilters] = useState<ColumnFiltersState>([]);
@@ -235,6 +238,7 @@ export function DataTable<TData, TValue>({
isPending && "pointer-events-none opacity-60",
)}
>
{header && <div className="w-full">{header}</div>}
{/* Table Toolbar */}
{showToolbar && (
<div className="flex items-center justify-between">
+37
View File
@@ -61,6 +61,43 @@ describe("useFilterBatch", () => {
});
expect(result.current.hasChanges).toBe(false);
});
it("should expose filter[delta]=new under the FilterType.DELTA key so the dropdown shows it selected", async () => {
// Given — URL from LinkToFindings uses `filter[delta]` (singular), matching the API.
setSearchParams({
"filter[status__in]": "FAIL",
"filter[delta]": "new",
});
const { FilterType } = await import("@/types/filters");
// When
const { result } = renderHook(() => useFilterBatch());
// Then — the Delta dropdown reads via getFilterValue(`filter[${FilterType.DELTA}]`).
// For the checkbox of "new" to appear checked, that lookup must return ["new"].
expect(
result.current.getFilterValue(`filter[${FilterType.DELTA}]`),
).toEqual(["new"]);
});
it("should include both filter[status__in] and filter[delta] from the overview deep link", () => {
// Given — URL produced by LinkToFindings: /findings?...&filter[status__in]=FAIL&filter[delta]=new
setSearchParams({
"filter[status__in]": "FAIL",
"filter[delta]": "new",
});
// When
const { result } = renderHook(() => useFilterBatch());
// Then — the singular `filter[delta]` key must be captured in pendingFilters
// so FindingsFilters can render a chip for it (same as filter[status__in]).
expect(result.current.pendingFilters).toEqual({
"filter[status__in]": ["FAIL"],
"filter[delta]": ["new"],
});
});
});
// ── Excluded keys ──────────────────────────────────────────────────────────
@@ -0,0 +1,15 @@
import { readFileSync } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { describe, expect, it } from "vitest";
describe("useFindingGroupResourceState", () => {
const currentDir = path.dirname(fileURLToPath(import.meta.url));
const filePath = path.join(currentDir, "use-finding-group-resource-state.ts");
const source = readFileSync(filePath, "utf8");
it("enables muted findings only for the finding-group resource drawer", () => {
expect(source).toContain("includeMutedInOtherFindings: true");
});
});
+2 -1
View File
@@ -80,9 +80,10 @@ export function useFindingGroupResourceState({
const drawer = useResourceDetailDrawer({
resources,
checkId: group.checkId,
totalResourceCount: totalCount ?? group.resourcesTotal,
onRequestMoreResources: loadMore,
canLoadOtherFindings: group.resourcesTotal !== 0,
includeMutedInOtherFindings: true,
});
const handleDrawerMuteComplete = () => {
+35
View File
@@ -0,0 +1,35 @@
import { format, parseISO } from "date-fns";
import { describe, expect, it } from "vitest";
import { toLocalDateString } from "./date-utils";
describe("toLocalDateString", () => {
it("returns undefined for nullish or empty input", () => {
expect(toLocalDateString(undefined)).toBeUndefined();
expect(toLocalDateString(null)).toBeUndefined();
expect(toLocalDateString("")).toBeUndefined();
});
it("returns undefined for malformed strings", () => {
expect(toLocalDateString("not-a-date")).toBeUndefined();
});
it("returns undefined for invalid Date instances", () => {
expect(toLocalDateString(new Date("not-a-date"))).toBeUndefined();
});
it("formats an ISO string in the user's local timezone", () => {
// Near UTC midnight — the UTC split ("2026-04-19") differs from the local
// date for any tz with a positive offset. We pin parity with date-fns so
// the assertion holds regardless of where CI runs.
const iso = "2026-04-19T23:15:00Z";
const expected = format(parseISO(iso), "yyyy-MM-dd");
expect(toLocalDateString(iso)).toBe(expected);
});
it("formats a Date instance using its local calendar day", () => {
const date = new Date(2026, 3, 20, 10, 0, 0); // April 20, 2026 local
expect(toLocalDateString(date)).toBe("2026-04-20");
});
});
+23 -1
View File
@@ -1,4 +1,26 @@
import { formatDistanceToNow } from "date-fns";
import { format, formatDistanceToNow, parseISO } from "date-fns";
/**
 * Converts an ISO string or Date into a `yyyy-MM-dd` string rendered in the
 * user's local timezone — the same format `DateWithTime` shows — so UI
 * chips/URLs built with this helper match what the user sees in tables and
 * pickers. Returns undefined for null, empty, malformed, or invalid-Date
 * input so callers can guard on it (e.g. `isDisabled={!toLocalDateString(x)}`).
 * Do NOT use this for UTC-based date bucketing (e.g. chart axes partitioned
 * server-side by UTC day); that use case needs a separate UTC helper.
 */
export function toLocalDateString(
  value: string | Date | null | undefined,
): string | undefined {
  if (!value) return undefined;
  try {
    const parsed = typeof value === "string" ? parseISO(value) : value;
    // Invalid dates (NaN epoch) fall back to undefined rather than "NaN-NaN-NaN".
    return Number.isNaN(parsed.getTime())
      ? undefined
      : format(parsed, "yyyy-MM-dd");
  } catch {
    return undefined;
  }
}
/**
* Formats a duration in seconds to a human-readable string like "2h 5m 30s".
+115
View File
@@ -3,9 +3,11 @@ import { describe, expect, it } from "vitest";
import type { FindingGroupRow } from "@/types";
import {
canDrillDownFindingGroup,
getActiveStatusFilter,
getFilteredFindingGroupDelta,
getFindingGroupDelta,
getFindingGroupImpactedCounts,
isFindingGroupMuted,
} from "./findings-groups";
@@ -138,6 +140,119 @@ describe("getActiveStatusFilter", () => {
});
});
describe("getFindingGroupImpactedCounts", () => {
it("should fall back to pass and fail counts when resources total is zero", () => {
// Given
const group = makeGroup({
resourcesTotal: 0,
resourcesFail: 0,
failCount: 3,
passCount: 2,
muted: false,
});
// When
const result = getFindingGroupImpactedCounts(group);
// Then
expect(result).toEqual({ impacted: 3, total: 5 });
});
it("should include manual findings in fallback counts when resources total is zero", () => {
// Given
const group = makeGroup({
resourcesTotal: 0,
resourcesFail: 0,
failCount: 3,
passCount: 2,
manualCount: 4,
muted: false,
});
// When
const result = getFindingGroupImpactedCounts(group);
// Then
expect(result).toEqual({ impacted: 3, total: 9 });
});
it("should include muted pass and fail counts in the denominator when the result is muted", () => {
// Given
const group = makeGroup({
resourcesTotal: 0,
resourcesFail: 0,
failCount: 3,
passCount: 2,
failMutedCount: 4,
passMutedCount: 1,
muted: true,
});
// When
const result = getFindingGroupImpactedCounts(group);
// Then
expect(result).toEqual({ impacted: 3, total: 10 });
});
it("should keep resource-based counts when resources total is available", () => {
// Given
const group = makeGroup({
resourcesTotal: 6,
resourcesFail: 4,
failCount: 2,
passCount: 1,
failMutedCount: 5,
passMutedCount: 3,
muted: true,
});
// When
const result = getFindingGroupImpactedCounts(group);
// Then
expect(result).toEqual({ impacted: 4, total: 6 });
});
});
describe("canDrillDownFindingGroup", () => {
it("should allow drill-down when resources exist", () => {
expect(
canDrillDownFindingGroup(
makeGroup({
resourcesTotal: 2,
failCount: 0,
}),
),
).toBe(true);
});
it("should keep zero-resource fallback groups non-expandable even when fallback counts are present", () => {
expect(
canDrillDownFindingGroup(
makeGroup({
resourcesTotal: 0,
failCount: 0,
passCount: 2,
manualCount: 1,
}),
),
).toBe(false);
});
it("should keep drill-down disabled for zero-resource groups when the displayed total is zero", () => {
expect(
canDrillDownFindingGroup(
makeGroup({
resourcesTotal: 0,
failCount: 0,
passCount: 0,
}),
),
).toBe(false);
});
});
describe("getFilteredFindingGroupDelta", () => {
it("falls back to the aggregate delta when no status filter is active", () => {
expect(

Some files were not shown because too many files have changed in this diff Show More