Compare commits

...

138 Commits

Author SHA1 Message Date
HugoPBrito
15e3c6c158 chore(sdk): add changelog entry for Cloudflare threading optimization 2026-02-06 12:52:17 +01:00
HugoPBrito
7647d08357 perf(sdk): parallelize Cloudflare zone API calls with threading
- Add __threading_call__ method to CloudflareService base class
- Refactor zone service to process zones in parallel
- Use ThreadPoolExecutor with 10 workers for concurrent API calls
2026-02-06 12:49:21 +01:00
HugoPBrito
ebd5283975 Merge branch 'master' of https://github.com/prowler-cloud/prowler into PROWLER-820-low-cloudflare-scan-is-slow-for-accounts-with-many-zones 2026-02-06 12:20:27 +01:00
Josema Camacho
ecc8eaf366 feat(skills): create new Attack Packs queries in openCypher (#9975) 2026-02-06 11:57:33 +01:00
Alan Buscaglia
619d1ffc62 chore(ci): remove legacy E2E workflow superseded by optimized v2 (#9977) 2026-02-06 11:20:10 +01:00
Alan Buscaglia
9e20cb2e5a fix(ui): optimize scans page polling to avoid redundant API calls (#9974)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2026-02-06 10:49:15 +01:00
Prowler Bot
cb76e77851 chore(api): Bump version to v1.20.0 (#9968)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-02-05 22:18:33 +01:00
Prowler Bot
a24f818547 chore(release): Bump version to v5.19.0 (#9964)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-02-05 22:17:38 +01:00
Prowler Bot
e07687ce67 docs: Update version to v5.18.0 (#9965)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-02-05 22:16:42 +01:00
Josema Camacho
d016039b18 chore(ui): prepare changelog for v5.18.0 release (#9962) 2026-02-05 13:07:51 +01:00
Daniel Barranquero
ac013ec6fc feat(docs): permission error while deploying docker (#9954) 2026-02-05 11:44:22 +01:00
HugoPBrito
00c57cea8d chore: re run actions 2026-01-27 10:39:13 +01:00
HugoPBrito
1b2c08649e chore: enhancements 2026-01-26 12:51:05 +01:00
HugoPBrito
47317680e5 fix(cloudflare): use zone_name as region for DNS records in CheckReportCloudflare 2026-01-20 15:31:56 +01:00
HugoPBrito
c7558a9f78 chore: remove accidentally committed uv.lock 2026-01-20 14:57:09 +01:00
HugoPBrito
86ecec542b Revert "chore: remove uv.lock files"
This reverts commit cb82a42035.
2026-01-20 14:54:55 +01:00
HugoPBrito
cb82a42035 chore: remove uv.lock files 2026-01-20 14:54:34 +01:00
HugoPBrito
2c69eb58c9 chore(cloudflare): remove unnecessary __init__.py files from tests 2026-01-20 14:54:15 +01:00
HugoPBrito
fcd9e2d40f fix(cloudflare): remove redundant provider assignment and single quotes from status messages 2026-01-20 14:52:00 +01:00
HugoPBrito
f0c69874e0 fix(cloudflare): correct SRV record target extraction for Cloudflare format 2026-01-20 14:49:30 +01:00
HugoPBrito
21444f7880 feat(cloudflare): expand dangling record check to include MX, NS, and SRV
- Dangling MX records can allow mail interception
- Dangling NS records can lead to subdomain delegation takeover
- Dangling SRV records expose service discovery vulnerabilities
2026-01-20 14:35:26 +01:00
HugoPBrito
3e3f56629f feat(cloudflare): expand wildcard check to include MX and SRV records
Wildcard MX records can allow mail interception for arbitrary subdomains.
Wildcard SRV records can expose services on any subdomain.
2026-01-20 14:35:21 +01:00
HugoPBrito
38f6ca9514 fix(cloudflare): get zones directly from API in DNS and Firewall services
This fixes an issue where DNS and Firewall services would have empty
records because they depended on zone_client which might not be
initialized when the services are loaded.
2026-01-20 14:35:14 +01:00
HugoPBrito
7f71b93eec fix(cloudflare): only match OWASP rulesets by name, not by phase 2026-01-20 14:35:08 +01:00
HugoPBrito
12752a5839 chore: add to changelog 2026-01-20 13:58:52 +01:00
HugoPBrito
eb76e2b986 Merge branch 'master' into cloudflare-pr4-dns-firewall-waf
Resolved conflicts by merging both features:
- Kept rate_limit_rules from master
- Kept firewall_rules and waf_rulesets from HEAD
- Updated CloudflareZone to include all three: rate_limit_rules, firewall_rules, waf_rulesets
- Set severity to 'high' for zone_rate_limiting_enabled check
2026-01-16 13:46:34 +01:00
HugoPBrito
42c56fa33a Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf
And add Resourcegroup
2026-01-14 15:43:49 +01:00
HugoPBrito
5dcdeed782 fix: add ResourceGroup to metadata 2026-01-14 15:23:24 +01:00
HugoPBrito
c35eaa8aa9 Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2026-01-14 15:17:45 +01:00
HugoPBrito
bca7c3a479 fix: nested folders and add ResourceGroup 2026-01-14 15:16:04 +01:00
Hugo Pereira Brito
e03fb88ca2 chore: update prowler/CHANGELOG.md
Co-authored-by: Andoni Alonso  <14891798+andoniaf@users.noreply.github.com>
2026-01-14 14:50:55 +01:00
HugoPBrito
cecf288d4f chore: add to changelog 2026-01-14 14:48:42 +01:00
HugoPBrito
8c4d251c51 feat: fix zone_bot_fight_mode_enabled and add zone_browser_integrity_check_enabled 2026-01-14 14:36:42 +01:00
HugoPBrito
98d4e08cbb zone_development_mode_disabled 2026-01-14 14:20:08 +01:00
HugoPBrito
3c004582d7 feat: enhance zone_rate_limiting_enabled 2026-01-14 14:19:42 +01:00
HugoPBrito
726aeec64b feat: enhance zone_challenge_passage_configured check 2026-01-14 14:19:08 +01:00
HugoPBrito
3d1a0b1270 feat: enhance zone_challenge_passage_configured check 2026-01-14 14:18:43 +01:00
HugoPBrito
b014fdbde3 chore: remove deprecated check 2026-01-14 13:23:42 +01:00
HugoPBrito
d693a34747 chore: rename zone records checks and add docstrings 2026-01-14 13:17:25 +01:00
HugoPBrito
a6860ffa7d Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf 2026-01-14 13:02:58 +01:00
HugoPBrito
d06af16a5c chore: rename zone records checks and add docstrings 2026-01-14 12:52:21 +01:00
Hugo Pereira Brito
0250bc3b0e Merge branch 'master' into cloudflare-pr2-tls-email-checks 2026-01-14 12:44:24 +01:00
HugoPBrito
ee1e6c35f2 Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2026-01-14 12:28:10 +01:00
HugoPBrito
2e552c65a5 chore: add docstrings 2026-01-14 12:19:56 +01:00
HugoPBrito
910514e964 chore: rename zone records checks 2026-01-14 12:02:50 +01:00
HugoPBrito
695a3466cd fix: filter_zones 2026-01-14 11:07:49 +01:00
HugoPBrito
7590ed7913 Merge branch 'master' of https://github.com/prowler-cloud/prowler into cloudflare-pr2-tls-email-checks 2026-01-14 11:02:28 +01:00
HugoPBrito
7fb82b0650 chore: run actions 2026-01-13 15:28:52 +01:00
HugoPBrito
fa21a300fb fix: linter 2026-01-13 11:48:57 +01:00
HugoPBrito
d51fa60e58 feat: add m365 mutelist to labeler.yaml 2026-01-13 11:46:43 +01:00
HugoPBrito
9d69d3a25f feat: add cloudflare to labeler.yaml 2026-01-13 11:44:33 +01:00
HugoPBrito
74da022e48 chore: add version badge to docs 2026-01-13 11:38:52 +01:00
HugoPBrito
da2b6d028b chore: add cloudflare to providers table 2026-01-13 11:38:30 +01:00
HugoPBrito
55d8a5d664 Merge branch 'master' of https://github.com/prowler-cloud/prowler into cloudflare-pr2-tls-email-checks 2026-01-13 11:33:03 +01:00
HugoPBrito
ecc1cf8b04 chore: add to changelog 2026-01-13 11:13:48 +01:00
HugoPBrito
394a62fab1 Merge branch 'master' into cloudflare-pr2-tls-email-checks 2026-01-13 11:10:52 +01:00
HugoPBrito
0ef68f55a1 Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2026-01-12 17:28:26 +01:00
HugoPBrito
94b14d1592 Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2026-01-12 17:28:15 +01:00
HugoPBrito
36ac1bc47e fix: revert ui changes 2026-01-12 17:27:42 +01:00
HugoPBrito
41de65ceaa Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2026-01-12 17:25:48 +01:00
HugoPBrito
12f95e3a19 fix: only accept strict 2026-01-12 17:25:09 +01:00
HugoPBrito
f5cada05c3 Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2026-01-12 16:25:06 +01:00
HugoPBrito
bfd8abdd89 Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2026-01-12 16:24:52 +01:00
HugoPBrito
99d2736116 chore: enhance status extended 2026-01-12 16:24:40 +01:00
HugoPBrito
b11e074f41 fix: zones_ssl_strict 2026-01-12 16:17:26 +01:00
HugoPBrito
a4e084afc9 chore: add docstrings 2026-01-12 16:17:11 +01:00
HugoPBrito
2bba3efbb1 fix: remove file 2026-01-12 14:09:57 +01:00
HugoPBrito
a2af18885d Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2026-01-12 14:04:13 +01:00
HugoPBrito
edcac4e4bc fix: api poetry lock eof 2026-01-12 11:35:08 +01:00
HugoPBrito
71ed16ee29 chore: restore api poetry lock 2026-01-12 11:33:46 +01:00
HugoPBrito
f2b2f0af95 Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf 2026-01-12 09:56:11 +01:00
HugoPBrito
90ace9265b Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2026-01-09 13:52:37 +01:00
HugoPBrito
773e4b23b5 Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2026-01-09 13:48:37 +01:00
HugoPBrito
9a22b1238a fix: remove config vars 2026-01-09 13:48:26 +01:00
HugoPBrito
80095603fd chore: correct CheckTitle 2026-01-09 13:35:39 +01:00
HugoPBrito
f234c16015 Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2026-01-09 11:51:50 +01:00
HugoPBrito
b3b1ee3252 chore: correct CheckTitle 2026-01-09 11:50:38 +01:00
HugoPBrito
3ba5d43c64 Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2026-01-09 11:47:13 +01:00
HugoPBrito
3720f1d235 fix: metadata 2026-01-09 11:46:58 +01:00
HugoPBrito
21868d7741 chore: resolve comment 2026-01-09 11:43:07 +01:00
HugoPBrito
ba4f93ec36 Merge branch 'master' of https://github.com/prowler-cloud/prowler into PROWLER-386-add-cloudflare-provider-to-cli 2026-01-09 11:36:16 +01:00
HugoPBrito
5b00846afe feat: enhance zone record checks 2026-01-09 11:35:56 +01:00
HugoPBrito
a5a5c35f90 Merge branch 'master' of https://github.com/prowler-cloud/prowler into PROWLER-386-add-cloudflare-provider-to-cli 2026-01-08 14:59:27 +01:00
HugoPBrito
5d894dcf94 fix: delete redundant check 2026-01-07 16:19:29 +01:00
HugoPBrito
5c0f9b19b0 chore: rename zone records checks 2026-01-07 16:18:31 +01:00
HugoPBrito
a7d8c8f679 feat: enhance zones_caa_record_exists and zones_universal_ssl_enabled 2026-01-07 16:09:24 +01:00
HugoPBrito
979ae1150c Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2026-01-07 15:36:26 +01:00
HugoPBrito
6d182f3efd chore: remove single quotes from status extended 2026-01-07 15:36:14 +01:00
HugoPBrito
8f937e4530 chore: add to changelog 2026-01-02 12:59:06 +00:00
HugoPBrito
9dd149d20a Merge branch 'master' of https://github.com/prowler-cloud/prowler into PROWLER-386-add-cloudflare-provider-to-cli 2026-01-02 10:04:22 +00:00
HugoPBrito
8e96f8361a Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf 2025-12-16 15:28:04 +01:00
HugoPBrito
0822692903 Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2025-12-16 15:27:50 +01:00
HugoPBrito
7c4b814b43 fix: zones_security_level check 2025-12-16 15:27:40 +01:00
HugoPBrito
c0b63e8564 feat: add tests 2025-12-16 14:49:33 +01:00
HugoPBrito
e3ae9b37d0 Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf 2025-12-16 13:59:02 +01:00
HugoPBrito
95087dcba7 feat: add tests 2025-12-16 13:39:31 +01:00
HugoPBrito
346c17b57d Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2025-12-16 13:32:42 +01:00
HugoPBrito
c8af89aa23 feat: add tests 2025-12-16 13:32:08 +01:00
HugoPBrito
1d386e7f27 Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf 2025-12-16 13:09:29 +01:00
HugoPBrito
8e2d2f00e6 fix: iteration and attribute read 2025-12-16 13:03:16 +01:00
HugoPBrito
22d1daf3c4 Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2025-12-16 12:42:46 +01:00
HugoPBrito
32f39e2366 fix: iteration and attribute read 2025-12-16 12:42:18 +01:00
HugoPBrito
c7050b1979 Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2025-12-16 12:35:28 +01:00
HugoPBrito
c612637a86 allow --list-checks without authentication 2025-12-16 12:35:15 +01:00
HugoPBrito
f6373387fd Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf 2025-12-16 12:26:42 +01:00
HugoPBrito
b390d6925b Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2025-12-16 12:26:25 +01:00
HugoPBrito
8d76e923cf Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2025-12-16 12:24:58 +01:00
HugoPBrito
25b732655c fix: remove terraform link 2025-12-16 12:24:35 +01:00
HugoPBrito
f8fe1a3655 fix: remove terraform links 2025-12-16 12:23:15 +01:00
HugoPBrito
05a07567b5 fix: logic from regional changes 2025-12-16 12:18:10 +01:00
HugoPBrito
9d658ef531 refactor: remove waf and firewall services
Checks and logic were moved to zones service
2025-12-16 12:00:11 +01:00
HugoPBrito
557b5aa480 feat: add more dns checks 2025-12-16 11:57:24 +01:00
HugoPBrito
c1140dfcc0 Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf 2025-12-16 11:16:07 +01:00
HugoPBrito
49e02bfbd1 chore: enhance metadata 2025-12-16 11:15:33 +01:00
HugoPBrito
f086106d53 Merge branch 'cloudflare-pr2-tls-email-checks' into cloudflare-pr3-bot-config-checks 2025-12-16 10:49:40 +01:00
HugoPBrito
55e55bc7a3 chore: enhance metadata 2025-12-16 10:49:24 +01:00
HugoPBrito
f9efe08984 Merge branch 'PROWLER-386-add-cloudflare-provider-to-cli' into cloudflare-pr2-tls-email-checks 2025-12-16 10:24:54 +01:00
HugoPBrito
5ff390e6fb fix: parser tests 2025-12-16 10:23:05 +01:00
HugoPBrito
72d2ff40f2 chore: remove authentication arguments 2025-12-16 08:20:17 +01:00
HugoPBrito
c667ff91be feat: add tests 2025-12-15 17:58:47 +01:00
HugoPBrito
1e31fe7441 fix: use pydantic v2 2025-12-15 17:34:25 +01:00
HugoPBrito
e4d1d647c5 fix: move logic from provider config to config.yaml 2025-12-15 17:29:35 +01:00
HugoPBrito
4d078aece5 fix: type checking from circular dependency
Forgot to remove
2025-12-15 17:21:14 +01:00
HugoPBrito
c2c73db4e7 fix: circular dependencies
Enhanced provider logic to avoid conflicting structure
2025-12-15 16:45:20 +01:00
HugoPBrito
6e736fcdac chore: add comment explaining fix 2025-12-10 15:18:46 +01:00
HugoPBrito
5bb5fbe468 chore: resolve github-advanced-security comments 2025-12-10 14:14:30 +01:00
HugoPBrito
c0da0f909f Merge branch 'master' of https://github.com/prowler-cloud/prowler into PROWLER-386-add-cloudflare-provider-to-cli 2025-12-10 14:01:26 +01:00
HugoPBrito
aa2b86f96d feat: add to docs 2025-12-10 13:50:25 +01:00
HugoPBrito
c005959835 fix: checks behavior and flags 2025-12-10 13:49:03 +01:00
HugoPBrito
7ea76a71f7 feat: max retries and avoid conflicting env vars
The Cloudflare SDK automatically reads credentials from environment variables, which causes conflicts. In that case, we prioritize the recommended token auth now.
2025-12-10 13:14:14 +01:00
HugoPBrito
e89e50a3de chore: add zone name to all status extended 2025-12-10 09:59:30 +01:00
HugoPBrito
943c5bf2ea fix: mutelist 2025-12-09 17:57:55 +01:00
HugoPBrito
bac6aa85c0 fix: zones service and checks 2025-12-09 15:28:16 +01:00
HugoPBrito
22540fc0ae chore: adapt metadata to new format 2025-12-09 14:53:58 +01:00
HugoPBrito
3a335583df feat(cloudflare): add DNS, Firewall, and WAF services with checks
Adds additional Cloudflare services to complete the provider:

DNS service:
- dns_records_proxied: Validates DNS records are proxied through Cloudflare

Firewall service:
- firewall_has_blocking_rules: Ensures firewall has blocking rules configured
- firewall_rate_limiting_configured: Validates rate limiting is configured

WAF service:
- waf_owasp_enabled: Validates OWASP ruleset is enabled
2025-12-03 12:36:46 +01:00
HugoPBrito
6668931db7 feat(cloudflare): add bot protection and configuration checks for zones
Adds 9 additional security checks for Cloudflare zones:

Bot protection:
- zones_bot_fight_mode_enabled: Validates Bot Fight Mode is enabled
- zones_waf_enabled: Ensures WAF is enabled for zone
- zones_rate_limiting_enabled: Validates rate limiting is configured

Security configuration:
- zones_challenge_passage_configured: Validates challenge passage settings
- zones_development_mode_disabled: Ensures development mode is disabled
- zones_always_online_disabled: Validates Always Online is properly configured

Content protection:
- zones_hotlink_protection_enabled: Ensures hotlink protection is enabled
- zones_server_side_excludes_enabled: Validates server-side excludes
- zones_ip_geolocation_enabled: Validates IP geolocation is enabled
2025-12-03 12:36:25 +01:00
HugoPBrito
6202b45a97 feat(cloudflare): add TLS/SSL and email security checks for zones
Adds 9 additional security checks for Cloudflare zones:

TLS/SSL checks:
- zones_tls_1_3_enabled: Validates TLS 1.3 is enabled
- zones_hsts_include_subdomains: Ensures HSTS includes subdomains
- zones_automatic_https_rewrites_enabled: Validates automatic HTTPS rewrites
- zones_universal_ssl_enabled: Ensures Universal SSL is enabled

Email security checks:
- zones_dmarc_record_exists: Validates DMARC record exists
- zones_spf_record_exists: Validates SPF record exists
- zones_caa_record_exists: Validates CAA record exists
- zones_email_obfuscation_enabled: Ensures email obfuscation is enabled

Security configuration:
- zones_security_level: Validates security level configuration
2025-12-03 12:35:46 +01:00
HugoPBrito
2636351f5d feat(cloudflare): add Cloudflare provider with zones service and critical security checks
Adds the Cloudflare provider to Prowler with:

Core infrastructure:
- CloudflareProvider with API token authentication
- Zones service for fetching zone configurations
- CLI integration (parser arguments, outputs)
- Mutelist support and config files

Critical security checks (5):
- zones_ssl_strict: Ensures SSL/TLS encryption mode is strict
- zones_min_tls_version_secure: Ensures minimum TLS 1.2
- zones_dnssec_enabled: Validates DNSSEC is enabled
- zones_https_redirect_enabled: Ensures automatic HTTPS redirect
- zones_hsts_enabled: Validates HTTP Strict Transport Security
2025-12-03 12:35:25 +01:00
24 changed files with 862 additions and 326 deletions

View File

@@ -14,7 +14,7 @@ ignored:
- "*.md"
- "**/*.md"
- mkdocs.yml
# Config files that don't affect runtime
- .gitignore
- .gitattributes
@@ -23,7 +23,7 @@ ignored:
- .backportrc.json
- CODEOWNERS
- LICENSE
# IDE/Editor configs
- .vscode/**
- .idea/**
@@ -31,10 +31,13 @@ ignored:
# Examples and contrib (not production code)
- examples/**
- contrib/**
# Skills (AI agent configs, not runtime)
- skills/**
# E2E setup helpers (not runnable tests)
- ui/tests/setups/**
# Permissions docs
- permissions/**
@@ -47,18 +50,18 @@ critical:
- prowler/config/**
- prowler/exceptions/**
- prowler/providers/common/**
# API Core
- api/src/backend/api/models.py
- api/src/backend/config/**
- api/src/backend/conftest.py
# UI Core
- ui/lib/**
- ui/types/**
- ui/config/**
- ui/middleware.ts
# CI/CD changes
- .github/workflows/**
- .github/test-impact.yml

View File

@@ -25,7 +25,7 @@ jobs:
e2e-tests:
needs: impact-analysis
if: |
github.repository == 'prowler-cloud/prowler' &&
github.repository == 'prowler-cloud/prowler' &&
(needs.impact-analysis.outputs.has-ui-e2e == 'true' || needs.impact-analysis.outputs.run-all == 'true')
runs-on: ubuntu-latest
env:
@@ -200,7 +200,14 @@ jobs:
# e.g., "ui/tests/providers/**" -> "tests/providers"
TEST_PATHS="${{ env.E2E_TEST_PATHS }}"
# Remove ui/ prefix and convert ** to empty (playwright handles recursion)
TEST_PATHS=$(echo "$TEST_PATHS" | sed 's|ui/||g' | sed 's|\*\*||g' | tr ' ' '\n' | sort -u | tr '\n' ' ')
TEST_PATHS=$(echo "$TEST_PATHS" | sed 's|ui/||g' | sed 's|\*\*||g' | tr ' ' '\n' | sort -u)
# Drop auth setup helpers (not runnable test suites)
TEST_PATHS=$(echo "$TEST_PATHS" | grep -v '^tests/setups/')
if [[ -z "$TEST_PATHS" ]]; then
echo "No runnable E2E test paths after filtering setups"
exit 0
fi
TEST_PATHS=$(echo "$TEST_PATHS" | tr '\n' ' ')
echo "Resolved test paths: $TEST_PATHS"
pnpm exec playwright test $TEST_PATHS
fi
@@ -222,8 +229,8 @@ jobs:
skip-e2e:
needs: impact-analysis
if: |
github.repository == 'prowler-cloud/prowler' &&
needs.impact-analysis.outputs.has-ui-e2e != 'true' &&
github.repository == 'prowler-cloud/prowler' &&
needs.impact-analysis.outputs.has-ui-e2e != 'true' &&
needs.impact-analysis.outputs.run-all != 'true'
runs-on: ubuntu-latest
steps:

View File

@@ -1,172 +0,0 @@
name: UI - E2E Tests
on:
pull_request:
branches:
- master
- "v5.*"
paths:
- '.github/workflows/ui-e2e-tests.yml'
- 'ui/**'
jobs:
e2e-tests:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
env:
AUTH_SECRET: 'fallback-ci-secret-for-testing'
AUTH_TRUST_HOST: true
NEXTAUTH_URL: 'http://localhost:3000'
NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
E2E_ADMIN_USER: ${{ secrets.E2E_ADMIN_USER }}
E2E_ADMIN_PASSWORD: ${{ secrets.E2E_ADMIN_PASSWORD }}
E2E_AWS_PROVIDER_ACCOUNT_ID: ${{ secrets.E2E_AWS_PROVIDER_ACCOUNT_ID }}
E2E_AWS_PROVIDER_ACCESS_KEY: ${{ secrets.E2E_AWS_PROVIDER_ACCESS_KEY }}
E2E_AWS_PROVIDER_SECRET_KEY: ${{ secrets.E2E_AWS_PROVIDER_SECRET_KEY }}
E2E_AWS_PROVIDER_ROLE_ARN: ${{ secrets.E2E_AWS_PROVIDER_ROLE_ARN }}
E2E_AZURE_SUBSCRIPTION_ID: ${{ secrets.E2E_AZURE_SUBSCRIPTION_ID }}
E2E_AZURE_CLIENT_ID: ${{ secrets.E2E_AZURE_CLIENT_ID }}
E2E_AZURE_SECRET_ID: ${{ secrets.E2E_AZURE_SECRET_ID }}
E2E_AZURE_TENANT_ID: ${{ secrets.E2E_AZURE_TENANT_ID }}
E2E_M365_DOMAIN_ID: ${{ secrets.E2E_M365_DOMAIN_ID }}
E2E_M365_CLIENT_ID: ${{ secrets.E2E_M365_CLIENT_ID }}
E2E_M365_SECRET_ID: ${{ secrets.E2E_M365_SECRET_ID }}
E2E_M365_TENANT_ID: ${{ secrets.E2E_M365_TENANT_ID }}
E2E_M365_CERTIFICATE_CONTENT: ${{ secrets.E2E_M365_CERTIFICATE_CONTENT }}
E2E_KUBERNETES_CONTEXT: 'kind-kind'
E2E_KUBERNETES_KUBECONFIG_PATH: /home/runner/.kube/config
E2E_GCP_BASE64_SERVICE_ACCOUNT_KEY: ${{ secrets.E2E_GCP_BASE64_SERVICE_ACCOUNT_KEY }}
E2E_GCP_PROJECT_ID: ${{ secrets.E2E_GCP_PROJECT_ID }}
E2E_GITHUB_APP_ID: ${{ secrets.E2E_GITHUB_APP_ID }}
E2E_GITHUB_BASE64_APP_PRIVATE_KEY: ${{ secrets.E2E_GITHUB_BASE64_APP_PRIVATE_KEY }}
E2E_GITHUB_USERNAME: ${{ secrets.E2E_GITHUB_USERNAME }}
E2E_GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.E2E_GITHUB_PERSONAL_ACCESS_TOKEN }}
E2E_GITHUB_ORGANIZATION: ${{ secrets.E2E_GITHUB_ORGANIZATION }}
E2E_GITHUB_ORGANIZATION_ACCESS_TOKEN: ${{ secrets.E2E_GITHUB_ORGANIZATION_ACCESS_TOKEN }}
E2E_ORGANIZATION_ID: ${{ secrets.E2E_ORGANIZATION_ID }}
E2E_OCI_TENANCY_ID: ${{ secrets.E2E_OCI_TENANCY_ID }}
E2E_OCI_USER_ID: ${{ secrets.E2E_OCI_USER_ID }}
E2E_OCI_FINGERPRINT: ${{ secrets.E2E_OCI_FINGERPRINT }}
E2E_OCI_KEY_CONTENT: ${{ secrets.E2E_OCI_KEY_CONTENT }}
E2E_OCI_REGION: ${{ secrets.E2E_OCI_REGION }}
E2E_NEW_USER_PASSWORD: ${{ secrets.E2E_NEW_USER_PASSWORD }}
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1
with:
cluster_name: kind
- name: Modify kubeconfig
run: |
# Modify the kubeconfig to use the kind cluster server to https://kind-control-plane:6443
# from worker service into docker-compose.yml
kubectl config set-cluster kind-kind --server=https://kind-control-plane:6443
kubectl config view
- name: Add network kind to docker compose
run: |
# Add the network kind to the docker compose to interconnect to kind cluster
yq -i '.networks.kind.external = true' docker-compose.yml
# Add network kind to worker service and default network too
yq -i '.services.worker.networks = ["kind","default"]' docker-compose.yml
- name: Fix API data directory permissions
run: docker run --rm -v $(pwd)/_data/api:/data alpine chown -R 1000:1000 /data
- name: Add AWS credentials for testing AWS SDK Default Adding Provider
run: |
echo "Adding AWS credentials for testing AWS SDK Default Adding Provider..."
echo "AWS_ACCESS_KEY_ID=${{ secrets.E2E_AWS_PROVIDER_ACCESS_KEY }}" >> .env
echo "AWS_SECRET_ACCESS_KEY=${{ secrets.E2E_AWS_PROVIDER_SECRET_KEY }}" >> .env
- name: Start API services
run: |
# Override docker-compose image tag to use latest instead of stable
# This overrides any PROWLER_API_VERSION set in .env file
export PROWLER_API_VERSION=latest
echo "Using PROWLER_API_VERSION=${PROWLER_API_VERSION}"
docker compose up -d api worker worker-beat
- name: Wait for API to be ready
run: |
echo "Waiting for prowler-api..."
timeout=150 # 5 minutes max
elapsed=0
while [ $elapsed -lt $timeout ]; do
if curl -s ${NEXT_PUBLIC_API_BASE_URL}/docs >/dev/null 2>&1; then
echo "Prowler API is ready!"
exit 0
fi
echo "Waiting for prowler-api... (${elapsed}s elapsed)"
sleep 5
elapsed=$((elapsed + 5))
done
echo "Timeout waiting for prowler-api to start"
exit 1
- name: Load database fixtures for E2E tests
run: |
docker compose exec -T api sh -c '
echo "Loading all fixtures from api/fixtures/dev/..."
for fixture in api/fixtures/dev/*.json; do
if [ -f "$fixture" ]; then
echo "Loading $fixture"
poetry run python manage.py loaddata "$fixture" --database admin
fi
done
echo "All database fixtures loaded successfully!"
'
- name: Setup Node.js environment
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: '24.13.0'
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10
run_install: false
- name: Get pnpm store directory
shell: bash
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Setup pnpm and Next.js cache
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: |
${{ env.STORE_PATH }}
./ui/node_modules
./ui/.next/cache
key: ${{ runner.os }}-pnpm-nextjs-${{ hashFiles('ui/pnpm-lock.yaml') }}-${{ hashFiles('ui/**/*.ts', 'ui/**/*.tsx', 'ui/**/*.js', 'ui/**/*.jsx') }}
restore-keys: |
${{ runner.os }}-pnpm-nextjs-${{ hashFiles('ui/pnpm-lock.yaml') }}-
${{ runner.os }}-pnpm-nextjs-
- name: Install UI dependencies
working-directory: ./ui
run: pnpm install --frozen-lockfile --prefer-offline
- name: Build UI application
working-directory: ./ui
run: pnpm run build
- name: Cache Playwright browsers
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: playwright-cache
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-playwright-
- name: Install Playwright browsers
working-directory: ./ui
if: steps.playwright-cache.outputs.cache-hit != 'true'
run: pnpm run test:e2e:install
- name: Run E2E tests
working-directory: ./ui
run: pnpm run test:e2e
- name: Upload test reports
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: failure()
with:
name: playwright-report
path: ui/playwright-report/
retention-days: 30
- name: Cleanup services
if: always()
run: |
echo "Shutting down services..."
docker compose down -v || true
echo "Cleanup completed"

View File

@@ -44,6 +44,7 @@ Use these skills for detailed patterns on-demand:
| `prowler-commit` | Professional commits (conventional-commits) | [SKILL.md](skills/prowler-commit/SKILL.md) |
| `prowler-pr` | Pull request conventions | [SKILL.md](skills/prowler-pr/SKILL.md) |
| `prowler-docs` | Documentation style guide | [SKILL.md](skills/prowler-docs/SKILL.md) |
| `prowler-attack-paths-query` | Create Attack Paths openCypher queries | [SKILL.md](skills/prowler-attack-paths-query/SKILL.md) |
| `skill-creator` | Create new AI agent skills | [SKILL.md](skills/skill-creator/SKILL.md) |
### Auto-invoke Skills
@@ -56,6 +57,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
| Adding DRF pagination or permissions | `django-drf` |
| Adding new providers | `prowler-provider` |
| Adding services to existing providers | `prowler-provider` |
| Adding privilege escalation detection queries | `prowler-attack-paths-query` |
| After creating/modifying a skill | `skill-sync` |
| App Router / Server Actions | `nextjs-15` |
| Building AI chat features | `ai-sdk-5` |
@@ -63,6 +65,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
| Create PR that requires changelog entry | `prowler-changelog` |
| Create a PR with gh pr create | `prowler-pr` |
| Creating API endpoints | `jsonapi` |
| Creating Attack Paths queries | `prowler-attack-paths-query` |
| Creating ViewSets, serializers, or filters in api/ | `django-drf` |
| Creating Zod schemas | `zod-4` |
| Creating a git commit | `prowler-commit` |
@@ -92,6 +95,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
| Understand changelog gate and no-changelog label behavior | `prowler-ci` |
| Understand review ownership with CODEOWNERS | `prowler-pr` |
| Update CHANGELOG.md in any component | `prowler-changelog` |
| Updating existing Attack Paths queries | `prowler-attack-paths-query` |
| Updating existing checks and metadata | `prowler-sdk-check` |
| Using Zustand stores | `zustand-5` |
| Working on MCP server tools | `prowler-mcp` |

View File

@@ -3,6 +3,7 @@
> **Skills Reference**: For detailed patterns, use these skills:
> - [`prowler-api`](../skills/prowler-api/SKILL.md) - Models, Serializers, Views, RLS patterns
> - [`prowler-test-api`](../skills/prowler-test-api/SKILL.md) - Testing patterns (pytest-django)
> - [`prowler-attack-paths-query`](../skills/prowler-attack-paths-query/SKILL.md) - Attack Paths openCypher queries
> - [`django-drf`](../skills/django-drf/SKILL.md) - Generic DRF patterns
> - [`jsonapi`](../skills/jsonapi/SKILL.md) - Strict JSON:API v1.1 spec compliance
> - [`pytest`](../skills/pytest/SKILL.md) - Generic pytest patterns
@@ -15,9 +16,11 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|--------|-------|
| Add changelog entry for a PR or feature | `prowler-changelog` |
| Adding DRF pagination or permissions | `django-drf` |
| Adding privilege escalation detection queries | `prowler-attack-paths-query` |
| Committing changes | `prowler-commit` |
| Create PR that requires changelog entry | `prowler-changelog` |
| Creating API endpoints | `jsonapi` |
| Creating Attack Paths queries | `prowler-attack-paths-query` |
| Creating ViewSets, serializers, or filters in api/ | `django-drf` |
| Creating a git commit | `prowler-commit` |
| Creating/modifying models, views, serializers | `prowler-api` |
@@ -27,6 +30,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
| Reviewing JSON:API compliance | `jsonapi` |
| Testing RLS tenant isolation | `prowler-test-api` |
| Update CHANGELOG.md in any component | `prowler-changelog` |
| Updating existing Attack Paths queries | `prowler-attack-paths-query` |
| Writing Prowler API tests | `prowler-test-api` |
| Writing Python tests with pytest | `pytest` |

View File

@@ -49,7 +49,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.19.0"
version = "1.20.0"
[project.scripts]
celery = "src.backend.config.settings.celery"

View File

@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.19.0
version: 1.20.0
description: |-
Prowler API specification.

View File

@@ -392,7 +392,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.19.0"
spectacular_settings.VERSION = "1.20.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)

View File

@@ -115,8 +115,8 @@ To update the environment file:
Edit the `.env` file and change version values:
```env
PROWLER_UI_VERSION="5.17.0"
PROWLER_API_VERSION="5.17.0"
PROWLER_UI_VERSION="5.18.0"
PROWLER_API_VERSION="5.18.0"
```
<Note>

View File

@@ -86,3 +86,81 @@ docker compose up -d
<Note>
We are evaluating adding these values to the default `docker-compose.yml` to avoid this issue in future releases.
</Note>
### API Container Fails to Start with JWT Key Permission Error
See [GitHub Issue #8897](https://github.com/prowler-cloud/prowler/issues/8897) for more details.
When deploying Prowler via Docker Compose on a fresh installation, the API container may fail to start with permission errors related to JWT RSA key file generation. This issue is commonly observed on Linux systems (Ubuntu, Debian, cloud VMs) and Windows with Docker Desktop, but not typically on macOS.
**Error Message:**
Checking the API container logs reveals:
```bash
PermissionError: [Errno 13] Permission denied: '/home/prowler/.config/prowler-api/jwt_private.pem'
```
Or:
```bash
Token generation failed due to invalid key configuration. Provide valid DJANGO_TOKEN_SIGNING_KEY and DJANGO_TOKEN_VERIFYING_KEY in the environment.
```
**Root Cause:**
This permission mismatch occurs due to UID (User ID) mapping between the host system and Docker containers:
* The API container runs as user `prowler` with UID/GID 1000
* In environments like WSL2, the host user may have a different UID than the container user
* Docker creates the mounted volume directory `./_data/api` on the host, often with the host user's UID or root ownership (UID 0)
* When the application attempts to write JWT key files (`jwt_private.pem` and `jwt_public.pem`), the operation fails because the container's UID 1000 does not have write permissions to the host-owned directory
**Solutions:**
There are two approaches to resolve this issue:
**Option 1: Fix Volume Ownership (Resolve UID Mapping)**
Change the ownership of the volume directory to match the container user's UID (1000):
```bash
# The container user 'prowler' has UID 1000
# This command changes the directory ownership to UID 1000
sudo chown -R 1000:1000 ./_data/api
```
Then start Docker Compose:
```bash
docker compose up -d
```
This solution directly addresses the UID mapping mismatch by ensuring the volume directory is owned by the same UID that the container process uses.
**Option 2: Use Environment Variables (Skip File Storage)**
Generate JWT RSA keys manually and provide them via environment variables to bypass file-based key storage entirely:
```bash
# Generate RSA keys
openssl genrsa -out jwt_private.pem 4096
openssl rsa -in jwt_private.pem -pubout -out jwt_public.pem
# Extract key content (removes headers/footers and newlines)
PRIVATE_KEY=$(awk 'NF {sub(/\r/, ""); printf "%s\\n",$0;}' jwt_private.pem)
PUBLIC_KEY=$(awk 'NF {sub(/\r/, ""); printf "%s\\n",$0;}' jwt_public.pem)
```
Add the following to the `.env` file:
```env
DJANGO_TOKEN_SIGNING_KEY=<content of jwt_private.pem>
DJANGO_TOKEN_VERIFYING_KEY=<content of jwt_public.pem>
```
When these environment variables are set, the API will use them directly instead of attempting to write key files to the mounted volume.
<Note>
A fix addressing this permission issue is being evaluated in [PR #9953](https://github.com/prowler-cloud/prowler/pull/9953).
</Note>

View File

@@ -2,6 +2,18 @@
All notable changes to the **Prowler SDK** are documented in this file.
## [5.19.0] (Prowler UNRELEASED)
### 🚀 Added
- AI Skills: Added a skill for creating new Attack Paths queries in openCypher, compatible with Neo4j and Neptune [(#9975)](https://github.com/prowler-cloud/prowler/pull/9975)
### 🔄 Changed
- Parallelize Cloudflare zone API calls with threading to improve scan performance [(#9982)](https://github.com/prowler-cloud/prowler/pull/9982)
---
## [5.18.0] (Prowler v5.18.0)
### 🚀 Added

View File

@@ -38,7 +38,7 @@ class _MutableTimestamp:
timestamp = _MutableTimestamp(datetime.today())
timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
prowler_version = "5.18.0"
prowler_version = "5.19.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

View File

@@ -1,5 +1,9 @@
from concurrent.futures import ThreadPoolExecutor, as_completed
from prowler.providers.cloudflare.cloudflare_provider import CloudflareProvider
MAX_WORKERS = 10
class CloudflareService:
"""Base class for Cloudflare services to share provider context."""
@@ -10,3 +14,23 @@ class CloudflareService:
self.audit_config = provider.audit_config
self.fixer_config = provider.fixer_config
self.service = service.lower() if not service.islower() else service
# Thread pool for __threading_call__
self.thread_pool = ThreadPoolExecutor(max_workers=MAX_WORKERS)
def __threading_call__(self, call, iterator):
"""Execute a function across multiple items using threading.

Submits ``call(item)`` for every item in ``iterator`` to the service's
shared ``self.thread_pool`` and collects the non-None results.

Args:
call: Callable invoked once per item (e.g. a per-zone fetcher).
iterator: Iterable of items; materialized into a list before submission.

Returns:
list: The non-None return values of ``call``, in completion order
(NOT input order — callers must not rely on ordering).
"""
# Materialize the iterable so it can be safely submitted exactly once.
items = list(iterator) if not isinstance(iterator, list) else iterator
# Map each future back to its originating item (the item value itself is
# not read back; the dict is only iterated via as_completed below).
futures = {self.thread_pool.submit(call, item): item for item in items}
results = []
for future in as_completed(futures):
try:
result = future.result()
# Workers that mutate state in place return None and are skipped.
if result is not None:
results.append(result)
except Exception:
# NOTE(review): worker exceptions are silently dropped here.
# Presumably the per-item callables log their own failures (the
# zone fetchers below do) — confirm, otherwise errors from other
# callers vanish without a trace.
pass
return results

View File

@@ -49,13 +49,13 @@ class Zone(CloudflareService):
super().__init__(__class__.__name__, provider)
self.zones: dict[str, "CloudflareZone"] = {}
self._list_zones()
self._get_zones_settings()
self._get_zones_dnssec()
self._get_zones_universal_ssl()
self._get_zones_rate_limit_rules()
self._get_zones_bot_management()
self._get_zones_firewall_rules()
self._get_zones_waf_rulesets()
self.__threading_call__(self._get_zone_settings_threaded, self.zones.values())
self.__threading_call__(self._get_zone_dnssec, self.zones.values())
self.__threading_call__(self._get_zone_universal_ssl, self.zones.values())
self.__threading_call__(self._get_zone_rate_limit_rules, self.zones.values())
self.__threading_call__(self._get_zone_bot_management, self.zones.values())
self.__threading_call__(self._get_zone_firewall_rules, self.zones.values())
self.__threading_call__(self._get_zone_waf_rulesets, self.zones.values())
def _list_zones(self) -> None:
"""List all Cloudflare zones with their basic information."""
@@ -124,110 +124,89 @@ class Zone(CloudflareService):
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zones_settings(self) -> None:
"""Get settings for all zones."""
logger.info("Zone - Getting zone settings...")
for zone in self.zones.values():
try:
zone.settings = self._get_zone_settings(zone.id)
except Exception as error:
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zone_settings_threaded(self, zone: "CloudflareZone") -> None:
"""Fetch and store the settings for a single zone.

Intended to be run concurrently via ``__threading_call__``; it only
writes ``zone.settings`` on the zone it was given, so concurrent calls
for different zones do not share mutable state.

Args:
zone: The CloudflareZone whose ``settings`` attribute is populated.
"""
try:
zone.settings = self._get_zone_settings(zone.id)
except Exception as error:
# Log and continue so a single zone's failure doesn't abort the scan.
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zones_dnssec(self) -> None:
"""Get DNSSEC status for all zones."""
logger.info("Zone - Getting DNSSEC status...")
for zone in self.zones.values():
try:
dnssec = self.client.dns.dnssec.get(zone_id=zone.id)
zone.dnssec_status = getattr(dnssec, "status", None)
except Exception as error:
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zone_dnssec(self, zone: "CloudflareZone") -> None:
"""Fetch the DNSSEC status for a single zone and store it on the zone.

Run concurrently via ``__threading_call__``; mutates only the given
zone object.

Args:
zone: The CloudflareZone whose ``dnssec_status`` is populated.
"""
try:
dnssec = self.client.dns.dnssec.get(zone_id=zone.id)
# Default of None keeps dnssec_status unset when the API response
# carries no "status" attribute.
zone.dnssec_status = getattr(dnssec, "status", None)
except Exception as error:
# Log and continue so a single zone's failure doesn't abort the scan.
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zones_universal_ssl(self) -> None:
"""Get Universal SSL settings for all zones."""
logger.info("Zone - Getting Universal SSL settings...")
for zone in self.zones.values():
try:
universal_ssl = self.client.ssl.universal.settings.get(zone_id=zone.id)
zone.settings.universal_ssl_enabled = getattr(
universal_ssl, "enabled", False
)
except Exception as error:
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zone_universal_ssl(self, zone: "CloudflareZone") -> None:
"""Fetch Universal SSL settings for a single zone.

Run concurrently via ``__threading_call__``; writes only
``zone.settings.universal_ssl_enabled``.

Args:
zone: The CloudflareZone whose SSL flag is populated.
"""
try:
universal_ssl = self.client.ssl.universal.settings.get(zone_id=zone.id)
# Treat a missing "enabled" attribute as disabled (False).
zone.settings.universal_ssl_enabled = getattr(
universal_ssl, "enabled", False
)
except Exception as error:
# Log and continue so a single zone's failure doesn't abort the scan.
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zones_rate_limit_rules(self) -> None:
"""Get rate limiting rules for all zones."""
logger.info("Zone - Getting rate limit rules...")
for zone in self.zones.values():
try:
seen_ruleset_ids: set[str] = set()
for ruleset in self.client.rulesets.list(zone_id=zone.id):
ruleset_id = getattr(ruleset, "id", "")
if ruleset_id in seen_ruleset_ids:
break
seen_ruleset_ids.add(ruleset_id)
def _get_zone_rate_limit_rules(self, zone: "CloudflareZone") -> None:
"""Get rate limiting rules for a single zone."""
try:
seen_ruleset_ids: set[str] = set()
for ruleset in self.client.rulesets.list(zone_id=zone.id):
ruleset_id = getattr(ruleset, "id", "")
if ruleset_id in seen_ruleset_ids:
break
seen_ruleset_ids.add(ruleset_id)
phase = getattr(ruleset, "phase", "")
if phase == "http_ratelimit":
try:
ruleset_detail = self.client.rulesets.get(
ruleset_id=ruleset_id, zone_id=zone.id
)
rules = getattr(ruleset_detail, "rules", []) or []
seen_rule_ids: set[str] = set()
for rule in rules:
rule_id = getattr(rule, "id", "")
if rule_id in seen_rule_ids:
break
seen_rule_ids.add(rule_id)
zone.rate_limit_rules.append(
CloudflareRateLimitRule(
id=rule_id,
description=getattr(rule, "description", None),
action=getattr(rule, "action", None),
enabled=getattr(rule, "enabled", True),
expression=getattr(rule, "expression", None),
)
phase = getattr(ruleset, "phase", "")
if phase == "http_ratelimit":
try:
ruleset_detail = self.client.rulesets.get(
ruleset_id=ruleset_id, zone_id=zone.id
)
rules = getattr(ruleset_detail, "rules", []) or []
seen_rule_ids: set[str] = set()
for rule in rules:
rule_id = getattr(rule, "id", "")
if rule_id in seen_rule_ids:
break
seen_rule_ids.add(rule_id)
zone.rate_limit_rules.append(
CloudflareRateLimitRule(
id=rule_id,
description=getattr(rule, "description", None),
action=getattr(rule, "action", None),
enabled=getattr(rule, "enabled", True),
expression=getattr(rule, "expression", None),
)
except Exception as error:
logger.debug(
f"{zone.id} ruleset {ruleset_id} -- {error.__class__.__name__}: {error}"
)
except Exception as error:
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.debug(
f"{zone.id} ruleset {ruleset_id} -- {error.__class__.__name__}: {error}"
)
except Exception as error:
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zones_bot_management(self) -> None:
"""Get Bot Management settings for all zones."""
logger.info("Zone - Getting Bot Management settings...")
for zone in self.zones.values():
try:
bot_management = self.client.bot_management.get(zone_id=zone.id)
zone.settings.bot_fight_mode_enabled = getattr(
bot_management, "fight_mode", False
)
except Exception as error:
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zones_firewall_rules(self) -> None:
"""Get firewall rules for all zones."""
logger.info("Zone - Getting firewall rules...")
for zone in self.zones.values():
try:
self._get_zone_firewall_rules(zone)
except Exception as error:
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zone_bot_management(self, zone: "CloudflareZone") -> None:
"""Fetch Bot Management settings for a single zone.

Run concurrently via ``__threading_call__``; writes only
``zone.settings.bot_fight_mode_enabled``.

Args:
zone: The CloudflareZone whose bot-fight-mode flag is populated.
"""
try:
bot_management = self.client.bot_management.get(zone_id=zone.id)
# Treat a missing "fight_mode" attribute as disabled (False).
zone.settings.bot_fight_mode_enabled = getattr(
bot_management, "fight_mode", False
)
except Exception as error:
# Log and continue so a single zone's failure doesn't abort the scan.
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zone_firewall_rules(self, zone: "CloudflareZone") -> None:
"""List firewall rules from custom rulesets for a zone."""
@@ -282,17 +261,6 @@ class Zone(CloudflareService):
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zones_waf_rulesets(self) -> None:
"""Get WAF rulesets for all zones."""
logger.info("Zone - Getting WAF rulesets...")
for zone in self.zones.values():
try:
self._get_zone_waf_rulesets(zone)
except Exception as error:
logger.error(
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_zone_waf_rulesets(self, zone: "CloudflareZone") -> None:
"""List WAF rulesets for a zone using the rulesets API."""
seen_ids: set[str] = set()

View File

@@ -92,7 +92,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">3.9.1,<3.13"
version = "5.18.0"
version = "5.19.0"
[project.scripts]
prowler = "prowler.__main__:prowler"

View File

@@ -77,6 +77,7 @@ Patterns tailored for Prowler development:
| `prowler-provider` | Add new cloud providers |
| `prowler-pr` | Pull request conventions |
| `prowler-docs` | Documentation style guide |
| `prowler-attack-paths-query` | Create Attack Paths openCypher queries |
### Meta Skills

View File

@@ -0,0 +1,479 @@
---
name: prowler-attack-paths-query
description: >
Creates Prowler Attack Paths openCypher queries for graph analysis (compatible with Neo4j and Neptune).
Trigger: When creating or updating Attack Paths queries that detect privilege escalation paths,
network exposure, or security misconfigurations in cloud environments.
license: Apache-2.0
metadata:
author: prowler-cloud
version: "1.0"
scope: [root, api]
auto_invoke:
- "Creating Attack Paths queries"
- "Updating existing Attack Paths queries"
- "Adding privilege escalation detection queries"
allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, Task
---
## Overview
Attack Paths queries are openCypher queries that analyze cloud infrastructure graphs
(ingested via Cartography) to detect security risks like privilege escalation paths,
network exposure, and misconfigurations.
Queries are written in **openCypher Version 9** to ensure compatibility with both Neo4j and Amazon Neptune.
---
## Input Sources
Queries can be created from:
1. **pathfinding.cloud ID** (e.g., `ECS-001`, `GLUE-001`)
- The JSON index contains: `id`, `name`, `description`, `services`, `permissions`, `exploitationSteps`, `prerequisites`, etc.
- Reference: https://github.com/DataDog/pathfinding.cloud
**Fetching a single path by ID** — The aggregated `paths.json` is too large for WebFetch
(content gets truncated). Use Bash with `curl` and a JSON parser instead:
Prefer `jq` (concise), fall back to `python3` (guaranteed in this Python project):
```bash
# With jq
curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
| jq '.[] | select(.id == "ecs-002")'
# With python3 (fallback)
curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
| python3 -c "import json,sys; print(json.dumps(next((p for p in json.load(sys.stdin) if p['id']=='ecs-002'), None), indent=2))"
```
2. **Listing Available Attack Paths**
- Use Bash to list available paths from the JSON index:
```bash
# List all path IDs and names (jq)
curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
| jq -r '.[] | "\(.id): \(.name)"'
# List all path IDs and names (python3 fallback)
curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
| python3 -c "import json,sys; [print(f\"{p['id']}: {p['name']}\") for p in json.load(sys.stdin)]"
# List paths filtered by service prefix
curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
| jq -r '.[] | select(.id | startswith("ecs")) | "\(.id): \(.name)"'
```
3. **Natural Language Description**
- User describes the Attack Paths in plain language
- Agent maps to appropriate openCypher patterns
---
## Query Structure
### File Location
```
api/src/backend/api/attack_paths/queries/{provider}.py
```
Example: `api/src/backend/api/attack_paths/queries/aws.py`
### Query Definition Pattern
```python
from api.attack_paths.queries.types import (
AttackPathsQueryDefinition,
AttackPathsQueryParameterDefinition,
)
from tasks.jobs.attack_paths.config import PROWLER_FINDING_LABEL
# {REFERENCE_ID} (e.g., EC2-001, GLUE-001)
AWS_{QUERY_NAME} = AttackPathsQueryDefinition(
id="aws-{kebab-case-name}",
name="Privilege Escalation: {permission1} + {permission2}",
description="{Detailed description of the Attack Paths}.",
provider="aws",
cypher=f"""
// Find principals with {permission1}
MATCH path_principal = (aws:AWSAccount {{id: $provider_uid}})--(principal:AWSPrincipal)--(policy:AWSPolicy)--(stmt:AWSPolicyStatement)
WHERE stmt.effect = 'Allow'
AND any(action IN stmt.action WHERE
toLower(action) = '{permission1_lowercase}'
OR toLower(action) = '{service}:*'
OR action = '*'
)
// Find {permission2}
MATCH (principal)--(policy2:AWSPolicy)--(stmt2:AWSPolicyStatement)
WHERE stmt2.effect = 'Allow'
AND any(action IN stmt2.action WHERE
toLower(action) = '{permission2_lowercase}'
OR toLower(action) = '{service2}:*'
OR action = '*'
)
// Find target resources
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: '{service}.amazonaws.com'}})
WHERE any(resource IN stmt.resource WHERE
resource = '*'
OR target_role.arn CONTAINS resource
OR resource CONTAINS target_role.name
)
UNWIND nodes(path_principal) + nodes(path_target) as n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN path_principal, path_target,
collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
parameters=[],
)
```
### Register in Query List
Add to the `{PROVIDER}_QUERIES` list at the bottom of the file:
```python
AWS_QUERIES: list[AttackPathsQueryDefinition] = [
# ... existing queries ...
AWS_{NEW_QUERY_NAME}, # Add here
]
```
---
## Step-by-Step Creation Process
### 1. Read the Queries Module
**FIRST**, read all files in the queries module to understand the structure:
```
api/src/backend/api/attack_paths/queries/
├── __init__.py # Module exports
├── types.py # AttackPathsQueryDefinition, AttackPathsQueryParameterDefinition
├── registry.py # Query registry logic
└── {provider}.py # Provider-specific queries (e.g., aws.py)
```
Read these files to learn:
- Type definitions and available fields
- How queries are registered
- Current query patterns, style, and naming conventions
### 2. Determine Schema Source
Check the Cartography dependency in `api/pyproject.toml`:
```bash
grep cartography api/pyproject.toml
```
Parse the dependency to determine the schema source:
**If git-based dependency** (e.g., `cartography @ git+https://github.com/prowler-cloud/cartography@0.126.1`):
- Extract the repository (e.g., `prowler-cloud/cartography`)
- Extract the version/tag (e.g., `0.126.1`)
- Fetch schema from that repository at that tag
**If PyPI dependency** (e.g., `cartography = "^0.126.0"` or `cartography>=0.126.0`):
- Extract the version (e.g., `0.126.0`)
- Use the official `cartography-cncf` repository
**Schema URL patterns** (ALWAYS use the specific version tag, not master/main):
```
# Official Cartography (cartography-cncf)
https://raw.githubusercontent.com/cartography-cncf/cartography/refs/tags/{version}/docs/root/modules/{provider}/schema.md
# Prowler fork (prowler-cloud)
https://raw.githubusercontent.com/prowler-cloud/cartography/refs/tags/{version}/docs/root/modules/{provider}/schema.md
```
**Examples**:
```bash
# For prowler-cloud/cartography@0.126.1 (git), fetch AWS schema:
https://raw.githubusercontent.com/prowler-cloud/cartography/refs/tags/0.126.1/docs/root/modules/aws/schema.md
# For cartography = "^0.126.0" (PyPI), fetch AWS schema:
https://raw.githubusercontent.com/cartography-cncf/cartography/refs/tags/0.126.0/docs/root/modules/aws/schema.md
```
**IMPORTANT**: Always match the schema version to the dependency version in `pyproject.toml`. Using master/main may reference node labels or properties that don't exist in the deployed version.
**Additional Prowler Labels**: The Attack Paths sync task adds extra labels:
- `ProwlerFinding` - Prowler finding nodes with `status`, `provider_uid` properties
- `ProviderResource` - Generic resource marker
- `{Provider}Resource` - Provider-specific marker (e.g., `AWSResource`)
These are defined in `api/src/backend/tasks/jobs/attack_paths/config.py`.
### 3. Consult the Schema for Available Data
Use the Cartography schema to discover:
- What node labels exist for the target resources
- What properties are available on those nodes
- What relationships connect the nodes
This informs query design by showing what data is actually available to query.
### 4. Create Query Definition
Use the standard pattern (see above) with:
- **id**: Auto-generated as `{provider}-{kebab-case-description}`
- **name**: Human-readable, e.g., "Privilege Escalation: {perm1} + {perm2}"
- **description**: Explain the attack vector and impact
- **provider**: Provider identifier (aws, azure, gcp, kubernetes, github)
- **cypher**: The openCypher query with proper escaping
- **parameters**: Optional list of user-provided parameters (use `parameters=[]` if none needed)
### 5. Add Query to Provider List
Add the constant to the `{PROVIDER}_QUERIES` list.
---
## Query Naming Conventions
### Query ID
```
{provider}-{category}-{description}
```
Examples:
- `aws-ec2-privesc-passrole-iam`
- `aws-iam-privesc-attach-role-policy-assume-role`
- `aws-rds-unencrypted-storage`
### Query Constant Name
```
{PROVIDER}_{CATEGORY}_{DESCRIPTION}
```
Examples:
- `AWS_EC2_PRIVESC_PASSROLE_IAM`
- `AWS_IAM_PRIVESC_ATTACH_ROLE_POLICY_ASSUME_ROLE`
- `AWS_RDS_UNENCRYPTED_STORAGE`
---
## Query Categories
| Category | Description | Example |
| -------------------- | ------------------------------ | ------------------------- |
| Basic Resource | List resources with properties | RDS instances, S3 buckets |
| Network Exposure | Internet-exposed resources | EC2 with public IPs |
| Privilege Escalation | IAM privilege escalation paths | PassRole + RunInstances |
| Data Access | Access to sensitive data | EC2 with S3 access |
---
## Common openCypher Patterns
### Match Account and Principal
```cypher
MATCH path_principal = (aws:AWSAccount {id: $provider_uid})--(principal:AWSPrincipal)--(policy:AWSPolicy)--(stmt:AWSPolicyStatement)
```
### Check IAM Action Permissions
```cypher
WHERE stmt.effect = 'Allow'
AND any(action IN stmt.action WHERE
toLower(action) = 'iam:passrole'
OR toLower(action) = 'iam:*'
OR action = '*'
)
```
### Find Roles Trusting a Service
```cypher
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {arn: 'ec2.amazonaws.com'})
```
### Check Resource Scope
```cypher
WHERE any(resource IN stmt.resource WHERE
resource = '*'
OR target_role.arn CONTAINS resource
OR resource CONTAINS target_role.name
)
```
### Include Prowler Findings
```cypher
UNWIND nodes(path_principal) + nodes(path_target) as n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {status: 'FAIL', provider_uid: $provider_uid})
RETURN path_principal, path_target,
collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
```
---
## Common Node Labels by Provider
### AWS
| Label | Description |
| -------------------- | ----------------------------------- |
| `AWSAccount` | AWS account root |
| `AWSPrincipal` | IAM principal (user, role, service) |
| `AWSRole` | IAM role |
| `AWSUser` | IAM user |
| `AWSPolicy` | IAM policy |
| `AWSPolicyStatement` | Policy statement |
| `EC2Instance` | EC2 instance |
| `EC2SecurityGroup` | Security group |
| `S3Bucket` | S3 bucket |
| `RDSInstance` | RDS database instance |
| `LoadBalancer` | Classic ELB |
| `LoadBalancerV2` | ALB/NLB |
| `LaunchTemplate` | EC2 launch template |
### Common Relationships
| Relationship | Description |
| ---------------------- | ----------------------- |
| `TRUSTS_AWS_PRINCIPAL` | Role trust relationship |
| `STS_ASSUMEROLE_ALLOW` | Can assume role |
| `POLICY` | Has policy attached |
| `STATEMENT` | Policy has statement |
---
## Parameters
For queries requiring user input, define parameters:
```python
parameters=[
AttackPathsQueryParameterDefinition(
name="ip",
label="IP address",
description="Public IP address, e.g. 192.0.2.0.",
placeholder="192.0.2.0",
),
AttackPathsQueryParameterDefinition(
name="tag_key",
label="Tag key",
description="Tag key to filter resources.",
placeholder="Environment",
),
],
```
---
## Best Practices
1. **Always filter by provider_uid**: Use `{id: $provider_uid}` on account nodes and `{provider_uid: $provider_uid}` on ProwlerFinding nodes
2. **Use consistent naming**: Follow existing patterns in the file
3. **Include Prowler findings**: Always add the OPTIONAL MATCH for ProwlerFinding nodes
4. **Return distinct findings**: Use `collect(DISTINCT pf)` to avoid duplicates
5. **Comment the query purpose**: Add inline comments explaining each MATCH clause
6. **Validate schema first**: Ensure all node labels and properties exist in Cartography schema
---
## openCypher Compatibility
Queries must be written in **openCypher Version 9** to ensure compatibility with both Neo4j and Amazon Neptune.
> **Why Version 9?** Amazon Neptune implements openCypher Version 9. By targeting this specification, queries work on both Neo4j and Neptune without modification.
### Avoid These (Not in openCypher spec)
| Feature | Reason |
| --------------------------------------------------- | ----------------------------------------------- |
| APOC procedures (`apoc.*`) | Neo4j-specific plugin, not available in Neptune |
| Virtual nodes (`apoc.create.vNode`) | APOC-specific |
| Virtual relationships (`apoc.create.vRelationship`) | APOC-specific |
| Neptune extensions | Not available in Neo4j |
| `reduce()` function | Use `UNWIND` + aggregation instead |
| `FOREACH` clause | Use `WITH` + `UNWIND` + `SET` instead |
| Regex match operator (`=~`) | Not supported in Neptune |
### CALL Subqueries
Supported with limitations:
- Use `WITH` clause to import variables: `CALL { WITH var ... }`
- Updates inside CALL subqueries are NOT supported
- Emitted variables cannot overlap with variables before the CALL
---
## Reference
### pathfinding.cloud (Attack Path Definitions)
- **Repository**: https://github.com/DataDog/pathfinding.cloud
- **All paths JSON**: `https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json`
- Use Bash with `curl` piped to `jq` or `python3` to query specific paths or list available services (the aggregated `paths.json` is too large for WebFetch and gets truncated — see Input Sources above)
### Cartography Schema
- **URL pattern**: `https://raw.githubusercontent.com/{org}/cartography/refs/tags/{version}/docs/root/modules/{provider}/schema.md`
- Always use the version from `api/pyproject.toml`, not master/main
### openCypher Specification
- **Neptune openCypher compliance** (what Neptune supports): https://docs.aws.amazon.com/neptune/latest/userguide/feature-opencypher-compliance.html
- **Rewriting Cypher for Neptune** (converting Neo4j-specific syntax): https://docs.aws.amazon.com/neptune/latest/userguide/migration-opencypher-rewrites.html
- **openCypher project** (spec, grammar, TCK): https://github.com/opencypher/openCypher
---
## Learning from the Queries Module
**IMPORTANT**: Before creating a new query, ALWAYS read the entire queries module:
```
api/src/backend/api/attack_paths/queries/
├── __init__.py # Module exports
├── types.py # Type definitions
├── registry.py # Registry logic
└── {provider}.py # Provider queries (aws.py, etc.)
```
Use the existing queries to learn:
- Query structure and formatting
- Variable naming conventions
- How to include Prowler findings
- Comment style
> **Compatibility Warning**: Some existing queries use Neo4j-specific features
> (e.g., `apoc.create.vNode`, `apoc.create.vRelationship`, regex `=~`) that are
> **NOT compatible** with Amazon Neptune. Use these queries to learn general
> patterns (structure, naming, Prowler findings integration, comment style) but
> **DO NOT copy APOC procedures or other Neo4j-specific syntax** into new queries.
> New queries must be pure openCypher Version 9. Refer to the
> [openCypher Compatibility](#opencypher-compatibility) section for the full list
> of features to avoid.
**DO NOT** use generic templates. Match the exact style of existing **compatible** queries in the file.

View File

@@ -2,7 +2,15 @@
All notable changes to the **Prowler UI** are documented in this file.
## [1.18.0] (Prowler UNRELEASED)
## [1.18.1] (Prowler UNRELEASED)
### 🐞 Fixed
- Scans page polling now only refreshes scan table data instead of re-rendering the entire server component tree, eliminating redundant API calls to providers, findings, and compliance endpoints every 5 seconds
---
## [1.18.0] (Prowler v5.18.0)
### 🔄 Changed

View File

@@ -1,21 +1,19 @@
import { Suspense } from "react";
import { getAllProviders } from "@/actions/providers";
import { getScans, getScansByState } from "@/actions/scans";
import { getScans } from "@/actions/scans";
import { auth } from "@/auth.config";
import { MutedFindingsConfigButton } from "@/components/providers";
import {
AutoRefresh,
NoProvidersAdded,
NoProvidersConnected,
ScansFilters,
} from "@/components/scans";
import { LaunchScanWorkflow } from "@/components/scans/launch-workflow";
import { SkeletonTableScans } from "@/components/scans/table";
import { ColumnGetScans } from "@/components/scans/table/scans";
import { ScansTableWithPolling } from "@/components/scans/table/scans";
import { ContentLayout } from "@/components/ui";
import { CustomBanner } from "@/components/ui/custom/custom-banner";
import { DataTable } from "@/components/ui/table";
import {
createProviderDetailsMapping,
extractProviderUIDs,
@@ -57,15 +55,6 @@ export default async function Scans({
const hasManageScansPermission = session?.user?.permissions?.manage_scans;
// Get scans data to check for executing scans
const scansData = await getScansByState();
const hasExecutingScan = scansData?.data?.some(
(scan: ScanProps) =>
scan.attributes.state === "executing" ||
scan.attributes.state === "available",
);
// Extract provider UIDs and create provider details mapping for filtering
const providerUIDs = providersData ? extractProviderUIDs(providersData) : [];
const providerDetails = providersData
@@ -82,7 +71,6 @@ export default async function Scans({
return (
<ContentLayout title="Scans" icon="lucide:timer">
<AutoRefresh hasExecutingScan={hasExecutingScan} />
<>
<>
{!hasManageScansPermission ? (
@@ -177,11 +165,10 @@ const SSRDataTableScans = async ({
}) || [];
return (
<DataTable
key={`scans-${Date.now()}`}
columns={ColumnGetScans}
data={expandedScansData || []}
metadata={meta}
<ScansTableWithPolling
initialData={expandedScansData}
initialMeta={meta}
searchParams={searchParams}
/>
);
};

View File

@@ -13,6 +13,7 @@ import { Form } from "@/components/ui/form";
import { toast } from "@/components/ui/toast";
import { onDemandScanFormSchema } from "@/types";
import { SCAN_LAUNCHED_EVENT } from "../table/scans/scans-table-with-polling";
import { SelectScanProvider } from "./select-scan-provider";
type ProviderInfo = {
@@ -85,6 +86,8 @@ export const LaunchScanWorkflow = ({
});
// Reset form after successful submission
form.reset();
// Notify the scans table to refresh and pick up the new scan
window.dispatchEvent(new Event(SCAN_LAUNCHED_EVENT));
}
};

View File

@@ -1,3 +1,4 @@
export * from "./column-get-scans";
export * from "./data-table-row-actions";
export * from "./data-table-row-details";
export * from "./scans-table-with-polling";

View File

@@ -0,0 +1,126 @@
"use client";
import { useCallback, useEffect, useState } from "react";
import { getScans } from "@/actions/scans";
import { AutoRefresh } from "@/components/scans";
import { DataTable } from "@/components/ui/table";
import { MetaDataProps, ScanProps, SearchParamsProps } from "@/types";
import { ColumnGetScans } from "./column-get-scans";
// Custom window event name dispatched after a scan is launched
// (see LaunchScanWorkflow); ScansTableWithPolling listens for it to
// trigger an immediate refresh instead of waiting for the next poll.
export const SCAN_LAUNCHED_EVENT = "scan-launched";
// Server-rendered initial table data plus the URL search params needed to
// rebuild the same getScans query (page, sort, filters) on client refresh.
interface ScansTableWithPollingProps {
  initialData: ScanProps[];
  initialMeta?: MetaDataProps;
  searchParams: SearchParamsProps;
}
// Scan states that keep the AutoRefresh polling loop active.
const EXECUTING_STATES = ["executing", "available"] as const;
/**
 * Attach a `providerInfo` summary (provider type, uid, alias) to each scan by
 * resolving its `relationships.provider` id against the JSON:API `included`
 * resources. Scans without a provider relationship, or whose provider is not
 * present in `included`, get `providerInfo: undefined`. Returns `[]` when
 * `scans` is nullish.
 */
function expandScansWithProviderInfo(
  scans: ScanProps[],
  included?: Array<{ type: string; id: string; attributes: any }>,
) {
  // Index provider resources by id once, instead of re-scanning `included`
  // for every scan (the original find-in-map was O(scans × included)).
  const providersById = new Map(
    (included ?? [])
      .filter((item) => item.type === "providers")
      .map((item) => [item.id, item] as const),
  );
  return (
    scans?.map((scan) => {
      const providerId = scan.relationships?.provider?.data?.id;
      const providerData = providerId
        ? providersById.get(providerId)
        : undefined;
      if (!providerData) {
        return { ...scan, providerInfo: undefined };
      }
      return {
        ...scan,
        providerInfo: {
          provider: providerData.attributes.provider,
          uid: providerData.attributes.uid,
          alias: providerData.attributes.alias,
        },
      };
    }) || []
  );
}
export function ScansTableWithPolling({
initialData,
initialMeta,
searchParams,
}: ScansTableWithPollingProps) {
const [scansData, setScansData] = useState<ScanProps[]>(initialData);
const [meta, setMeta] = useState<MetaDataProps | undefined>(initialMeta);
const hasExecutingScan = scansData.some((scan) =>
EXECUTING_STATES.includes(
scan.attributes.state as (typeof EXECUTING_STATES)[number],
),
);
const handleRefresh = useCallback(async () => {
const page = parseInt(searchParams.page?.toString() || "1", 10);
const pageSize = parseInt(searchParams.pageSize?.toString() || "10", 10);
const sort = searchParams.sort?.toString();
const filters = Object.fromEntries(
Object.entries(searchParams).filter(
([key]) => key.startsWith("filter[") && key !== "scanId",
),
);
const query = (filters["filter[search]"] as string) || "";
const result = await getScans({
query,
page,
sort,
filters,
pageSize,
include: "provider",
});
if (result?.data) {
const expanded = expandScansWithProviderInfo(
result.data,
result.included,
);
setScansData(expanded);
if (result && "meta" in result) {
setMeta(result.meta as MetaDataProps);
}
}
}, [searchParams]);
// Listen for scan launch events to trigger an immediate refresh
useEffect(() => {
const handler = () => {
handleRefresh();
};
window.addEventListener(SCAN_LAUNCHED_EVENT, handler);
return () => window.removeEventListener(SCAN_LAUNCHED_EVENT, handler);
}, [handleRefresh]);
return (
<>
<AutoRefresh
hasExecutingScan={hasExecutingScan}
onRefresh={handleRefresh}
/>
<DataTable
key={`scans-${scansData.length}-${meta?.pagination?.page}`}
columns={ColumnGetScans}
data={scansData}
metadata={meta}
/>
</>
);
}

View File

@@ -21,7 +21,9 @@ export const TableLink = ({ href, label, isDisabled }: TableLinkProps) => {
return (
<Button asChild variant="link" size="sm" className="text-xs">
<Link href={href}>{label}</Link>
<Link href={href} prefetch={false}>
{label}
</Link>
</Button>
);
};

View File

@@ -105,6 +105,7 @@ export class ScansPage extends BasePage {
await expect(this.scanTable).toBeVisible();
// Find a row that contains the account ID (provider UID in Cloud Provider column)
// Note: Use a more specific locator strategy if possible in the future
const rowWithAccountId = this.scanTable
.locator("tbody tr")
.filter({ hasText: accountId })