Compare commits


301 Commits

Author SHA1 Message Date
Víctor Fernández Poyatos
926f449ae6 fix(overviews): manage overview exceptions and use batch_size with bulk (#7140) 2025-03-06 15:39:51 +01:00
César Arroba
646668c6ae chore(ui-gha): delete double quotes on prowler version (#7139) 2025-03-06 15:39:40 +01:00
Prowler Bot
8e6b92792b fix(groups): display uid if alias is missing (#7138)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-03-06 14:41:57 +01:00
Prowler Bot
65c081ce38 fix(credentials): adjust helper links to fit width (#7134)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-03-06 13:01:10 +01:00
Pepe Fagoaga
5600131d6a revert(findings): change uid from varchar to text (#7132) 2025-03-06 11:43:07 +01:00
César Arroba
dbd271980f chore(ui-gha): add version prefix (#7125) 2025-03-05 16:34:42 +01:00
Víctor Fernández Poyatos
ff532a899e fix(reports): Fix task kwargs and result (#7124) 2025-03-05 16:34:36 +01:00
César Arroba
0c9675ec70 chore(ui): add prowler version on build (#7120) 2025-03-05 16:34:26 +01:00
Pablo Lara
d45eda2b2b feat(compliance): new compliance selector (#7118) 2025-03-05 16:34:07 +01:00
dependabot[bot]
0abcf80d19 chore(deps-dev): bump pytest from 8.3.4 to 8.3.5 (#7097)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-05 16:33:35 +01:00
Pablo Lara
c0e10fd395 chore(ui): update label from 'Select a scan job' to 'Select a cloud p… (#7107) 2025-03-05 16:32:20 +01:00
Pepe Fagoaga
a80e9b26a8 chore(api): Use Prowler from v5.4 (#7092) 2025-03-03 22:30:58 +05:45
Sergio Garcia
2a9cd57fb8 fix(deps): update vulnerable cryptography dependency (#6993) 2025-03-03 17:38:23 +01:00
Prowler Bot
a0ad1a5f49 chore(deps): bump cryptography from 43.0.1 to 44.0.1 in /api (#7003)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-03-03 17:21:15 +01:00
César Arroba
dff22dd166 Revert "chore(api): update prowler version to 5.4 (#7091)"
This reverts commit 58138810b9.
2025-03-03 17:11:20 +01:00
César Arroba
58138810b9 chore(api): update prowler version to 5.4 (#7091)
Co-authored-by: Prowler Bot <bot@prowler.com>
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-03-03 17:08:45 +01:00
Víctor Fernández Poyatos
1ecc272fe4 chore: update changelog 2025-03-03 16:27:05 +01:00
Pablo Lara
b784167006 fix(roles): show the correct error message (#7089) 2025-03-03 15:50:28 +01:00
Pablo Lara
cf0ec8dea0 fix: bug with create role and unlimited visibility checkbox (#7088) 2025-03-03 15:50:23 +01:00
Sergio Garcia
96cae5e961 feat(aws): add fixers for threat detection checks (#7085) 2025-03-03 15:50:18 +01:00
Pablo Lara
a48e5cb15f feat(version): add prowler version to the sidebar (#7086) 2025-03-03 15:50:14 +01:00
Pablo Lara
5a9ff007e0 chore: Update the latest table findings with the most recent changes (#7084) 2025-03-03 15:50:06 +01:00
Prowler Bot
24c45f894c chore(regions_update): Changes in regions for AWS services (#7034)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 15:47:09 +01:00
dependabot[bot]
5d03c85629 chore(deps): bump cryptography from 43.0.1 to 44.0.1 in /api (#7001)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 15:46:15 +01:00
Pablo Lara
41dc397a7a feat(sidebar): sidebar with new functionalities (#7018) 2025-03-03 15:01:38 +01:00
Prowler Bot
237a9adce9 chore(regions_update): Changes in regions for AWS services (#7067)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 15:01:29 +01:00
Sergio Garcia
a06167f1c2 fix(threat detection): run single threat detection check (#7065) 2025-03-03 15:01:21 +01:00
Pepe Fagoaga
a7d58c40dd refactor(stats): Use Finding instead of Check_Report (#7053)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2025-03-03 15:01:12 +01:00
Pepe Fagoaga
e260c46389 chore(examples): Scan AWS (#7064) 2025-03-03 15:01:02 +01:00
Sergio Garcia
115169a596 chore(gcp): enhance GCP APIs logic (#7046) 2025-03-03 15:00:49 +01:00
dependabot[bot]
5b19173c1d chore(deps): bump trufflesecurity/trufflehog from 3.88.13 to 3.88.14 (#7063)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 15:00:40 +01:00
Daniel Barranquero
d3dd1644e6 feat(m365): add sharepoint service with 4 checks (#7057)
Co-authored-by: MarioRgzLpz <mariorgzlpz1809@gmail.com>
Co-authored-by: HugoPBrito <hugopbrit@gmail.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-03-03 15:00:29 +01:00
Pedro Martín
8ff0c59964 feat(docs): add info related with sts assume role and regions (#7062) 2025-03-03 15:00:19 +01:00
Daniel Barranquero
285939c389 fix(azure): handle account not supporting Blob (#7060) 2025-03-03 15:00:04 +01:00
Sergio Garcia
a62ae8af51 fix(ecs): ensure unique finding id in ECS checks (#7059) 2025-03-03 14:59:56 +01:00
Prowler Bot
5d78b9e439 chore(regions_update): Changes in regions for AWS services (#7056)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 14:59:45 +01:00
Hugo Pereira Brito
1056c270ca feat(microsoft365): add new check entra_policy_ensure_default_user_cannot_create_tenants (#6918)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-03-03 14:59:35 +01:00
Pablo Lara
eeef6600b7 feat(exports): download scan exports (#7006) 2025-03-03 14:59:14 +01:00
Pepe Fagoaga
e142f17abe fix(env): UI version must be stable (#7055) 2025-03-03 14:58:45 +01:00
Víctor Fernández Poyatos
a65d858dac fix(migrations): Fix migration dependency order (#7051) 2025-03-03 14:58:32 +01:00
Víctor Fernández Poyatos
6235a1ba41 feat(labeler): apply label on migration changes (#7052) 2025-03-03 14:58:22 +01:00
Pepe Fagoaga
05007d03ee fix(findings): change uid from varchar to text (#7048) 2025-03-03 14:58:13 +01:00
Víctor Fernández Poyatos
102d099947 feat(findings): Add Django management command to populate database with dummy data (#7049) 2025-03-03 14:58:03 +01:00
Adrián Jesús Peña Rodríguez
3194675a5c feat(export): add API export system (#6878) 2025-03-03 14:57:40 +01:00
dependabot[bot]
14e6e4aa68 chore(deps-dev): bump black from 24.10.0 to 25.1.0 (#6733)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-03-03 14:57:22 +01:00
Pedro Martín
b24c3665b5 feat(aws): add ISO 27001 2022 compliance framework (#7035)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-03-03 14:56:31 +01:00
Prowler Bot
1f60878867 chore(regions_update): Changes in regions for AWS services (#7015)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 14:54:16 +01:00
dependabot[bot]
2dd18662d8 chore(deps): bump google-api-python-client from 2.161.0 to 2.162.0 (#7037)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:54:02 +01:00
Hugo Pereira Brito
175360dbe6 refactor(microsoft365): CheckReportMicrosoft365 and resource metadata (#6952) 2025-03-03 14:53:42 +01:00
Víctor Fernández Poyatos
80e24b971f feat(findings): Optimize findings endpoint (#7019) 2025-03-03 14:53:22 +01:00
Pepe Fagoaga
78877c470a chore(action): Conventional Commit Check (#7033) 2025-03-03 14:53:08 +01:00
dependabot[bot]
9c9b100359 chore(deps): bump trufflesecurity/trufflehog from 3.88.12 to 3.88.13 (#7026)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:52:51 +01:00
Pedro Martín
10f3232294 feat(outputs): add sample outputs (#6945) 2025-03-03 14:52:37 +01:00
Pedro Martín
a2e5f70f36 fix(cis): show report table on the CLI (#6979) 2025-03-03 14:52:23 +01:00
Pedro Martín
8d8b31c757 feat(azure): add PCI DSS 4.0 (#6982) 2025-03-03 14:52:03 +01:00
Pedro Martín
cba1e718b9 feat(kubernetes): add PCI DSS 4.0 (#7013) 2025-03-03 14:51:42 +01:00
Pedro Martín
6c3c37fc26 feat(dashboard): take the latest finding uid by timestamp (#6987) 2025-03-03 14:51:29 +01:00
Víctor Fernández Poyatos
b610cacd0c feat(tasks): add deletion queue for deletion tasks (#7022) 2025-03-03 14:51:14 +01:00
Pedro Martín
027a5705cb feat(gcp): add PCI DSS 4.0 (#7010) 2025-03-03 14:51:00 +01:00
Prowler Bot
b7fbfb4360 chore(regions_update): Changes in regions for AWS services (#7011)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 14:50:41 +01:00
dependabot[bot]
5acf0a7e3d chore(deps-dev): bump mkdocs-material from 9.6.4 to 9.6.5 (#7007)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:50:26 +01:00
Raj Chowdhury
3a25e86e30 fix(typo): solve typo in dashboard.md (#7009) 2025-03-03 14:50:06 +01:00
dependabot[bot]
50f1592eb3 chore(deps): bump trufflesecurity/trufflehog from 3.88.11 to 3.88.12 (#7008)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:49:46 +01:00
César Arroba
0f2927cb88 feat(api): setup sentry for OSS API (#6874) 2025-03-03 14:49:32 +01:00
Pablo Lara
b4e1434052 chore(users): renaming the account now triggers a re-render in the sidebar (#7005) 2025-03-03 14:49:16 +01:00
dependabot[bot]
43710783f9 chore(deps): bump python from 3.12.8-alpine3.20 to 3.12.9-alpine3.20 (#6882)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:49:04 +01:00
dependabot[bot]
16f767e7b9 chore(deps): bump tzlocal from 5.2 to 5.3 (#6932)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:48:41 +01:00
Hugo Pereira Brito
42818217a0 docs(tutorials): update all deprecated poetry shell references (#7002) 2025-03-03 14:44:50 +01:00
Prowler Bot
13405594b2 chore(regions_update): Changes in regions for AWS services (#6998)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 14:44:33 +01:00
Pedro Martín
b5a3852334 chore(github): add compliance to PR labeler (#6996) 2025-03-03 14:44:16 +01:00
Hugo Pereira Brito
181ff1acb3 docs(installation): add warning for poetry shell deprecation in README (#6983) 2025-03-03 14:43:35 +01:00
Pablo Lara
91e59a3279 chore(findings): add 'Status Extended' attribute to finding details (#6997) 2025-03-03 14:43:20 +01:00
Pedro Martín
b3fad1a765 feat(aws): add PCI DSS 4.0 (#6949) 2025-03-03 14:42:41 +01:00
dependabot[bot]
6f90927a79 chore(deps): bump trufflesecurity/trufflehog from 3.88.9 to 3.88.11 (#6988)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:42:24 +01:00
dependabot[bot]
0bb4a9a3e9 chore(deps): bump kubernetes from 32.0.0 to 32.0.1 (#6992)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:42:10 +01:00
Pablo Lara
80d9cde60b feat(scans): update the progress for executing scans (#6972) 2025-03-03 14:41:39 +01:00
César Arroba
11196c2f83 chore(gha): trigger API or UI deployment when push to master (#6946) 2025-03-03 14:41:21 +01:00
Prowler Bot
55a0d0a1b5 chore(regions_update): Changes in regions for AWS services (#6978) 2025-03-03 14:41:06 +01:00
Pedro Martín
4e5e1d7bd4 feat(aws): add compliance CIS 4.0 (#6937) 2025-03-03 14:40:17 +01:00
dependabot[bot]
06a0a434ab chore(deps-dev): bump flake8 from 7.1.1 to 7.1.2 (#6954)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:39:58 +01:00
Pepe Fagoaga
153833fc55 fix(ocsf): Adapt for 1.4.0 (#6971) 2025-03-03 14:38:57 +01:00
Prowler Bot
fc4877975f chore(regions_update): Changes in regions for AWS services (#6968)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 14:38:21 +01:00
dependabot[bot]
0797efd4fd chore(deps-dev): bump bandit from 1.8.2 to 1.8.3 (#6955)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:38:07 +01:00
Prowler Bot
fbec99a0b7 chore(regions_update): Changes in regions for AWS services (#6944)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 14:37:28 +01:00
dependabot[bot]
b2cb1de95e chore(deps): bump trufflesecurity/trufflehog from 3.88.8 to 3.88.9 (#6943)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:37:06 +01:00
dependabot[bot]
190c2316d7 chore(deps): bump google-api-python-client from 2.160.0 to 2.161.0 (#6933)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:36:16 +01:00
César Arroba
f6c352281a fix(gha): fix short sha step (#6939) 2025-03-03 14:36:00 +01:00
César Arroba
66dfe89936 chore(gha): add tag for api and ui images on push to master (#6920) 2025-03-03 14:35:41 +01:00
dependabot[bot]
8b3942ca49 chore(deps): bump trufflesecurity/trufflehog from 3.88.7 to 3.88.8 (#6931)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:35:06 +01:00
dependabot[bot]
9d35213bd5 chore(deps-dev): bump mkdocs-material from 9.6.3 to 9.6.4 (#6913)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:34:13 +01:00
Víctor Fernández Poyatos
3e586e615d feat(social-login): Add social login integration for Google and Github OAuth providers (#6906) 2025-03-03 14:33:12 +01:00
Sergio Garcia
a4f950e093 chore(docs): external K8s cluster Prowler App credentials (#6921) 2025-03-03 14:32:30 +01:00
Pedro Martín
7c2441f6ff fix(gcp): remove typos on CIS 3.0 (#6917) 2025-03-03 14:31:48 +01:00
dependabot[bot]
0a92af3eb2 chore(deps): bump trufflesecurity/trufflehog from 3.88.6 to 3.88.7 (#6915)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:31:25 +01:00
César Arroba
666f3a0e20 fix(gha): fix test build containers on pull requests actions (#6909) 2025-03-03 14:30:35 +01:00
dependabot[bot]
06ef98b5cc chore(deps-dev): bump coverage from 7.6.11 to 7.6.12 (#6897)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:29:39 +01:00
Prowler Bot
79125bdd40 chore(regions_update): Changes in regions for AWS services (#6900)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-03-03 14:28:42 +01:00
César Arroba
e8e8b085ac chore(api): test build container image on pull request (#6850) 2025-03-03 14:28:24 +01:00
César Arroba
c9b81d003a chore(ui): test build container image on pull request (#6849) 2025-03-03 14:27:33 +01:00
Pepe Fagoaga
23fa3c1e38 chore(version): Update version to 5.4.0 (#6894) 2025-03-03 14:26:51 +01:00
dependabot[bot]
03fbd0baca chore(deps-dev): bump coverage from 7.6.10 to 7.6.11 (#6887)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 14:24:50 +01:00
dependabot[bot]
d4fe24ef47 chore(deps): bump trufflesecurity/trufflehog from 3.88.5 to 3.88.6 (#6888)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 13:05:51 +01:00
Prowler Bot
9c5220ee98 fix(elasticache): improve logic in elasticache_redis_cluster_backup_enabled (#7045)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-02-26 12:12:44 +01:00
Prowler Bot
6491bce5a6 fix(azure): migrate resource models to avoid using SDK defaults (#7043)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2025-02-26 10:49:33 +01:00
Prowler Bot
ca375dd79c chore(iam): enhance iam_role_cross_service_confused_deputy_prevention recommendation (#7041)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-02-26 08:35:26 +01:00
Prowler Bot
e807573b54 fix(soc2_aws): remove duplicated checks (#6999)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-02-20 17:26:50 +05:30
Prowler Bot
c0f4c9743f fix(deps): update vulnerable cryptography dependency (#6994)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-02-20 16:04:14 +05:30
Prowler Bot
5974d0b5da fix(report): remove invalid resources in report (#6984)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-02-20 11:05:58 +05:30
Prowler Bot
6244a8a5f7 fix(roles): handle empty response in deleteRole and ensure revalidation (#6977)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-02-19 09:06:56 +01:00
Prowler Bot
5b9dae4529 test(cloudfront): add name retrieval test for cloudfront bucket domains (#6975)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-02-19 09:12:17 +05:30
Prowler Bot
a424374c44 fix(cloudfront): Incorrect bucket name retrievement (#6967)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-02-19 08:08:31 +05:30
Prowler Bot
b7fc2542e8 fix(gcp): Correct false positive when sslMode=ENCRYPTED_ONLY in CloudSQL (#6942)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2025-02-14 16:36:26 -05:00
Prowler Bot
83a1598a1e fix(issue pages): apply sorting by default in issue pages (#6935)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-02-14 10:35:49 +01:00
Prowler Bot
b22b56a06b fix(gcp): handle DNS Managed Zone with no DNSSEC (#6928)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-02-13 14:50:05 -05:00
Prowler Bot
5020e4713c fix(aws): codebuild service threw KeyError for projects type CODEPIPELINE (#6930)
Co-authored-by: Kay Agahd <kagahd@users.noreply.github.com>
2025-02-13 13:53:05 -05:00
Prowler Bot
ee534a740e fix(aws): SNS threw IndexError if SubscriptionArn is PendingConfirmation (#6923)
Co-authored-by: Kay Agahd <kagahd@users.noreply.github.com>
2025-02-13 10:35:19 -05:00
Prowler Bot
48cb45b7a8 fix(aws): handle AccessDenied when retrieving resource policy (#6912)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-02-12 19:15:51 -05:00
Prowler Bot
91b74822e9 fix(kms): Amazon KMS API call error handling (#6904)
Co-authored-by: Ogonna Iwunze <1915636+wunzeco@users.noreply.github.com>
2025-02-12 11:08:47 -05:00
Pepe Fagoaga
287eef5085 chore(version): Update version to 5.3.1 (#6895) 2025-02-12 16:39:09 +05:45
Mario Rodriguez Lopez
45d359c84a feat(microsof365): Add documentation and compliance file (#6195)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2025-02-10 17:18:43 +01:00
Pablo Lara
6049e5d4e8 chore: Update prowler api version (#6877)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-02-10 17:39:08 +05:45
Víctor Fernández Poyatos
dfd377f89e chore(api): Update changelog and specs (#6876) 2025-02-10 12:25:04 +01:00
Víctor Fernández Poyatos
37e6c52c14 chore: Add needed steps for API in PR template (#6875) 2025-02-10 12:24:51 +01:00
Pepe Fagoaga
d6a7f4d88f fix(kubernetes): Change UID validation (#6869)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-02-10 11:24:34 +01:00
Pepe Fagoaga
239cda0a90 chore: Rename dashboard table latest findings (#6873)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-02-10 11:24:27 +01:00
dependabot[bot]
4a821e425b chore(deps-dev): bump mkdocs-material from 9.6.2 to 9.6.3 (#6871)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:24:21 +01:00
Sergio Garcia
e1a2f0c204 docs(eks): add documentation about EKS onboarding (#6853)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-02-10 11:24:16 +01:00
Prowler Bot
c70860c733 chore(regions_update): Changes in regions for AWS services (#6858)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-02-10 11:24:09 +01:00
Víctor Fernández Poyatos
05e71e033f feat(findings): Use ArrayAgg and subqueries on metadata endpoint (#6863)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-02-10 11:24:03 +01:00
Pablo Lara
5164ec2eb9 feat: implement new functionality with inserted_at__gte in findings a… (#6864) 2025-02-10 11:23:58 +01:00
Víctor Fernández Poyatos
be18dac4f9 docs: Add details about user creation in Prowler app (#6862) 2025-02-10 11:23:52 +01:00
dependabot[bot]
bb126c242f chore(deps): bump microsoft-kiota-abstractions from 1.9.1 to 1.9.2 (#6856)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:23:47 +01:00
Víctor Fernández Poyatos
e27780a856 feat(findings): Require date filters for findings endpoints (#6800) 2025-02-10 11:23:41 +01:00
Pranay Girase
196ec51751 fix(typo): typos in Dashboard and Report in HTML (#6847) 2025-02-10 11:23:33 +01:00
Prowler Bot
86abf9e64c chore(regions_update): Changes in regions for AWS services (#6848)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-02-10 11:23:28 +01:00
dependabot[bot]
9d8be578e3 chore(deps): bump trufflesecurity/trufflehog from 3.88.4 to 3.88.5 (#6844)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:23:19 +01:00
Mario Rodriguez Lopez
b3aa800082 feat(entra): add new check entra_thirdparty_integrated_apps_not_allowed (#6357)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-02-10 11:23:13 +01:00
Ogonna Iwunze
501674a778 feat(kms): add kms_cmk_not_multi_region AWS check (#6794)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-02-10 11:23:01 +01:00
Prowler Bot
5ff6ae79d8 chore(regions_update): Changes in regions for AWS services (#6821)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-02-10 11:21:08 +01:00
Mario Rodriguez Lopez
e518a869ab feat(entra): add new entra service for Microsoft365 (#6326)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-02-10 11:20:58 +01:00
Mario Rodriguez Lopez
43927a62f3 feat(microsoft365): add new check admincenter_settings_password_never_expire (#6023)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-02-10 11:20:52 +01:00
dependabot[bot]
335980c8d8 chore(deps): bump kubernetes from 31.0.0 to 32.0.0 (#6678)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:20:42 +01:00
Pablo Lara
ca3ee378db style(forms): improve spacing consistency (#6814) 2025-02-10 11:17:11 +01:00
Pablo Lara
c05bc1068a chore(forms): improvements to the sign-in and sign-up forms (#6813) 2025-02-10 11:17:03 +01:00
Drew Kerrigan
2e3164636d docs(): add description of changed and new delta values to prowler app tutorial (#6801) 2025-02-10 11:16:56 +01:00
dependabot[bot]
c34e07fc40 chore(deps): bump pytz from 2024.2 to 2025.1 (#6765)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:16:50 +01:00
dependabot[bot]
6022122a61 chore(deps-dev): bump mkdocs-material from 9.5.50 to 9.6.2 (#6799)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:16:44 +01:00
dependabot[bot]
f65f5e4b46 chore(deps-dev): bump pylint from 3.3.3 to 3.3.4 (#6721)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:16:37 +01:00
Pablo Lara
dee17733a0 feat(scans): show scan details right after launch (#6791) 2025-02-10 11:16:30 +01:00
dependabot[bot]
cddda1e64e chore(deps): bump trufflesecurity/trufflehog from 3.88.2 to 3.88.4 (#6760)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:16:23 +01:00
dependabot[bot]
f7b873db03 chore(deps): bump google-api-python-client from 2.159.0 to 2.160.0 (#6720)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:16:17 +01:00
Víctor Fernández Poyatos
792bc70d0a feat(schedules): Rework daily schedule to always show the next scan (#6700) 2025-02-10 11:16:11 +01:00
Hugo Pereira Brito
185491b061 fix: microsoft365 mutelist (#6724) 2025-02-10 11:16:05 +01:00
dependabot[bot]
3af8a43480 chore(deps): bump microsoft-kiota-abstractions from 1.6.8 to 1.9.1 (#6734)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:15:57 +01:00
Pablo Lara
fd78406b29 fix: Enable hot reloading when using Docker Compose for UI (#6750) 2025-02-10 11:15:49 +01:00
Matt Johnson
4758b258a3 chore: Update Google Analytics ID across all docs.prowler.com sites. (#6730) 2025-02-10 11:15:41 +01:00
dependabot[bot]
015e2b3b88 chore(deps): bump uuid from 10.0.0 to 11.0.5 in /ui (#6516)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:15:33 +01:00
Mario Rodriguez Lopez
e184c9cb61 feat(m365): add Microsoft 365 provider (#5902)
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
Co-authored-by: HugoPBrito <hugopbrit@gmail.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-02-10 11:15:24 +01:00
dependabot[bot]
9004a01183 chore(deps): bump azure-mgmt-web from 7.3.1 to 8.0.0 (#6680)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:15:18 +01:00
dependabot[bot]
dd65ba3d9e chore(deps): bump msgraph-sdk from 1.17.0 to 1.18.0 (#6679)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:15:08 +01:00
dependabot[bot]
bba616a18f chore(deps): bump azure-storage-blob from 12.24.0 to 12.24.1 (#6666)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-10 11:14:33 +01:00
Pepe Fagoaga
aa0f8d2981 chore: bump for next minor (#6672) 2025-02-10 11:13:42 +01:00
Paolo Frigo
2511d6ffa9 docs: update # of checks, services, frameworks and categories (#6528)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-02-10 11:12:02 +01:00
Prowler Bot
27329457be fix(dashboard): adjust the bar chart display (#6868)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-02-07 11:04:23 -05:00
Prowler Bot
7189f3d526 fix(aws): key error for detect-secrets (#6865)
Co-authored-by: Kay Agahd <kagahd@users.noreply.github.com>
2025-02-07 10:04:54 -05:00
Prowler Bot
58e7589c9d fix(kms): handle error in DescribeKey function (#6842)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-02-05 15:27:43 -05:00
Prowler Bot
d60f4b5ded fix(cloudfront): fix false positive in s3 origins (#6838)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2025-02-05 13:36:49 -05:00
Prowler Bot
4c2ec094f6 fix(findings): Spelling mistakes correction (#6834)
Co-authored-by: Gary Mclean <gary.mclean@krrv.io>
2025-02-05 11:53:17 -05:00
Prowler Bot
395ecaff5b fix(directoryservice): handle ClientException (#6828)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2025-02-05 11:20:13 -05:00
Prowler Bot
c39506ef7d fix(aws) wording of report.status_extended in awslambda_function_not_publicly_accessible (#6831)
Co-authored-by: Kay Agahd <kagahd@users.noreply.github.com>
2025-02-05 11:18:27 -05:00
Prowler Bot
eb90d479e2 chore(aws_audit_manager_control_tower_guardrails): add checks to reqs (#6803)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-02-04 19:47:04 -05:00
Prowler Bot
b92a73f5ea fix(elasticache): InvalidReplicationGroupStateFault error (#6820)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-02-04 16:08:26 -05:00
Prowler Bot
ad121f3059 chore(deps-dev): bump moto from 5.0.27 to 5.0.28 (#6817)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-02-04 14:25:04 -05:00
Pepe Fagoaga
70e4c5a44e chore: bump for next patch (#6764) 2025-02-03 15:25:23 -05:00
Prowler Bot
b5a46b7b59 fix(cis_1.5_aws): add checks to needed reqs (#6798)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-02-03 11:37:53 -05:00
Prowler Bot
f1a97cd166 fix(cis_1.4_aws): add checks to needed reqs (#6796)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-02-03 11:37:39 -05:00
Prowler Bot
0774508093 fix(cis_2.0_aws): add checks to needed reqs (#6787)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-02-03 11:37:24 -05:00
Prowler Bot
0664ce6b94 fix(gcp): fix wrong provider value in check (#6789)
Co-authored-by: secretcod3r <101349794+secretcod3r@users.noreply.github.com>
2025-02-03 10:32:53 -05:00
Prowler Bot
407c779c52 fix(findings): remove default status filtering (#6785)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-02-03 15:25:11 +01:00
Prowler Bot
c60f13f23f fix(findings): order findings by inserted_at DESC (#6783)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-02-03 11:57:45 +01:00
Prowler Bot
37d912ef01 fix(celery): Kill celery worker process after every task to release memory (#6763)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
2025-01-31 19:37:34 +05:45
Pepe Fagoaga
d3de89c017 chore: bump for next patch (#6762) 2025-01-31 19:34:54 +05:45
Prowler Bot
cb22af25c6 fix(db_event): Handle other events (#6757)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-01-30 21:48:42 +05:45
Prowler Bot
a534b94495 fix(set_report_color): Add more details to error (#6755)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-01-30 21:46:34 +05:45
Prowler Bot
6262b4ff0b feat(scans): Optimize read queries during scans (#6756)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
2025-01-30 20:56:11 +05:45
Prowler Bot
84ecd7ab2c feat(findings): Improve /findings/metadata performance (#6749)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
2025-01-30 18:39:25 +05:45
Prowler Bot
1a5428445a fix(neptune): correct service name (#6747)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-01-30 17:52:00 +05:45
Prowler Bot
ac8e991ca0 fix(aws): iam_user_with_temporary_credentials resource in OCSF (#6741)
Co-authored-by: Kay Agahd <kagahd@users.noreply.github.com>
2025-01-30 17:17:20 +05:45
Prowler Bot
83a0331472 fix(acm): Key Error DomainName (#6744)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-01-30 17:17:05 +05:45
Prowler Bot
cce31e2971 fix(finding): raise when generating invalid findings (#6745)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-01-30 17:16:50 +05:45
Prowler Bot
0adf7d6e77 fix(sns): Add region to subscriptions (#6740)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-01-30 15:46:55 +05:45
Pepe Fagoaga
295f8b557e chore: bump for next patch (#6727) 2025-01-29 20:00:47 +05:45
Prowler Bot
bb2c5c3161 fix(scans): change label for next scan (#6726)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-29 10:49:35 +01:00
Prowler Bot
0018f36a36 fix(migrations): Use indexes instead of constraints to define an index (#6723)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
2025-01-29 14:29:03 +05:45
Prowler Bot
857de84f49 fix(defender): add field to SecurityContacts (#6715)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-01-29 13:53:14 +05:45
Prowler Bot
9630f2242a revert: Update Django DB manager to use psycopg3 and connection pooling (#6719)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
2025-01-28 22:33:32 +05:45
Prowler Bot
1fe125867c fix(scan-summaries): Improve efficiency on providers overview (#6718)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
2025-01-28 22:15:22 +05:45
Prowler Bot
0737893240 fix(scans): filters and sorting for scan table (#6714)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-28 13:29:39 +01:00
Prowler Bot
282fe3d348 fix(scans, findings): Improve API performance ordering by inserted_at instead of id (#6712)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-01-28 17:28:26 +05:45
Prowler Bot
b5d83640ae fix: fixed bug when opening finding details while a scan is in progress (#6709)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-28 07:01:43 +01:00
Prowler Bot
2823d3ad21 fix(cloudsql): add trusted client certificates case for cloudsql_instance_ssl_connections (#6687)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2025-01-24 12:19:05 -05:00
Prowler Bot
00b93bfe86 fix(cloudwatch): NoneType object is not iterable (#6677)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-01-23 13:25:02 -05:00
Pepe Fagoaga
84c253d887 chore: bump for next patch (#6673) 2025-01-23 13:13:05 -05:00
Pepe Fagoaga
2ab5a702c9 chore(api): Bump to v1.3.0 (#6670) 2025-01-23 16:40:59 +01:00
Sergio Garcia
11d9cdf24e feat(resource metadata): add resource metadata to JSON OCSF (#6592)
Co-authored-by: Rubén De la Torre Vico <ruben@prowler.com>
2025-01-23 16:08:13 +01:00
Rubén De la Torre Vico
b1de41619b fix: add missing Check_Report_Azure parameters (#6583) 2025-01-23 16:07:23 +01:00
Rubén De la Torre Vico
18a4881a51 feat(network): extract Network resource metadata automated (#6555)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-01-23 16:06:44 +01:00
Rubén De la Torre Vico
de76a168c0 feat(storage): extract Storage resource metadata automated (#6563)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-01-23 16:06:40 +01:00
Rubén De la Torre Vico
bcc13a742d feat(vm): extract VM resource metadata automated (#6564) 2025-01-23 16:06:32 +01:00
Rubén De la Torre Vico
23b584f4bf feat(sqlserver): extract SQL Server resource metadata automated (#6562) 2025-01-23 16:06:25 +01:00
Daniel Barranquero
074b7c1ff5 feat(aws): include resource metadata to remaining checks (#6551)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-01-23 16:06:18 +01:00
Rubén De la Torre Vico
c04e9ed914 feat(postgresql): extract PostgreSQL resource metadata automated (#6560) 2025-01-23 16:06:11 +01:00
Rubén De la Torre Vico
1f1b126e79 feat(policy): extract Policy resource metadata automated (#6558) 2025-01-23 16:06:06 +01:00
Rubén De la Torre Vico
9b0dd80f13 feat(entra): extract Entra resource metadata automated (#6542)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-01-23 16:06:01 +01:00
Rubén De la Torre Vico
b0807478f2 feat(monitor): extract monitor resource metadata automated (#6554)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-01-23 16:05:54 +01:00
Rubén De la Torre Vico
11dfa58ecd feat(mysql): extract MySQL resource metadata automated (#6556) 2025-01-23 16:05:48 +01:00
Rubén De la Torre Vico
412f396e0a feat(keyvault): extract KeyVault resource metadata automated (#6553) 2025-01-23 16:05:44 +01:00
Rubén De la Torre Vico
8100d43ff2 feat(iam): extract IAM resource metadata automated (#6552) 2025-01-23 16:05:38 +01:00
Sergio Garcia
bc96acef48 fix(gcp): iterate through service projects (#6549)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2025-01-23 16:05:24 +01:00
Hugo Pereira Brito
6e9876b61a feat(aws): include resource metadata in services from r* to s* (#6536)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-01-23 16:05:17 +01:00
Pedro Martín
32253ca4f7 feat(gcp): add resource metadata to report (#6500)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-01-23 16:05:08 +01:00
Hugo Pereira Brito
4c6be5e283 feat(aws): include resource metadata in services from a* to b* (#6504)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-01-23 16:05:03 +01:00
Daniel Barranquero
69178fd7bd chore(aws): add resource metadata to services from t to w (#6546) 2025-01-23 16:04:49 +01:00
Daniel Barranquero
cd4432c14b chore(aws): add resource metadata to services from f to o (#6545) 2025-01-23 16:04:40 +01:00
Rubén De la Torre Vico
0e47172aec feat(defender): extract Defender resource metadata in automated way (#6538) 2025-01-23 16:04:30 +01:00
Rubén De la Torre Vico
efe18ae8b2 feat(appinsights): extract App Insights resource metadata in automated way (#6540) 2025-01-23 16:04:18 +01:00
Hugo Pereira Brito
d5e13df4fe feat: add resource metadata to emr_cluster_account_public_block_enabled (#6539) 2025-01-23 16:04:13 +01:00
Sergio Garcia
b0e84d74f2 feat(kubernetes): add resource metadata to report (#6479) 2025-01-23 16:04:05 +01:00
Hugo Pereira Brito
28526b591c feat(aws): include resource metadata in services from d* to e* (#6532)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-01-23 16:03:59 +01:00
Daniel Barranquero
51e6a6edb1 feat(aws): add resource metadata to all services starting with c (#6493) 2025-01-23 16:03:50 +01:00
Rubén De la Torre Vico
c1b132a29e feat(cosmosdb): extract CosmosDB resource metadata in automated way (#6533) 2025-01-23 16:03:43 +01:00
Rubén De la Torre Vico
e2530e7d57 feat(containerregistry): extract Container Registry resource metadata in automated way (#6530) 2025-01-23 16:03:37 +01:00
Rubén De la Torre Vico
f2fcb1599b feat(azure-app): extract Web App resource metadata in automated way (#6529) 2025-01-23 16:03:32 +01:00
Rubén De la Torre Vico
160eafa0c9 feat(aks): use Check_Report_Azure constructor properly in AKS checks (#6509) 2025-01-23 16:03:17 +01:00
Rubén De la Torre Vico
c2bc0f1368 feat(aisearch): use Check_Report_Azure constructor properly in AISearch checks (#6506) 2025-01-23 16:03:10 +01:00
Rubén De la Torre Vico
27d27fff81 feat(azure): include resource metadata in Check_Report_Azure (#6505) 2025-01-23 16:02:00 +01:00
Pepe Fagoaga
66e8f0ce18 chore(scan): Remove ._findings (#6667) 2025-01-23 15:58:37 +01:00
Pedro Martín
0ceae8361b feat(kubernetes): add CIS 1.10 compliance (#6508)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
# Conflicts:
#	prowler/compliance/kubernetes/cis_1.10_kubernetes.json
2025-01-23 15:39:40 +01:00
Pedro Martín
5e52fded83 feat(azure): add CIS 3.0 for Azure (#5226)
# Conflicts:
#	prowler/compliance/azure/cis_3.0_azure.json
2025-01-23 15:35:35 +01:00
Pablo Lara
0a7d07b4b6 chore: adjust DateWithTime component height when used with InfoField (#6669) 2025-01-23 15:21:42 +01:00
Pablo Lara
34b5f483d7 chore(scans): improve scan details (#6665) 2025-01-23 14:16:35 +01:00
Pedro Martín
9d04a1bc52 feat(detect-secrets): get secrets plugins from config.yaml (#6544)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-01-23 14:16:15 +01:00
dependabot[bot]
b25c0aaa00 chore(deps): bump azure-mgmt-containerservice from 33.0.0 to 34.0.0 (#6630)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 14:16:03 +01:00
dependabot[bot]
652b93ea45 chore(deps): bump azure-mgmt-compute from 33.1.0 to 34.0.0 (#6628)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 14:15:54 +01:00
Pepe Fagoaga
ccb7726511 fix(templates): Customize principals and add validation (#6655) 2025-01-23 14:15:41 +01:00
Anton Rubets
c514a4e451 chore(helm): Add prowler helm support (#6580)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-01-23 14:15:33 +01:00
Prowler Bot
d841bd6890 chore(regions_update): Changes in regions for AWS services (#6652)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-01-23 14:15:24 +01:00
dependabot[bot]
ff54e10ab2 chore(deps): bump boto3 from 1.35.94 to 1.35.99 (#6651)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 14:15:03 +01:00
Pepe Fagoaga
718e562741 chore(pre-commit): poetry checks for API and SDK (#6658) 2025-01-23 14:14:10 +01:00
Pablo Lara
ce05e2a939 feat(providers): show the cloud formation and terraform template links on the form (#6660) 2025-01-23 14:13:37 +01:00
Pablo Lara
90831d3084 feat(providers): make external id field mandatory in the aws role secret form (#6656) 2025-01-23 14:13:30 +01:00
dependabot[bot]
2c928dead5 chore(deps-dev): bump moto from 5.0.16 to 5.0.27 (#6632)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-01-23 14:13:23 +01:00
dependabot[bot]
3ffe147664 chore(deps): bump botocore from 1.35.94 to 1.35.99 (#6520)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-01-23 14:13:04 +01:00
dependabot[bot]
3373b0f47c chore(deps-dev): bump mkdocs-material from 9.5.49 to 9.5.50 (#6631)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 14:12:52 +01:00
Prowler Bot
b29db04560 chore(regions_update): Changes in regions for AWS services (#6599)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-01-23 14:12:38 +01:00
Hugo Pereira Brito
f964a0362e chore(services): delete all comment headers (#6585) 2025-01-23 14:11:58 +01:00
Prowler Bot
537b23dfae chore(regions_update): Changes in regions for AWS services (#6577)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-01-23 14:11:46 +01:00
Pablo Lara
48cf3528b4 feat(findings): add first seen in findings details (#6575) 2025-01-23 14:11:08 +01:00
dependabot[bot]
3c4b9d32c9 chore(deps): bump msgraph-sdk from 1.16.0 to 1.17.0 (#6547)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 14:09:46 +01:00
Víctor Fernández Poyatos
fd8361ae2c feat(db): Update Django DB manager to use psycopg3 and connection pooling (#6541)
# Conflicts:
#	api/poetry.lock
#	api/pyproject.toml
2025-01-23 14:07:34 +01:00
Prowler Bot
33cadaa932 chore(regions_update): Changes in regions for AWS services (#6526)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2025-01-23 14:02:14 +01:00
dependabot[bot]
cc126eb8b4 chore(deps): bump google-api-python-client from 2.158.0 to 2.159.0 (#6521)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 14:02:07 +01:00
Pedro Martín
c996b3f6aa docs(readme): update pr template to add check for readme (#6531) 2025-01-23 14:01:53 +01:00
Adrián Jesús Peña Rodríguez
e2b406a300 feat(finding): add first_seen attribute (#6460) 2025-01-23 14:01:46 +01:00
dependabot[bot]
c2c69da603 chore(deps): bump django from 5.1.4 to 5.1.5 in /api (#6519)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
# Conflicts:
#	api/poetry.lock
2025-01-23 14:01:33 +01:00
Adrián Jesús Peña Rodríguez
4e51348ff2 feat(provider-secret): make existing external_id field mandatory (#6510) 2025-01-23 14:00:42 +01:00
dependabot[bot]
3257b82706 chore(deps-dev): bump eslint-config-prettier from 9.1.0 to 10.0.1 in /ui (#6518)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 13:59:26 +01:00
Pepe Fagoaga
c98d764daa chore(version): set next minor (#6511)
# Conflicts:
#	prowler/config/config.py
#	pyproject.toml
2025-01-23 13:57:20 +01:00
Prowler Bot
0448429a9f chore(regions_update): Changes in regions for AWS services (#6495)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-01-23 13:56:43 +01:00
Pepe Fagoaga
b948ac6125 feat(prowler-role): Add templates to deploy it in AWS (#6499) 2025-01-23 13:56:28 +01:00
dependabot[bot]
3acc09ea16 chore(deps): bump jinja2 from 3.1.4 to 3.1.5 (#6457)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 13:56:09 +01:00
dependabot[bot]
82d0d0de9d chore(deps-dev): bump bandit from 1.8.0 to 1.8.2 (#6485)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-23 13:56:00 +01:00
Prowler Bot
f9cfc4d087 fix: add detector and line number of potential secret (#6663)
Co-authored-by: Kay Agahd <kagahd@users.noreply.github.com>
2025-01-22 10:55:49 -05:00
Pepe Fagoaga
7d68ff455b chore(api): Use prowler from v5.1 (#6659) 2025-01-22 20:04:55 +05:45
Pepe Fagoaga
ddf4881971 chore(version): Update version to 5.1.6 (#6645) 2025-01-21 13:28:44 -05:00
Prowler Bot
9ad4944142 fix(filters): fix dynamic filters (#6643)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-21 13:40:18 +01:00
Prowler Bot
7f33ea76a4 fix(OCSF): fix OCSF output when timestamp is UNIX format (#6627)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2025-01-20 18:03:51 -05:00
Prowler Bot
1140c29384 fix(aws): list tags for DocumentDB clusters (#6622)
Co-authored-by: Kay Agahd <kagahd@users.noreply.github.com>
2025-01-20 17:22:06 -05:00
Prowler Bot
2441a62f39 fix: update Azure CIS with existing App checks (#6625)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2025-01-20 16:27:14 -05:00
Pepe Fagoaga
c26a231fc1 chore(version): Update version to 5.1.5 (#6618) 2025-01-20 15:07:58 -05:00
Prowler Bot
2fb2315037 chore(RBAC): add permission's info (#6617)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-20 17:31:52 +01:00
Prowler Bot
a9e475481a fix(snippet-id): improve provider ID readability in tables (#6616)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-20 17:28:04 +01:00
Prowler Bot
826d7c4dc3 fix(rbac): remove invalid required permission (#6614)
Co-authored-by: Adrián Jesús Peña Rodríguez <adrianjpr@gmail.com>
2025-01-20 17:02:31 +01:00
Prowler Bot
b7f4b37f66 feat(api): restrict the deletion of users, only the user of the request can be deleted (#6613)
Co-authored-by: Adrián Jesús Peña Rodríguez <adrianjpr@gmail.com>
2025-01-20 17:02:21 +01:00
Prowler Bot
193d691bfe fix(RBAC): tweaks for edit role form (#6610)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-20 14:12:43 +01:00
Prowler Bot
a359bc581c fix(RBAC): restore manage_account permission for roles (#6603)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-20 11:53:08 +01:00
Prowler Bot
9a28ff025a fix(sqs): fix flaky test (#6595)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2025-01-17 12:40:11 -05:00
Prowler Bot
f1c7050700 fix(apigatewayv2): managed exception NotFoundException (#6590)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-01-17 09:27:19 -05:00
Pepe Fagoaga
9391c27b9e chore(version): Update version to 5.1.4 (#6591) 2025-01-17 09:25:35 -05:00
Prowler Bot
4c54de092f feat(findings): Add resource_tag filters for findings endpoint (#6587)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
2025-01-17 19:01:51 +05:45
Prowler Bot
690c482a43 fix(gcp): fix flaky tests from dns service (#6571)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2025-01-17 08:15:34 -05:00
Prowler Bot
ad2d857c6f feat(findings): add /findings/metadata to retrieve dynamic filters information (#6586)
Co-authored-by: Víctor Fernández Poyatos <victor@prowler.com>
2025-01-17 18:47:59 +05:45
Pepe Fagoaga
07ee59d2ef chore(version): Update version to 5.1.3 (#6584) 2025-01-17 18:46:08 +05:45
Prowler Bot
bec4617d0a fix(providers): update the label and placeholder based on the cloud provider (#6582)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-17 12:33:25 +01:00
Prowler Bot
94916f8305 fix(findings): remove filter delta_in applied by default (#6579)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-17 11:06:07 +01:00
Prowler Bot
44de651be3 fix(cis): add subsections if needed (#6568)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-01-16 14:49:49 -05:00
Prowler Bot
bdcba9c642 fix(detect_secrets): refactor logic for detect-secrets (#6566)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-01-16 13:07:18 -05:00
Prowler Bot
c172f75f1a fix(dep): address compatibility issues (#6557)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-16 14:35:06 +01:00
Prowler Bot
ec492fa13a feat(filters): add resource type filter for findings (#6525)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-01-15 08:43:49 +01:00
Prowler Bot
702659959c fix(Azure TDE): add filter for master DB (#6514)
Co-authored-by: johannes-engler-mw <132657752+johannes-engler-mw@users.noreply.github.com>
2025-01-14 15:25:27 -05:00
Pepe Fagoaga
fef332a591 chore(version): set next fixes 2025-01-14 18:05:04 +01:00
1625 changed files with 135871 additions and 18412 deletions

.env (34 changed lines)

@@ -3,7 +3,7 @@
# For production, it is recommended to use a secure method to store these variables and change the default secret keys.
#### Prowler UI Configuration ####
PROWLER_UI_VERSION="latest"
PROWLER_UI_VERSION="stable"
SITE_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
@@ -30,6 +30,30 @@ VALKEY_HOST=valkey
VALKEY_PORT=6379
VALKEY_DB=0
# API scan settings
# The path to the directory where scan output should be stored
DJANGO_TMP_OUTPUT_DIRECTORY = "/tmp/prowler_api_output"
# The maximum number of findings to process in a single batch
DJANGO_FINDINGS_BATCH_SIZE = 1000
# The AWS access key to be used when uploading scan output to an S3 bucket
# If left empty, default AWS credentials resolution behavior will be used
DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID=""
# The AWS secret key to be used when uploading scan output to an S3 bucket
DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY=""
# An optional AWS session token
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN=""
# The AWS region where your S3 bucket is located (e.g., "us-east-1")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION=""
# The name of the S3 bucket where scan output should be stored
DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET=""
# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api
DJANGO_BIND_ADDRESS=0.0.0.0
@@ -92,3 +116,11 @@ jQIDAQAB
# openssl rand -base64 32
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=
# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local
#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.4.0
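
The DJANGO_OUTPUT_S3_* variables added to .env above configure optional upload of scan output to S3, with empty credentials meaning "use the default AWS credential chain". As a minimal illustration of how settings like these are typically consumed — a hypothetical helper assuming boto3, not the project's actual upload code:

import os
import boto3

def build_s3_client():
    # Hypothetical helper: prefer the explicit DJANGO_OUTPUT_S3_* credentials,
    # otherwise fall back to boto3's default credential resolution.
    access_key = os.getenv("DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID", "")
    secret_key = os.getenv("DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY", "")
    session_token = os.getenv("DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN", "") or None
    region = os.getenv("DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION", "") or None
    if access_key and secret_key:
        return boto3.client(
            "s3",
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            aws_session_token=session_token,
            region_name=region,
        )
    return boto3.client("s3", region_name=region)

def upload_scan_output(local_path, key):
    # Upload a file produced under DJANGO_TMP_OUTPUT_DIRECTORY to the configured bucket.
    bucket = os.environ["DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET"]
    build_s3_client().upload_file(local_path, bucket, key)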

.github/labeler.yml (10 changed lines)

@@ -92,3 +92,13 @@ component/api:
component/ui:
- changed-files:
- any-glob-to-any-file: "ui/**"
compliance:
- changed-files:
- any-glob-to-any-file: "prowler/compliance/**"
- any-glob-to-any-file: "prowler/lib/outputs/compliance/**"
- any-glob-to-any-file: "tests/lib/outputs/compliance/**"
review-django-migrations:
- changed-files:
- any-glob-to-any-file: "api/src/backend/api/migrations/**"
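
The two labeler entries above attach labels purely from changed-file globs: a PR gets the label if any changed file matches any of the listed globs. A rough Python approximation of that matching (the action uses minimatch-style globs; fnmatch here is only an illustration, and the example path is hypothetical):

from fnmatch import fnmatch

LABEL_GLOBS = {
    "compliance": [
        "prowler/compliance/**",
        "prowler/lib/outputs/compliance/**",
        "tests/lib/outputs/compliance/**",
    ],
    "review-django-migrations": ["api/src/backend/api/migrations/**"],
}

def labels_for(changed_files):
    # A label applies when any changed file matches any of its globs.
    return {
        label
        for label, globs in LABEL_GLOBS.items()
        if any(fnmatch(path, glob) for path in changed_files for glob in globs)
    }

print(labels_for(["api/src/backend/api/migrations/0042_example.py"]))
# -> {'review-django-migrations'}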

View File

@@ -15,7 +15,13 @@ Please include a summary of the change and which issue is fixed. List any depend
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
- [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)
#### API
- [ ] Verify if API specs need to be regenerated.
- [ ] Check if version updates are required (e.g., specs, Poetry, etc.).
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/api/CHANGELOG.md), if applicable.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

View File

@@ -63,6 +63,12 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Set short git commit SHA
id: vars
run: |
shortSha=$(git rev-parse --short ${{ github.sha }})
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
- name: Login to DockerHub
uses: docker/login-action@v3
with:
@@ -82,6 +88,7 @@ jobs:
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
cache-from: type=gha
cache-to: type=gha,mode=max
@@ -96,3 +103,12 @@ jobs:
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Trigger deployment
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}
event-type: prowler-api-deploy
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'

View File

@@ -6,6 +6,7 @@ on:
- "master"
- "v5.*"
paths:
- ".github/workflows/api-pull-request.yml"
- "api/**"
pull_request:
branches:
@@ -14,7 +15,6 @@ on:
paths:
- "api/**"
env:
POSTGRES_HOST: localhost
POSTGRES_PORT: 5432
@@ -26,7 +26,8 @@ env:
VALKEY_HOST: localhost
VALKEY_PORT: 6379
VALKEY_DB: 0
API_WORKING_DIR: ./api
IMAGE_NAME: prowler-api
jobs:
test:
@@ -171,3 +172,18 @@ jobs:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: api
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build Container
uses: docker/build-push-action@v6
with:
context: ${{ env.API_WORKING_DIR }}
push: false
tags: ${{ env.IMAGE_NAME }}:latest
outputs: type=docker
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -0,0 +1,23 @@
name: Prowler - Conventional Commit
on:
pull_request:
types:
- "opened"
- "edited"
- "synchronize"
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
jobs:
conventional-commit-check:
runs-on: ubuntu-latest
steps:
- name: conventional-commit-check
id: conventional-commit-check
uses: agenthunt/conventional-commit-checker-action@v2.0.0
with:
pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
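
The pr-title-regex above is what the new workflow uses to require Conventional Commit-style PR titles (a type, an optional scope in parentheses, then a description). A quick Python illustration against titles taken from the commit list above (the workflow applies the regex via the action, not this script):

import re

# Same pattern as pr-title-regex: type, optional (scope), then ": description".
PATTERN = re.compile(r"^([^\s(]+)(?:\(([^)]+)\))?: (.+)")

titles = [
    "fix(overviews): manage overview exceptions and use batch_size with bulk",
    "chore: update changelog",
    'Revert "chore(api): update prowler version to 5.4 (#7091)"',
]
for title in titles:
    match = PATTERN.match(title)
    status = "accepted" if match else "rejected"
    print(f"{status}: {title}")
    # e.g. the first title yields groups ('fix', 'overviews', 'manage overview ...')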

View File

@@ -11,7 +11,7 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.88.2
uses: trufflesecurity/trufflehog@v3.88.14
with:
path: ./
base: ${{ github.event.repository.default_branch }}

View File

@@ -39,6 +39,8 @@ jobs:
.backportrc.json
.env
docker-compose*
examples/**
.gitignore
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'

View File

@@ -63,6 +63,12 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Set short git commit SHA
id: vars
run: |
shortSha=$(git rev-parse --short ${{ github.sha }})
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
- name: Login to DockerHub
uses: docker/login-action@v3
with:
@@ -78,10 +84,13 @@ jobs:
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
cache-from: type=gha
cache-to: type=gha,mode=max
@@ -90,9 +99,20 @@ jobs:
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Trigger deployment
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}
event-type: prowler-ui-deploy
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'

View File

@@ -6,6 +6,7 @@ on:
- "master"
- "v5.*"
paths:
- ".github/workflows/ui-pull-request.yml"
- "ui/**"
pull_request:
branches:
@@ -13,6 +14,9 @@ on:
- "v5.*"
paths:
- 'ui/**'
env:
UI_WORKING_DIR: ./ui
IMAGE_NAME: prowler-ui
jobs:
test-and-coverage:
@@ -39,3 +43,20 @@ jobs:
- name: Build the application
working-directory: ./ui
run: npm run build
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build Container
uses: docker/build-push-action@v6
with:
context: ${{ env.UI_WORKING_DIR }}
# Always build using `prod` target
target: prod
push: false
tags: ${{ env.IMAGE_NAME }}:latest
outputs: type=docker
build-args: |
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX

3
.gitignore vendored
View File

@@ -31,7 +31,7 @@ tags
*.DS_Store
# Prowler output
output/
/output
# Prowler found secrets
secrets-*/
@@ -45,6 +45,7 @@ junit-reports/
# Terraform
.terraform*
*.tfstate
*.tfstate.*
# .env
ui/.env*

View File

@@ -27,6 +27,7 @@ repos:
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/myint/autoflake
rev: v2.3.1
@@ -61,8 +62,25 @@ repos:
rev: 1.8.0
hooks:
- id: poetry-check
name: API - poetry-check
args: ["--directory=./api"]
pass_filenames: false
- id: poetry-lock
args: ["--no-update"]
name: API - poetry-lock
args: ["--no-update", "--directory=./api"]
pass_filenames: false
- id: poetry-check
name: SDK - poetry-check
args: ["--directory=./"]
pass_filenames: false
- id: poetry-lock
name: SDK - poetry-lock
args: ["--no-update", "--directory=./"]
pass_filenames: false
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta

View File

@@ -1,4 +1,4 @@
FROM python:3.12.8-alpine3.20
FROM python:3.12.9-alpine3.20
LABEL maintainer="https://github.com/prowler-cloud/prowler"

View File

@@ -71,10 +71,13 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 561 | 81 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 4 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 139 | 18 -> `prowler azure --list-services` | 4 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
| AWS | 564 | 82 | 33 | 10 |
| GCP | 77 | 13 | 5 | 3 |
| Azure | 140 | 18 | 6 | 3 |
| Kubernetes | 83 | 7 | 2 | 7 |
| Microsoft365 | 5 | 2 | 1 | 0 |
> You can list the checks, services, compliance frameworks and categories with `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
# 💻 Installation
@@ -116,7 +119,7 @@ docker compose up -d
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
eval $(poetry env activate)
set -a
source .env
docker compose up postgres valkey -d
@@ -124,6 +127,11 @@ cd src/backend
python manage.py migrate --database admin
gunicorn -c config/guniconf.py config.wsgi:application
```
> [!IMPORTANT]
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
>
> If your Poetry version is below 2.0.0, you must keep using `poetry shell` to activate your environment.
> In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment
> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.
@@ -133,7 +141,7 @@ gunicorn -c config/guniconf.py config.wsgi:application
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
eval $(poetry env activate)
set -a
source .env
cd src/backend
@@ -146,7 +154,7 @@ python -m celery -A config.celery worker -l info -E
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
eval $(poetry env activate)
set -a
source .env
cd src/backend
@@ -167,7 +175,7 @@ npm start
## Prowler CLI
### Pip package
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python > 3.9.1, < 3.13:
```console
pip install prowler
@@ -197,15 +205,21 @@ The container images are available here:
### From GitHub
Python >= 3.9, < 3.13 is required with pip and poetry:
Python > 3.9.1, < 3.13 is required with pip and poetry:
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler
poetry shell
eval $(poetry env activate)
poetry install
python prowler.py -v
```
> [!IMPORTANT]
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
>
> If your Poetry version is below 2.0.0, you must keep using `poetry shell` to activate your environment.
> In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment
> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture

View File

@@ -23,6 +23,7 @@ DJANGO_SECRETS_ENCRYPTION_KEY=""
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=
# PostgreSQL settings
# If running django and celery on host, use 'localhost', else use 'postgres-db'
@@ -39,3 +40,16 @@ POSTGRES_DB=prowler_db
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0
# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local
# Social login credentials
DJANGO_GOOGLE_OAUTH_CLIENT_ID=""
DJANGO_GOOGLE_OAUTH_CLIENT_SECRET=""
DJANGO_GOOGLE_OAUTH_CALLBACK_URL=""
DJANGO_GITHUB_OAUTH_CLIENT_ID=""
DJANGO_GITHUB_OAUTH_CLIENT_SECRET=""
DJANGO_GITHUB_OAUTH_CALLBACK_URL=""

27
api/CHANGELOG.md Normal file
View File

@@ -0,0 +1,27 @@
# Prowler API Changelog
All notable changes to the **Prowler API** are documented in this file.
---
## [v1.5.0] (Prowler v5.4.0)
### Added
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
- Add API scan report system; all scans launched from the API will now generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878).
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)
### Changed
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019).
---
## [v1.4.0] (Prowler v5.3.0)
### Changed
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
- Increase the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
---

View File

@@ -269,3 +269,66 @@ poetry shell
cd src/backend
pytest
```
# Custom commands
Django provides a way to create custom commands that can be run from the command line.
> These commands can be found in: ```prowler/api/src/backend/api/management/commands```
To run a custom command, you need to be in the `prowler/api/src/backend` directory and run:
```console
poetry shell
python manage.py <command_name>
```
## Generate dummy data
```console
python manage.py findings --tenant <TENANT_ID> --findings <NUM_FINDINGS> --resources <NUM_RESOURCES> --batch <TRANSACTION_BATCH_SIZE> --alias <ALIAS>
```
This command creates, for a given tenant, a provider, a scan, and a set of related findings and resources.
> Scan progress and state are updated in real time.
> - 0-33%: Create resources.
> - 33-66%: Create findings.
> - 66%: Create resource-finding mapping.
>
> The last step is required to access the finding details, since the UI needs the resource-finding mapping to display all the information.
### Example
```console
~/backend $ poetry run python manage.py findings --tenant fffb1893-3fc7-4623-a5d9-fae47da1c528 --findings 25000 --resources 1000 --batch 5000 --alias test-script
Starting data population
Tenant: fffb1893-3fc7-4623-a5d9-fae47da1c528
Alias: test-script
Resources: 1000
Findings: 25000
Batch size: 5000
Creating resources...
100%|███████████████████████| 1/1 [00:00<00:00, 7.72it/s]
Resources created successfully.
Creating findings...
100%|███████████████████████| 5/5 [00:05<00:00, 1.09s/it]
Findings created successfully.
Creating resource-finding mappings...
100%|███████████████████████| 5/5 [00:02<00:00, 1.81it/s]
Resource-finding mappings created successfully.
Successfully populated test data.
```

View File

@@ -28,7 +28,7 @@ start_prod_server() {
start_worker() {
echo "Starting the worker..."
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion -E --max-tasks-per-child 1
}
start_worker_beat() {

1563
api/poetry.lock generated

File diff suppressed because it is too large

View File

@@ -8,11 +8,12 @@ description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
version = "1.1.0"
version = "1.5.0"
[tool.poetry.dependencies]
celery = {extras = ["pytest"], version = "^5.4.0"}
django = "5.1.4"
dj-rest-auth = {extras = ["with_social", "jwt"], version = "7.0.1"}
django = "5.1.5"
django-celery-beat = "^2.7.0"
django-celery-results = "^2.5.1"
django-cors-headers = "4.4.0"
@@ -27,16 +28,18 @@ drf-nested-routers = "^0.94.1"
drf-spectacular = "0.27.2"
drf-spectacular-jsonapi = "0.5.1"
gunicorn = "23.0.0"
prowler = "^5.0"
prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "v5.4"}
psycopg2-binary = "2.9.9"
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
# Needed for prowler compatibility
python = ">=3.11,<3.13"
sentry-sdk = {extras = ["django"], version = "^2.20.0"}
uuid6 = "2024.7.10"
[tool.poetry.group.dev.dependencies]
bandit = "1.7.9"
coverage = "7.5.4"
django-silk = "5.3.2"
docker = "7.1.0"
freezegun = "1.5.1"
mypy = "1.10.1"
@@ -49,6 +52,7 @@ pytest-randomly = "3.15.0"
pytest-xdist = "3.6.1"
ruff = "0.5.0"
safety = "3.2.9"
tqdm = "4.67.1"
vulture = "2.14"
[tool.poetry.scripts]

View File

@@ -0,0 +1,57 @@
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.db import transaction
from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.models import Membership, Role, Tenant, User, UserRoleRelationship
class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
@staticmethod
def get_user_by_email(email: str):
try:
return User.objects.get(email=email)
except User.DoesNotExist:
return None
def pre_social_login(self, request, sociallogin):
# Link existing accounts with the same email address
email = sociallogin.account.extra_data.get("email")
if email:
existing_user = self.get_user_by_email(email)
if existing_user:
sociallogin.connect(request, existing_user)
def save_user(self, request, sociallogin, form=None):
"""
Called after the user data is fully populated from the provider
and is about to be saved to the DB for the first time.
"""
with transaction.atomic(using=MainRouter.admin_db):
user = super().save_user(request, sociallogin, form)
user.save(using=MainRouter.admin_db)
tenant = Tenant.objects.using(MainRouter.admin_db).create(
name=f"{user.email.split('@')[0]} default tenant"
)
with rls_transaction(str(tenant.id)):
Membership.objects.using(MainRouter.admin_db).create(
user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
)
role = Role.objects.using(MainRouter.admin_db).create(
name="admin",
tenant_id=tenant.id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user,
role=role,
tenant_id=tenant.id,
)
return user
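As a brief sketch of how this adapter would be wired up, django-allauth picks up a custom social account adapter through its `SOCIALACCOUNT_ADAPTER` setting; the module path below is illustrative, since the real import path is not shown in this diff:

```python
# settings.py (sketch; hypothetical module path for the adapter above)
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
```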

View File

@@ -1,18 +1,29 @@
ALLOWED_APPS = ("django", "socialaccount", "account", "authtoken", "silk")
class MainRouter:
default_db = "default"
admin_db = "admin"
def db_for_read(self, model, **hints): # noqa: F841
model_table_name = model._meta.db_table
if model_table_name.startswith("django_"):
if model_table_name.startswith("django_") or any(
model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS
):
return self.admin_db
return None
def db_for_write(self, model, **hints): # noqa: F841
model_table_name = model._meta.db_table
if model_table_name.startswith("django_"):
if any(model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS):
return self.admin_db
return None
def allow_migrate(self, db, app_label, model_name=None, **hints): # noqa: F841
return db == self.admin_db
def allow_relation(self, obj1, obj2, **hints): # noqa: F841
# Allow relations if both objects are in either "default" or "admin" db connectors
if {obj1._state.db, obj2._state.db} <= {self.default_db, self.admin_db}:
return True
return None
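A rough illustration of the routing rule above (a standalone sketch with a stand-in model class rather than a real Django model; `MainRouter` is imported from `api.db_router`, as in the adapter diff above):

```python
from api.db_router import MainRouter


class FakeMeta:
    def __init__(self, db_table):
        self.db_table = db_table


class FakeModel:
    def __init__(self, db_table):
        self._meta = FakeMeta(db_table)


router = MainRouter()

# Tables belonging to the allowed apps (e.g. allauth's socialaccount) go to the admin DB.
assert router.db_for_write(FakeModel("socialaccount_socialaccount")) == "admin"

# Tenant-scoped tables fall through (None) and keep using the default, RLS-protected connection.
assert router.db_for_write(FakeModel("findings")) is None
```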

View File

@@ -7,7 +7,7 @@ from rest_framework_json_api.serializers import ValidationError
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
def set_tenant(func):
def set_tenant(func=None, *, keep_tenant=False):
"""
Decorator to set the tenant context for a Celery task based on the provided tenant_id.
@@ -40,20 +40,29 @@ def set_tenant(func):
# The tenant context will be set before the task logic executes.
"""
@wraps(func)
@transaction.atomic
def wrapper(*args, **kwargs):
try:
tenant_id = kwargs.pop("tenant_id")
except KeyError:
raise KeyError("This task requires the tenant_id")
try:
uuid.UUID(tenant_id)
except ValueError:
raise ValidationError("Tenant ID must be a valid UUID")
with connection.cursor() as cursor:
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
def decorator(func):
@wraps(func)
@transaction.atomic
def wrapper(*args, **kwargs):
try:
if not keep_tenant:
tenant_id = kwargs.pop("tenant_id")
else:
tenant_id = kwargs["tenant_id"]
except KeyError:
raise KeyError("This task requires the tenant_id")
try:
uuid.UUID(tenant_id)
except ValueError:
raise ValidationError("Tenant ID must be a valid UUID")
with connection.cursor() as cursor:
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
return func(*args, **kwargs)
return func(*args, **kwargs)
return wrapper
return wrapper
if func is None:
return decorator
else:
return decorator(func)
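A minimal usage sketch of the updated decorator (the task names, bodies and import path are hypothetical; only the `tenant_id` handling reflects the code above): by default `tenant_id` is consumed by the decorator, while `keep_tenant=True` leaves it in the task's kwargs.

```python
from celery import shared_task

# Hypothetical import path for the decorator shown above.
from api.decorators import set_tenant


@shared_task
@set_tenant
def count_findings(**kwargs):
    # tenant_id has already been popped and applied as the tenant context.
    ...


@shared_task
@set_tenant(keep_tenant=True)
def generate_report(tenant_id, scan_id, **kwargs):
    # tenant_id is still available here because keep_tenant=True.
    ...


# Callers always pass tenant_id as a keyword argument, e.g.:
# count_findings.delay(tenant_id="fffb1893-3fc7-4623-a5d9-fae47da1c528")
```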

View File

@@ -1,4 +1,4 @@
from datetime import date, datetime, timezone
from datetime import date, datetime, timedelta, timezone
from django.conf import settings
from django.db.models import Q
@@ -319,13 +319,41 @@ class FindingFilter(FilterSet):
field_name="resources__type", lookup_expr="icontains"
)
# Temporarily disabled until we implement tag filtering in the UI
# resource_tag_key = CharFilter(field_name="resources__tags__key")
# resource_tag_key__in = CharInFilter(
# field_name="resources__tags__key", lookup_expr="in"
# )
# resource_tag_key__icontains = CharFilter(
# field_name="resources__tags__key", lookup_expr="icontains"
# )
# resource_tag_value = CharFilter(field_name="resources__tags__value")
# resource_tag_value__in = CharInFilter(
# field_name="resources__tags__value", lookup_expr="in"
# )
# resource_tag_value__icontains = CharFilter(
# field_name="resources__tags__value", lookup_expr="icontains"
# )
# resource_tags = CharInFilter(
# method="filter_resource_tag",
# lookup_expr="in",
# help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
# "separated by commas.",
# )
scan = UUIDFilter(method="filter_scan_id")
scan__in = UUIDInFilter(method="filter_scan_id_in")
inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
inserted_at__gte = DateFilter(method="filter_inserted_at_gte")
inserted_at__lte = DateFilter(method="filter_inserted_at_lte")
inserted_at__gte = DateFilter(
method="filter_inserted_at_gte",
help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
)
inserted_at__lte = DateFilter(
method="filter_inserted_at_lte",
help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
)
class Meta:
model = Finding
@@ -353,6 +381,52 @@ class FindingFilter(FilterSet):
},
}
def filter_queryset(self, queryset):
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
self.data.get("inserted_at")
or self.data.get("inserted_at__date")
or self.data.get("inserted_at__gte")
or self.data.get("inserted_at__lte")
):
raise ValidationError(
[
{
"detail": "At least one date filter is required: filter[inserted_at], filter[inserted_at.gte], "
"or filter[inserted_at.lte].",
"status": 400,
"source": {"pointer": "/data/attributes/inserted_at"},
"code": "required",
}
]
)
gte_date = (
datetime.strptime(self.data.get("inserted_at__gte"), "%Y-%m-%d").date()
if self.data.get("inserted_at__gte")
else datetime.now(timezone.utc).date()
)
lte_date = (
datetime.strptime(self.data.get("inserted_at__lte"), "%Y-%m-%d").date()
if self.data.get("inserted_at__lte")
else datetime.now(timezone.utc).date()
)
if abs(lte_date - gte_date) > timedelta(
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
):
raise ValidationError(
[
{
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
"status": 400,
"source": {"pointer": "/data/attributes/inserted_at"},
"code": "invalid",
}
]
)
return super().filter_queryset(queryset)
# Convert filter values to UUIDv7 values for use with partitioning
def filter_scan_id(self, queryset, name, value):
try:
@@ -373,9 +447,7 @@ class FindingFilter(FilterSet):
)
return (
queryset.filter(id__gte=start)
.filter(id__lt=end)
.filter(scan__id=value_uuid)
queryset.filter(id__gte=start).filter(id__lt=end).filter(scan_id=value_uuid)
)
def filter_scan_id_in(self, queryset, name, value):
@@ -400,31 +472,42 @@ class FindingFilter(FilterSet):
]
)
if start == end:
return queryset.filter(id__gte=start).filter(scan__id__in=uuid_list)
return queryset.filter(id__gte=start).filter(scan_id__in=uuid_list)
else:
return (
queryset.filter(id__gte=start)
.filter(id__lt=end)
.filter(scan__id__in=uuid_list)
.filter(scan_id__in=uuid_list)
)
def filter_inserted_at(self, queryset, name, value):
value = self.maybe_date_to_datetime(value)
start = uuid7_start(datetime_to_uuid7(value))
datetime_value = self.maybe_date_to_datetime(value)
start = uuid7_start(datetime_to_uuid7(datetime_value))
end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
return queryset.filter(id__gte=start).filter(inserted_at__date=value)
return queryset.filter(id__gte=start, id__lt=end)
def filter_inserted_at_gte(self, queryset, name, value):
value = self.maybe_date_to_datetime(value)
start = uuid7_start(datetime_to_uuid7(value))
datetime_value = self.maybe_date_to_datetime(value)
start = uuid7_start(datetime_to_uuid7(datetime_value))
return queryset.filter(id__gte=start).filter(inserted_at__gte=value)
return queryset.filter(id__gte=start)
def filter_inserted_at_lte(self, queryset, name, value):
value = self.maybe_date_to_datetime(value)
end = uuid7_start(datetime_to_uuid7(value))
datetime_value = self.maybe_date_to_datetime(value)
end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
return queryset.filter(id__lte=end).filter(inserted_at__lte=value)
return queryset.filter(id__lt=end)
def filter_resource_tag(self, queryset, name, value):
overall_query = Q()
for key_value_pair in value:
tag_key, tag_value = key_value_pair.split(":", 1)
overall_query |= Q(
resources__tags__key__icontains=tag_key,
resources__tags__value__icontains=tag_value,
)
return queryset.filter(overall_query).distinct()
@staticmethod
def maybe_date_to_datetime(value):
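To illustrate the filtering rules enforced above, here is a small request sketch (assuming the API runs locally at `http://localhost:8080/api/v1`, as in the README instructions, and that `<ACCESS_TOKEN>` is a valid bearer token; the `requests` library is used only for the example): findings queries must carry a scan filter or at least one `inserted_at` filter, and an explicit date range may not exceed `FINDINGS_MAX_DAYS_IN_RANGE` days.

```python
import requests  # third-party HTTP client, used here for illustration only

BASE_URL = "http://localhost:8080/api/v1"  # assumed local deployment
HEADERS = {"Authorization": "Bearer <ACCESS_TOKEN>"}  # placeholder token

# Accepted: an explicit, bounded date range.
ok = requests.get(
    f"{BASE_URL}/findings",
    headers=HEADERS,
    params={
        "filter[inserted_at.gte]": "2024-10-01",
        "filter[inserted_at.lte]": "2024-10-05",
    },
)

# Rejected with 400: neither a scan filter nor any inserted_at filter was provided.
missing_filters = requests.get(f"{BASE_URL}/findings", headers=HEADERS)
```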

View File

@@ -122,6 +122,22 @@
"scanner_args": {}
}
},
{
"model": "api.provider",
"pk": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:45:26.352Z",
"updated_at": "2024-10-18T11:16:23.533Z",
"provider": "kubernetes",
"uid": "gke_lucky-coast-419309_us-central1_autopilot-cluster-2",
"alias": "k8s_testing_2",
"connected": true,
"connection_last_checked_at": "2024-10-18T11:16:23.503Z",
"metadata": {},
"scanner_args": {}
}
},
{
"model": "api.providersecret",
"pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",

View File

@@ -11,9 +11,7 @@
"unique_resource_count": 1,
"duration": 5,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled"
]
"checks_to_execute": ["accessanalyzer_enabled"]
},
"inserted_at": "2024-09-01T17:25:27.050Z",
"started_at": "2024-09-01T17:25:27.050Z",
@@ -33,9 +31,7 @@
"unique_resource_count": 1,
"duration": 20,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled"
]
"checks_to_execute": ["accessanalyzer_enabled"]
},
"inserted_at": "2024-09-02T17:24:27.050Z",
"started_at": "2024-09-02T17:24:27.050Z",
@@ -55,9 +51,7 @@
"unique_resource_count": 10,
"duration": 10,
"scanner_args": {
"checks_to_execute": [
"cloudsql_instance_automated_backups"
]
"checks_to_execute": ["cloudsql_instance_automated_backups"]
},
"inserted_at": "2024-09-02T19:26:27.050Z",
"started_at": "2024-09-02T19:26:27.050Z",
@@ -77,9 +71,7 @@
"unique_resource_count": 1,
"duration": 35,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled"
]
"checks_to_execute": ["accessanalyzer_enabled"]
},
"inserted_at": "2024-09-02T19:27:27.050Z",
"started_at": "2024-09-02T19:27:27.050Z",
@@ -97,9 +89,7 @@
"name": "test scheduled aws scan",
"state": "available",
"scanner_args": {
"checks_to_execute": [
"cloudformation_stack_outputs_find_secrets"
]
"checks_to_execute": ["cloudformation_stack_outputs_find_secrets"]
},
"scheduled_at": "2030-09-02T19:20:27.050Z",
"inserted_at": "2024-09-02T19:24:27.050Z",
@@ -178,9 +168,7 @@
"unique_resource_count": 19,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled"
]
"checks_to_execute": ["accessanalyzer_enabled"]
},
"duration": 7,
"scheduled_at": null,
@@ -190,6 +178,56 @@
"completed_at": "2024-10-18T10:46:05.127Z"
}
},
{
"model": "api.scan",
"pk": "6dd8925f-a52d-48de-a546-d2d90db30ab1",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan azure",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 20,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"account_security_contact_information_is_registered"
]
},
"duration": 4,
"scheduled_at": null,
"inserted_at": "2024-10-18T11:16:21.358Z",
"updated_at": "2024-10-18T11:16:26.060Z",
"started_at": "2024-10-18T11:16:21.593Z",
"completed_at": "2024-10-18T11:16:26.060Z"
}
},
{
"model": "api.scan",
"pk": "4ca7ce89-3236-41a8-a369-8937bc152af5",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan k8s",
"provider": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 20,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"account_security_contact_information_is_registered"
]
},
"duration": 4,
"scheduled_at": null,
"inserted_at": "2024-10-18T11:16:21.358Z",
"updated_at": "2024-10-18T11:16:26.060Z",
"started_at": "2024-10-18T11:16:21.593Z",
"completed_at": "2024-10-18T11:16:26.060Z"
}
},
{
"model": "api.scan",
"pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",

View File

@@ -6,6 +6,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.823Z",
"updated_at": "2024-10-18T10:46:04.841Z",
"first_seen_at": "2024-10-18T10:46:04.823Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -61,6 +62,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.855Z",
"updated_at": "2024-10-18T10:46:04.858Z",
"first_seen_at": "2024-10-18T10:46:04.855Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
"delta": "new",
"status": "FAIL",
@@ -116,6 +118,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.869Z",
"updated_at": "2024-10-18T10:46:04.876Z",
"first_seen_at": "2024-10-18T10:46:04.869Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -171,6 +174,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.888Z",
"updated_at": "2024-10-18T10:46:04.892Z",
"first_seen_at": "2024-10-18T10:46:04.888Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -226,6 +230,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.901Z",
"updated_at": "2024-10-18T10:46:04.905Z",
"first_seen_at": "2024-10-18T10:46:04.901Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -281,6 +286,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.915Z",
"updated_at": "2024-10-18T10:46:04.919Z",
"first_seen_at": "2024-10-18T10:46:04.915Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -336,6 +342,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.929Z",
"updated_at": "2024-10-18T10:46:04.934Z",
"first_seen_at": "2024-10-18T10:46:04.929Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -391,6 +398,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.944Z",
"updated_at": "2024-10-18T10:46:04.947Z",
"first_seen_at": "2024-10-18T10:46:04.944Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -446,6 +454,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.957Z",
"updated_at": "2024-10-18T10:46:04.962Z",
"first_seen_at": "2024-10-18T10:46:04.957Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
"delta": "new",
"status": "PASS",
@@ -501,6 +510,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.971Z",
"updated_at": "2024-10-18T10:46:04.975Z",
"first_seen_at": "2024-10-18T10:46:04.971Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -556,6 +566,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.984Z",
"updated_at": "2024-10-18T10:46:04.989Z",
"first_seen_at": "2024-10-18T10:46:04.984Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -611,6 +622,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.999Z",
"updated_at": "2024-10-18T10:46:05.003Z",
"first_seen_at": "2024-10-18T10:46:04.999Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -666,6 +678,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.013Z",
"updated_at": "2024-10-18T10:46:05.018Z",
"first_seen_at": "2024-10-18T10:46:05.013Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -721,6 +734,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.029Z",
"updated_at": "2024-10-18T10:46:05.033Z",
"first_seen_at": "2024-10-18T10:46:05.029Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -776,6 +790,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.045Z",
"updated_at": "2024-10-18T10:46:05.050Z",
"first_seen_at": "2024-10-18T10:46:05.045Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -831,6 +846,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.061Z",
"updated_at": "2024-10-18T10:46:05.065Z",
"first_seen_at": "2024-10-18T10:46:05.061Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -886,6 +902,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.080Z",
"updated_at": "2024-10-18T10:46:05.085Z",
"first_seen_at": "2024-10-18T10:46:05.080Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -941,6 +958,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.099Z",
"updated_at": "2024-10-18T10:46:05.104Z",
"first_seen_at": "2024-10-18T10:46:05.099Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -996,6 +1014,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.115Z",
"updated_at": "2024-10-18T10:46:05.121Z",
"first_seen_at": "2024-10-18T10:46:05.115Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
"delta": "new",
"status": "FAIL",
@@ -1051,6 +1070,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.489Z",
"updated_at": "2024-10-18T11:16:24.506Z",
"first_seen_at": "2024-10-18T10:46:04.823Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1106,6 +1126,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.518Z",
"updated_at": "2024-10-18T11:16:24.521Z",
"first_seen_at": "2024-10-18T10:46:04.855Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
"delta": null,
"status": "FAIL",
@@ -1161,6 +1182,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.526Z",
"updated_at": "2024-10-18T11:16:24.529Z",
"first_seen_at": "2024-10-18T10:46:04.869Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1216,6 +1238,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.535Z",
"updated_at": "2024-10-18T11:16:24.538Z",
"first_seen_at": "2024-10-18T10:46:04.888Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1271,6 +1294,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.544Z",
"updated_at": "2024-10-18T11:16:24.546Z",
"first_seen_at": "2024-10-18T10:46:04.901Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1326,6 +1350,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.551Z",
"updated_at": "2024-10-18T11:16:24.554Z",
"first_seen_at": "2024-10-18T10:46:04.915Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1381,6 +1406,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.560Z",
"updated_at": "2024-10-18T11:16:24.562Z",
"first_seen_at": "2024-10-18T10:46:04.929Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1436,6 +1462,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.567Z",
"updated_at": "2024-10-18T11:16:24.569Z",
"first_seen_at": "2024-10-18T10:46:04.944Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1491,6 +1518,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.573Z",
"updated_at": "2024-10-18T11:16:24.575Z",
"first_seen_at": "2024-10-18T10:46:04.957Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
"delta": null,
"status": "PASS",
@@ -1546,6 +1574,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.580Z",
"updated_at": "2024-10-18T11:16:24.582Z",
"first_seen_at": "2024-10-18T10:46:04.971Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1601,6 +1630,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.587Z",
"updated_at": "2024-10-18T11:16:24.589Z",
"first_seen_at": "2024-10-18T10:46:04.984Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1656,6 +1686,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.595Z",
"updated_at": "2024-10-18T11:16:24.597Z",
"first_seen_at": "2024-10-18T10:46:04.999Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1711,6 +1742,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.602Z",
"updated_at": "2024-10-18T11:16:24.604Z",
"first_seen_at": "2024-10-18T10:46:05.013Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1766,6 +1798,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.610Z",
"updated_at": "2024-10-18T11:16:24.612Z",
"first_seen_at": "2024-10-18T10:46:05.029Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1821,6 +1854,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.617Z",
"updated_at": "2024-10-18T11:16:24.620Z",
"first_seen_at": "2024-10-18T10:46:05.045Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1876,6 +1910,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.625Z",
"updated_at": "2024-10-18T11:16:24.627Z",
"first_seen_at": "2024-10-18T10:46:05.061Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1931,6 +1966,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.632Z",
"updated_at": "2024-10-18T11:16:24.634Z",
"first_seen_at": "2024-10-18T10:46:05.080Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1986,6 +2022,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.639Z",
"updated_at": "2024-10-18T11:16:24.642Z",
"first_seen_at": "2024-10-18T10:46:05.099Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -2041,6 +2078,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.646Z",
"updated_at": "2024-10-18T11:16:24.648Z",
"first_seen_at": "2024-10-18T10:46:05.115Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
"delta": null,
"status": "FAIL",
@@ -2096,6 +2134,7 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:26.033Z",
"updated_at": "2024-10-18T11:16:26.045Z",
"first_seen_at": "2024-10-18T11:16:26.033Z",
"uid": "prowler-aws-account_security_contact_information_is_registered-112233445566-us-east-1-112233445566",
"delta": "new",
"status": "MANUAL",

View File

@@ -0,0 +1,237 @@
import random
from datetime import datetime, timezone
from math import ceil
from uuid import uuid4
from django.core.management.base import BaseCommand
from tqdm import tqdm
from api.db_utils import rls_transaction
from api.models import (
Finding,
Provider,
Resource,
ResourceFindingMapping,
Scan,
StatusChoices,
)
from prowler.lib.check.models import CheckMetadata
class Command(BaseCommand):
help = "Populates the database with test data for performance testing."
def add_arguments(self, parser):
parser.add_argument(
"--tenant",
type=str,
required=True,
help="Tenant id for which the data will be populated.",
)
parser.add_argument(
"--resources",
type=int,
required=True,
help="The number of resources to create.",
)
parser.add_argument(
"--findings",
type=int,
required=True,
help="The number of findings to create.",
)
parser.add_argument(
"--batch", type=int, required=True, help="The batch size for bulk creation."
)
parser.add_argument(
"--alias",
type=str,
required=False,
help="Optional alias for the provider and scan",
)
def handle(self, *args, **options):
tenant_id = options["tenant"]
num_resources = options["resources"]
num_findings = options["findings"]
batch_size = options["batch"]
alias = options["alias"] or "Testing"
uid_token = str(uuid4())
self.stdout.write(self.style.NOTICE("Starting data population"))
self.stdout.write(self.style.NOTICE(f"\tTenant: {tenant_id}"))
self.stdout.write(self.style.NOTICE(f"\tAlias: {alias}"))
self.stdout.write(self.style.NOTICE(f"\tResources: {num_resources}"))
self.stdout.write(self.style.NOTICE(f"\tFindings: {num_findings}"))
self.stdout.write(self.style.NOTICE(f"\tBatch size: {batch_size}\n\n"))
# Resource metadata
possible_regions = [
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2",
"ca-central-1",
"eu-central-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"ap-southeast-1",
"ap-southeast-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-south-1",
"sa-east-1",
]
possible_services = []
possible_types = []
bulk_check_metadata = CheckMetadata.get_bulk(provider="aws")
for check_metadata in bulk_check_metadata.values():
if check_metadata.ServiceName not in possible_services:
possible_services.append(check_metadata.ServiceName)
if (
check_metadata.ResourceType
and check_metadata.ResourceType not in possible_types
):
possible_types.append(check_metadata.ResourceType)
with rls_transaction(tenant_id):
provider, _ = Provider.all_objects.get_or_create(
tenant_id=tenant_id,
provider="aws",
connected=True,
uid=str(random.randint(100000000000, 999999999999)),
defaults={
"alias": alias,
},
)
with rls_transaction(tenant_id):
scan = Scan.all_objects.create(
tenant_id=tenant_id,
provider=provider,
name=alias,
trigger="manual",
state="executing",
progress=0,
started_at=datetime.now(timezone.utc),
)
scan_state = "completed"
try:
# Create resources
resources = []
for i in range(num_resources):
resources.append(
Resource(
tenant_id=tenant_id,
provider_id=provider.id,
uid=f"testing-{uid_token}-{i}",
name=f"Testing {uid_token}-{i}",
region=random.choice(possible_regions),
service=random.choice(possible_services),
type=random.choice(possible_types),
)
)
num_batches = ceil(len(resources) / batch_size)
self.stdout.write(self.style.WARNING("Creating resources..."))
for i in tqdm(range(0, len(resources), batch_size), total=num_batches):
with rls_transaction(tenant_id):
Resource.all_objects.bulk_create(resources[i : i + batch_size])
self.stdout.write(self.style.SUCCESS("Resources created successfully.\n\n"))
with rls_transaction(tenant_id):
scan.progress = 33
scan.save()
# Create Findings
findings = []
possible_deltas = ["new", "changed", None]
possible_severities = ["critical", "high", "medium", "low"]
findings_resources_mapping = []
for i in range(num_findings):
severity = random.choice(possible_severities)
check_id = random.randint(1, 1000)
assigned_resource_num = random.randint(0, len(resources) - 1)
assigned_resource = resources[assigned_resource_num]
findings_resources_mapping.append(assigned_resource_num)
findings.append(
Finding(
tenant_id=tenant_id,
scan=scan,
uid=f"testing-{uid_token}-{i}",
delta=random.choice(possible_deltas),
check_id=f"check-{check_id}",
status=random.choice(list(StatusChoices)),
severity=severity,
impact=severity,
raw_result={},
check_metadata={
"checktitle": f"Test title for check {check_id}",
"risk": f"Testing risk {uid_token}-{i}",
"provider": "aws",
"severity": severity,
"categories": ["category1", "category2", "category3"],
"description": "This is a random description that should not matter for testing purposes.",
"servicename": assigned_resource.service,
"resourcetype": assigned_resource.type,
},
)
)
num_batches = ceil(len(findings) / batch_size)
self.stdout.write(self.style.WARNING("Creating findings..."))
for i in tqdm(range(0, len(findings), batch_size), total=num_batches):
with rls_transaction(tenant_id):
Finding.all_objects.bulk_create(findings[i : i + batch_size])
self.stdout.write(self.style.SUCCESS("Findings created successfully.\n\n"))
with rls_transaction(tenant_id):
scan.progress = 66
scan.save()
# Create ResourceFindingMapping
mappings = []
for index, f in enumerate(findings):
mappings.append(
ResourceFindingMapping(
tenant_id=tenant_id,
resource=resources[findings_resources_mapping[index]],
finding=f,
)
)
num_batches = ceil(len(mappings) / batch_size)
self.stdout.write(
self.style.WARNING("Creating resource-finding mappings...")
)
for i in tqdm(range(0, len(mappings), batch_size), total=num_batches):
with rls_transaction(tenant_id):
ResourceFindingMapping.objects.bulk_create(
mappings[i : i + batch_size]
)
self.stdout.write(
self.style.SUCCESS(
"Resource-finding mappings created successfully.\n\n"
)
)
except Exception as e:
self.stdout.write(self.style.ERROR(f"Failed to populate test data: {e}"))
scan_state = "failed"
finally:
scan.completed_at = datetime.now(timezone.utc)
scan.duration = int(
(datetime.now(timezone.utc) - scan.started_at).total_seconds()
)
scan.progress = 100
scan.state = scan_state
scan.unique_resource_count = num_resources
with rls_transaction(tenant_id):
scan.save()
self.stdout.write(self.style.NOTICE("Successfully populated test data."))

View File

@@ -0,0 +1,15 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0005_rbac_missing_admin_roles"),
]
operations = [
migrations.AddField(
model_name="finding",
name="first_seen_at",
field=models.DateTimeField(editable=False, null=True),
),
]

View File

@@ -0,0 +1,25 @@
# Generated by Django 5.1.5 on 2025-01-28 15:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0006_findings_first_seen"),
]
operations = [
migrations.AddIndex(
model_name="scan",
index=models.Index(
fields=["tenant_id", "provider_id", "state", "inserted_at"],
name="scans_prov_state_insert_idx",
),
),
migrations.AddIndex(
model_name="scansummary",
index=models.Index(
fields=["tenant_id", "scan_id"], name="scan_summaries_tenant_scan_idx"
),
),
]

View File

@@ -0,0 +1,64 @@
import json
from datetime import datetime, timedelta, timezone
import django.db.models.deletion
from django.db import migrations, models
from django_celery_beat.models import PeriodicTask
from api.db_utils import rls_transaction
from api.models import Scan, StateChoices
def migrate_daily_scheduled_scan_tasks(apps, schema_editor):
for daily_scheduled_scan_task in PeriodicTask.objects.filter(
task="scan-perform-scheduled"
):
task_kwargs = json.loads(daily_scheduled_scan_task.kwargs)
tenant_id = task_kwargs["tenant_id"]
provider_id = task_kwargs["provider_id"]
current_time = datetime.now(timezone.utc)
scheduled_time_today = datetime.combine(
current_time.date(),
daily_scheduled_scan_task.start_time.time(),
tzinfo=timezone.utc,
)
if current_time < scheduled_time_today:
next_scan_date = scheduled_time_today
else:
next_scan_date = scheduled_time_today + timedelta(days=1)
with rls_transaction(tenant_id):
Scan.objects.create(
tenant_id=tenant_id,
name="Daily scheduled scan",
provider_id=provider_id,
trigger=Scan.TriggerChoices.SCHEDULED,
state=StateChoices.SCHEDULED,
scheduled_at=next_scan_date,
scheduler_task_id=daily_scheduled_scan_task.id,
)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0007_scan_and_scan_summaries_indexes"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
migrations.AddField(
model_name="scan",
name="scheduler_task",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="django_celery_beat.periodictask",
),
),
migrations.RunPython(migrate_daily_scheduled_scan_tasks),
]

View File

@@ -0,0 +1,22 @@
# Generated by Django 5.1.5 on 2025-02-07 09:42
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0008_daily_scheduled_tasks_update"),
]
operations = [
migrations.AlterField(
model_name="provider",
name="uid",
field=models.CharField(
max_length=250,
validators=[django.core.validators.MinLengthValidator(3)],
verbose_name="Unique identifier for the provider, set by the provider",
),
),
]

View File

@@ -0,0 +1,109 @@
from functools import partial
from django.db import connection, migrations
def create_index_on_partitions(
apps, schema_editor, parent_table: str, index_name: str, index_details: str
):
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT inhrelid::regclass::text
FROM pg_inherits
WHERE inhparent = %s::regclass;
""",
[parent_table],
)
partitions = [row[0] for row in cursor.fetchall()]
# Iterate over partitions and create index concurrently.
# Note: PostgreSQL does not allow CONCURRENTLY inside a transaction,
# so we need atomic = False for this migration.
for partition in partitions:
sql = (
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} ON {partition} "
f"{index_details};"
)
schema_editor.execute(sql)
def drop_index_on_partitions(apps, schema_editor, parent_table: str, index_name: str):
with schema_editor.connection.cursor() as cursor:
cursor.execute(
"""
SELECT inhrelid::regclass::text
FROM pg_inherits
WHERE inhparent = %s::regclass;
""",
[parent_table],
)
partitions = [row[0] for row in cursor.fetchall()]
# Iterate over partitions and drop index concurrently.
for partition in partitions:
partition_index = f"{partition.replace('.', '_')}_{index_name}"
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {partition_index};"
schema_editor.execute(sql)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0009_increase_provider_uid_maximum_length"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="findings_tenant_and_id_idx",
index_details="(tenant_id, id)",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="findings_tenant_and_id_idx",
),
),
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_idx",
index_details="(tenant_id, scan_id)",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_idx",
),
),
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_id_idx",
index_details="(tenant_id, scan_id, id)",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_id_idx",
),
),
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_delta_new_idx",
index_details="(tenant_id, id) where delta = 'new'",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_delta_new_idx",
),
),
]

View File

@@ -0,0 +1,49 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0010_findings_performance_indexes_partitions"),
]
operations = [
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"
),
),
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"
),
),
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "scan_id", "id"], name="find_tenant_scan_id_idx"
),
),
migrations.AddIndex(
model_name="finding",
index=models.Index(
condition=models.Q(("delta", "new")),
fields=["tenant_id", "id"],
name="find_delta_new_idx",
),
),
migrations.AddIndex(
model_name="resourcetagmapping",
index=models.Index(
fields=["tenant_id", "resource_id"], name="resource_tag_tenant_idx"
),
),
migrations.AddIndex(
model_name="resource",
index=models.Index(
fields=["tenant_id", "service", "region", "type"],
name="resource_tenant_metadata_idx",
),
),
]

View File

@@ -0,0 +1,15 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0011_findings_performance_indexes_parent"),
]
operations = [
migrations.AddField(
model_name="scan",
name="output_location",
field=models.CharField(blank=True, max_length=200, null=True),
),
]

View File

@@ -11,6 +11,7 @@ from django.core.validators import MinLengthValidator
from django.db import models
from django.db.models import Q
from django.utils.translation import gettext_lazy as _
from django_celery_beat.models import PeriodicTask
from django_celery_results.models import TaskResult
from psqlextra.manager import PostgresManager
from psqlextra.models import PostgresPartitionedModel
@@ -226,13 +227,13 @@ class Provider(RowLevelSecurityProtectedModel):
@staticmethod
def validate_kubernetes_uid(value):
if not re.match(
r"(^[a-z0-9]([-a-z0-9]{1,61}[a-z0-9])?$)|(^arn:aws(-cn|-us-gov|-iso|-iso-b)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/:\.\*]+(:\d+)?$)",
r"^[a-z0-9][A-Za-z0-9_.:\/-]{1,250}$",
value,
):
raise ModelValidationError(
detail="The value must either be a valid Kubernetes UID (up to 63 characters, "
"starting and ending with a lowercase letter or number, containing only "
"lowercase alphanumeric characters and hyphens) or a valid EKS ARN.",
"lowercase alphanumeric characters and hyphens) or a valid AWS EKS Cluster ARN, GCP GKE Context Name or Azure AKS Cluster Name.",
code="kubernetes-uid",
pointer="/data/attributes/uid",
)
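As a small standalone sketch of what the relaxed pattern above accepts (the EKS account number below is made up; the GKE context name is taken from the test fixtures in this changeset):

```python
import re

# The relaxed provider UID pattern from validate_kubernetes_uid above.
KUBERNETES_UID_PATTERN = re.compile(r"^[a-z0-9][A-Za-z0-9_.:\/-]{1,250}$")

candidates = [
    "gke_lucky-coast-419309_us-central1_autopilot-cluster-2",   # GKE context name -> accepted
    "arn:aws:eks:us-east-1:123456789012:cluster/demo-cluster",  # hypothetical EKS ARN -> accepted
    "name with spaces",                                          # spaces are not allowed -> rejected
]

for uid in candidates:
    print(uid, "->", bool(KUBERNETES_UID_PATTERN.match(uid)))
```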
@@ -246,7 +247,7 @@ class Provider(RowLevelSecurityProtectedModel):
)
uid = models.CharField(
"Unique identifier for the provider, set by the provider",
max_length=63,
max_length=250,
blank=False,
validators=[MinLengthValidator(3)],
)
@@ -410,6 +411,10 @@ class Scan(RowLevelSecurityProtectedModel):
started_at = models.DateTimeField(null=True, blank=True)
completed_at = models.DateTimeField(null=True, blank=True)
next_scan_at = models.DateTimeField(null=True, blank=True)
scheduler_task = models.ForeignKey(
PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
)
output_location = models.CharField(blank=True, null=True, max_length=200)
# TODO: mutelist foreign key
class Meta(RowLevelSecurityProtectedModel.Meta):
@@ -428,6 +433,10 @@ class Scan(RowLevelSecurityProtectedModel):
fields=["provider", "state", "trigger", "scheduled_at"],
name="scans_prov_state_trig_sche_idx",
),
models.Index(
fields=["tenant_id", "provider_id", "state", "inserted_at"],
name="scans_prov_state_insert_idx",
),
]
class JSONAPIMeta:
@@ -544,6 +553,10 @@ class Resource(RowLevelSecurityProtectedModel):
fields=["uid", "region", "service", "name"],
name="resource_uid_reg_serv_name_idx",
),
models.Index(
fields=["tenant_id", "service", "region", "type"],
name="resource_tenant_metadata_idx",
),
GinIndex(fields=["text_search"], name="gin_resources_search_idx"),
]
@@ -591,6 +604,12 @@ class ResourceTagMapping(RowLevelSecurityProtectedModel):
),
]
indexes = [
models.Index(
fields=["tenant_id", "resource_id"], name="resource_tag_tenant_idx"
),
]
class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
"""
@@ -615,6 +634,7 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
id = models.UUIDField(primary_key=True, default=uuid7, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
first_seen_at = models.DateTimeField(editable=False, null=True)
uid = models.CharField(max_length=300)
delta = FindingDeltaEnumField(
@@ -688,7 +708,17 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
],
name="findings_filter_idx",
),
models.Index(fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"),
GinIndex(fields=["text_search"], name="gin_findings_search_idx"),
models.Index(fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"),
models.Index(
fields=["tenant_id", "scan_id", "id"], name="find_tenant_scan_id_idx"
),
models.Index(
fields=["tenant_id", "id"],
condition=Q(delta="new"),
name="find_delta_new_idx",
),
]
class JSONAPIMeta:
@@ -1099,6 +1129,12 @@ class ScanSummary(RowLevelSecurityProtectedModel):
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
]
indexes = [
models.Index(
fields=["tenant_id", "scan_id"],
name="scan_summaries_tenant_scan_idx",
)
]
class JSONAPIMeta:
resource_name = "scan-summaries"

File diff suppressed because it is too large

View File

@@ -3,6 +3,8 @@ from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
from django.urls import reverse
from rest_framework.test import APIClient
from api.models import Membership, User
@pytest.mark.django_db
def test_basic_authentication():
@@ -177,3 +179,122 @@ def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fi
user2_me = client.get(reverse("user-me"), headers=user2_headers)
assert user2_me.status_code == 200
assert user2_me.json()["data"]["attributes"]["email"] == user2_email
@pytest.mark.django_db
class TestTokenSwitchTenant:
def test_switch_tenant_with_valid_token(self, tenants_fixture, providers_fixture):
client = APIClient()
test_user = "test_email@prowler.com"
test_password = "test_password"
# Check that we can create a new user without any kind of authentication
user_creation_response = client.post(
reverse("user-list"),
data={
"data": {
"type": "users",
"attributes": {
"name": "test",
"email": test_user,
"password": test_password,
},
}
},
format="vnd.api+json",
)
assert user_creation_response.status_code == 201
# Create a new relationship between this user and another tenant
tenant_id = tenants_fixture[0].id
user_instance = User.objects.get(email=test_user)
Membership.objects.create(user=user_instance, tenant_id=tenant_id)
# Check that using our new user's credentials we can authenticate and get the providers
access_token, _ = get_api_tokens(client, test_user, test_password)
auth_headers = get_authorization_header(access_token)
user_me_response = client.get(
reverse("user-me"),
headers=auth_headers,
)
assert user_me_response.status_code == 200
# Assert this user belongs to two tenants
assert (
user_me_response.json()["data"]["relationships"]["memberships"]["meta"][
"count"
]
== 2
)
provider_response = client.get(
reverse("provider-list"),
headers=auth_headers,
)
assert provider_response.status_code == 200
# Empty response since there are no providers in this tenant
assert not provider_response.json()["data"]
switch_tenant_response = client.post(
reverse("token-switch"),
data={
"data": {
"type": "tokens-switch-tenant",
"attributes": {"tenant_id": tenant_id},
}
},
headers=auth_headers,
)
assert switch_tenant_response.status_code == 200
new_access_token = switch_tenant_response.json()["data"]["attributes"]["access"]
new_auth_headers = get_authorization_header(new_access_token)
provider_response = client.get(
reverse("provider-list"),
headers=new_auth_headers,
)
assert provider_response.status_code == 200
# Now it must be data because we switched to another tenant with providers
assert provider_response.json()["data"]
def test_switch_tenant_with_invalid_token(self, create_test_user, tenants_fixture):
client = APIClient()
access_token, refresh_token = get_api_tokens(
client, create_test_user.email, TEST_PASSWORD
)
auth_headers = get_authorization_header(access_token)
invalid_token_response = client.post(
reverse("token-switch"),
data={
"data": {
"type": "tokens-switch-tenant",
"attributes": {"tenant_id": "invalid_tenant_id"},
}
},
headers=auth_headers,
)
assert invalid_token_response.status_code == 400
assert invalid_token_response.json()["errors"][0]["code"] == "invalid"
assert (
invalid_token_response.json()["errors"][0]["detail"]
== "Must be a valid UUID."
)
invalid_tenant_response = client.post(
reverse("token-switch"),
data={
"data": {
"type": "tokens-switch-tenant",
"attributes": {"tenant_id": tenants_fixture[-1].id},
}
},
headers=auth_headers,
)
assert invalid_tenant_response.status_code == 400
assert invalid_tenant_response.json()["errors"][0]["code"] == "invalid"
assert invalid_tenant_response.json()["errors"][0]["detail"] == (
"Tenant does not exist or user is not a " "member."
)
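As a side note, the switch flow exercised above maps to a small client call; a minimal sketch, assuming the API is reachable at http://localhost:8080/api/v1 and using the requests library (both assumptions, not taken from this diff):

# Illustrative only: swap the active tenant for an authenticated session.
import requests

BASE_URL = "http://localhost:8080/api/v1"   # assumed deployment URL
access_token = "<jwt-from-POST-/tokens>"    # placeholder token
tenant_id = "<target-tenant-uuid>"          # placeholder tenant

response = requests.post(
    f"{BASE_URL}/tokens/switch",
    json={
        "data": {
            "type": "tokens-switch-tenant",
            "attributes": {"tenant_id": tenant_id},
        }
    },
    headers={
        "Authorization": f"Bearer {access_token}",
        "Content-Type": "application/vnd.api+json",
    },
)
response.raise_for_status()
tokens = response.json()["data"]["attributes"]
access_token, refresh_token = tokens["access"], tokens["refresh"]

The new access token is scoped to the requested tenant, which is what the provider listing assertions above rely on.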

View File

@@ -274,9 +274,10 @@ class TestValidateInvitation:
expired_time = datetime.now(timezone.utc) - timedelta(days=1)
invitation.expires_at = expired_time
with patch("api.utils.Invitation.objects.using") as mock_using, patch(
"api.utils.datetime"
) as mock_datetime:
with (
patch("api.utils.Invitation.objects.using") as mock_using,
patch("api.utils.datetime") as mock_datetime,
):
mock_db = mock_using.return_value
mock_db.get.return_value = invitation
mock_datetime.now.return_value = datetime.now(timezone.utc)

View File

@@ -1,10 +1,15 @@
import glob
import io
import json
import os
from datetime import datetime, timedelta, timezone
from unittest.mock import ANY, Mock, patch
import jwt
import pytest
from botocore.exceptions import NoCredentialsError
from conftest import API_JSON_CONTENT_TYPE, TEST_PASSWORD, TEST_USER
from django.conf import settings
from django.urls import reverse
from rest_framework import status
@@ -19,6 +24,7 @@ from api.models import (
RoleProviderGroupRelationship,
Scan,
StateChoices,
Task,
User,
UserRoleRelationship,
)
@@ -27,6 +33,12 @@ from api.rls import Tenant
TODAY = str(datetime.today().date())
def today_after_n_days(n_days: int) -> str:
return datetime.strftime(
datetime.today().date() + timedelta(days=n_days), "%Y-%m-%d"
)
@pytest.mark.django_db
class TestUserViewSet:
def test_users_list(self, authenticated_client, create_test_user):
@@ -261,6 +273,16 @@ class TestUserViewSet:
assert response.status_code == status.HTTP_204_NO_CONTENT
assert not User.objects.filter(id=create_test_user.id).exists()
def test_users_destroy_other_user(
self, authenticated_client, create_test_user, users_fixture
):
user = users_fixture[2]
response = authenticated_client.delete(
reverse("user-detail", kwargs={"pk": str(user.id)})
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert User.objects.filter(id=create_test_user.id).exists()
def test_users_destroy_invalid_user(self, authenticated_client, create_test_user):
another_user = User.objects.create_user(
password="otherpassword", email="other@example.com"
@@ -268,7 +290,7 @@ class TestUserViewSet:
response = authenticated_client.delete(
reverse("user-detail", kwargs={"pk": another_user.id})
)
assert response.status_code == status.HTTP_404_NOT_FOUND
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert User.objects.filter(id=another_user.id).exists()
@pytest.mark.parametrize(
@@ -868,6 +890,16 @@ class TestProviderViewSet:
"uid": "kubernetes-test-123456789",
"alias": "test",
},
{
"provider": "kubernetes",
"uid": "arn:aws:eks:us-east-1:111122223333:cluster/test-cluster-long-name-123456789",
"alias": "EKS",
},
{
"provider": "kubernetes",
"uid": "gke_aaaa-dev_europe-test1_dev-aaaa-test-cluster-long-name-123456789",
"alias": "GKE",
},
{
"provider": "azure",
"uid": "8851db6b-42e5-4533-aa9e-30a32d67e875",
@@ -2052,9 +2084,9 @@ class TestScanViewSet:
("started_at.gte", "2024-01-01", 3),
("started_at.lte", "2024-01-01", 0),
("trigger", Scan.TriggerChoices.MANUAL, 1),
("state", StateChoices.AVAILABLE, 2),
("state", StateChoices.AVAILABLE, 1),
("state", StateChoices.FAILED, 1),
("state.in", f"{StateChoices.FAILED},{StateChoices.AVAILABLE}", 3),
("state.in", f"{StateChoices.FAILED},{StateChoices.AVAILABLE}", 2),
("trigger", Scan.TriggerChoices.MANUAL, 1),
]
),
@@ -2129,6 +2161,159 @@ class TestScanViewSet:
response = authenticated_client.get(reverse("scan-list"), {"sort": "invalid"})
assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_report_executing(self, authenticated_client, scans_fixture):
"""
When the scan is still executing (state == EXECUTING), the view should return
the task data with HTTP 202 and a Content-Location header.
"""
scan = scans_fixture[0]
scan.state = StateChoices.EXECUTING
scan.save()
task = Task.objects.create(tenant_id=scan.tenant_id)
dummy_task_data = {"id": str(task.id), "state": StateChoices.EXECUTING}
scan.task = task
scan.save()
with patch(
"api.v1.views.TaskSerializer",
return_value=type("DummySerializer", (), {"data": dummy_task_data}),
):
url = reverse("scan-report", kwargs={"pk": scan.id})
response = authenticated_client.get(url)
assert response.status_code == status.HTTP_202_ACCEPTED
assert "Content-Location" in response
assert dummy_task_data["id"] in response["Content-Location"]
def test_report_celery_task_executing(self, authenticated_client, scans_fixture):
"""
When the scan is not executing but a related celery task exists and is running,
the view should return that task data with HTTP 202.
"""
scan = scans_fixture[0]
scan.state = StateChoices.COMPLETED
scan.output_location = "dummy"
scan.save()
dummy_task = Task.objects.create(tenant_id=scan.tenant_id)
dummy_task.id = "dummy-task-id"
dummy_task_data = {"id": dummy_task.id, "state": StateChoices.EXECUTING}
with patch("api.v1.views.Task.objects.get", return_value=dummy_task), patch(
"api.v1.views.TaskSerializer",
return_value=type("DummySerializer", (), {"data": dummy_task_data}),
):
url = reverse("scan-report", kwargs={"pk": scan.id})
response = authenticated_client.get(url)
assert response.status_code == status.HTTP_202_ACCEPTED
assert "Content-Location" in response
assert dummy_task_data["id"] in response["Content-Location"]
def test_report_no_output_location(self, authenticated_client, scans_fixture):
"""
If the scan does not have an output_location, the view should return a 404.
"""
scan = scans_fixture[0]
scan.state = StateChoices.COMPLETED
scan.output_location = ""
scan.save()
url = reverse("scan-report", kwargs={"pk": scan.id})
response = authenticated_client.get(url)
assert response.status_code == status.HTTP_404_NOT_FOUND
assert response.json()["errors"]["detail"] == "The scan has no reports."
def test_report_s3_no_credentials(
self, authenticated_client, scans_fixture, monkeypatch
):
"""
When output_location is an S3 URL and get_s3_client() raises a credentials exception,
the view should return HTTP 403 with the proper error message.
"""
scan = scans_fixture[0]
bucket = "test-bucket"
key = "report.zip"
scan.output_location = f"s3://{bucket}/{key}"
scan.state = StateChoices.COMPLETED
scan.save()
def fake_get_s3_client():
raise NoCredentialsError()
monkeypatch.setattr("api.v1.views.get_s3_client", fake_get_s3_client)
url = reverse("scan-report", kwargs={"pk": scan.id})
response = authenticated_client.get(url)
assert response.status_code == status.HTTP_403_FORBIDDEN
assert (
response.json()["errors"]["detail"]
== "There is a problem with credentials."
)
def test_report_s3_success(self, authenticated_client, scans_fixture, monkeypatch):
"""
When output_location is an S3 URL and the S3 client returns the file successfully,
the view should return the ZIP file with HTTP 200 and proper headers.
"""
scan = scans_fixture[0]
bucket = "test-bucket"
key = "report.zip"
scan.output_location = f"s3://{bucket}/{key}"
scan.state = StateChoices.COMPLETED
scan.save()
monkeypatch.setattr(
"api.v1.views.env", type("env", (), {"str": lambda self, key: bucket})()
)
class FakeS3Client:
def get_object(self, Bucket, Key):
assert Bucket == bucket
assert Key == key
return {"Body": io.BytesIO(b"s3 zip content")}
monkeypatch.setattr("api.v1.views.get_s3_client", lambda: FakeS3Client())
url = reverse("scan-report", kwargs={"pk": scan.id})
response = authenticated_client.get(url)
assert response.status_code == 200
expected_filename = os.path.basename("report.zip")
content_disposition = response.get("Content-Disposition")
assert content_disposition.startswith('attachment; filename="')
assert f'filename="{expected_filename}"' in content_disposition
assert response.content == b"s3 zip content"
def test_report_local_file(
self, authenticated_client, scans_fixture, tmp_path, monkeypatch
):
"""
When output_location is a local file path, the view should read the file from disk
and return it with proper headers.
"""
scan = scans_fixture[0]
file_content = b"local zip file content"
file_path = tmp_path / "report.zip"
file_path.write_bytes(file_content)
scan.output_location = str(file_path)
scan.state = StateChoices.COMPLETED
scan.save()
monkeypatch.setattr(
glob,
"glob",
lambda pattern: [str(file_path)] if pattern == str(file_path) else [],
)
url = reverse("scan-report", kwargs={"pk": scan.id})
response = authenticated_client.get(url)
assert response.status_code == 200
assert response.content == file_content
content_disposition = response.get("Content-Disposition")
assert content_disposition.startswith('attachment; filename="')
assert f'filename="{file_path.name}"' in content_disposition
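These report tests translate into a simple client pattern: poll while the endpoint answers 202 (the export task is still running and Content-Location points at it), then persist the ZIP on 200. A minimal sketch, assuming the default DRF action route /scans/{id}/report, a reachable BASE_URL and a valid bearer token (all assumptions):

# Sketch: download a scan report, waiting while the export task is still executing.
import time

import requests

BASE_URL = "http://localhost:8080/api/v1"   # assumed deployment URL
session = requests.Session()
session.headers["Authorization"] = "Bearer <access-token>"   # placeholder
scan_id = "<scan-uuid>"                                      # placeholder

while True:
    resp = session.get(f"{BASE_URL}/scans/{scan_id}/report")
    if resp.status_code == 202:     # export still running; Content-Location has the task
        time.sleep(5)
        continue
    resp.raise_for_status()         # 403 (credentials) and 404 (no reports) surface here
    break

with open("report.zip", "wb") as f:   # served as an application/x-zip-compressed attachment
    f.write(resp.content)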
@pytest.mark.django_db
class TestTaskViewSet:
@@ -2369,12 +2554,33 @@ class TestResourceViewSet:
@pytest.mark.django_db
class TestFindingViewSet:
def test_findings_list_none(self, authenticated_client):
response = authenticated_client.get(reverse("finding-list"))
response = authenticated_client.get(
reverse("finding-list"), {"filter[inserted_at]": TODAY}
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 0
def test_findings_list(self, authenticated_client, findings_fixture):
def test_findings_list_no_date_filter(self, authenticated_client):
response = authenticated_client.get(reverse("finding-list"))
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json()["errors"][0]["code"] == "required"
def test_findings_date_range_too_large(self, authenticated_client):
response = authenticated_client.get(
reverse("finding-list"),
{
"filter[inserted_at.lte]": today_after_n_days(
-(settings.FINDINGS_MAX_DAYS_IN_RANGE + 1)
),
},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json()["errors"][0]["code"] == "invalid"
def test_findings_list(self, authenticated_client, findings_fixture):
response = authenticated_client.get(
reverse("finding-list"), {"filter[inserted_at]": TODAY}
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == len(findings_fixture)
assert (
@@ -2387,14 +2593,15 @@ class TestFindingViewSet:
[
("resources", ["resources"]),
("scan", ["scans"]),
("resources.provider,scan", ["resources", "scans", "providers"]),
("resources,scan.provider", ["resources", "scans", "providers"]),
],
)
def test_findings_list_include(
self, include_values, expected_resources, authenticated_client, findings_fixture
):
response = authenticated_client.get(
reverse("finding-list"), {"include": include_values}
reverse("finding-list"),
{"include": include_values, "filter[inserted_at]": TODAY},
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == len(findings_fixture)
@@ -2429,21 +2636,31 @@ class TestFindingViewSet:
("service.icontains", "ec", 1),
("inserted_at", "2024-01-01", 0),
("inserted_at.date", "2024-01-01", 0),
("inserted_at.gte", "2024-01-01", 2),
("inserted_at.gte", today_after_n_days(-1), 2),
(
"inserted_at.lte",
"2028-12-31",
today_after_n_days(1),
2,
), # TODO: To avoid having to modify this value and to ensure that the tests always work, we should set the time before the fixtures are inserted
("updated_at.lte", "2024-01-01", 0),
),
("updated_at.lte", today_after_n_days(-1), 0),
("resource_type.icontains", "prowler", 2),
# full text search on finding
("search", "dev-qa", 1),
("search", "orange juice", 1),
# full text search on resource
("search", "ec2", 2),
# full text search on finding tags
("search", "value2", 2),
# full text search on finding tags (disabled for now)
# ("search", "value2", 2),
# Temporary disabled until we implement tag filtering in the UI
# ("resource_tag_key", "key", 2),
# ("resource_tag_key__in", "key,key2", 2),
# ("resource_tag_key__icontains", "key", 2),
# ("resource_tag_value", "value", 2),
# ("resource_tag_value__in", "value,value2", 2),
# ("resource_tag_value__icontains", "value", 2),
# ("resource_tags", "key:value", 2),
# ("resource_tags", "not:exists", 0),
# ("resource_tags", "not:exists,key:value", 2),
]
),
)
@@ -2455,9 +2672,13 @@ class TestFindingViewSet:
filter_value,
expected_count,
):
filters = {f"filter[{filter_name}]": filter_value}
if "inserted_at" not in filter_name:
filters["filter[inserted_at]"] = TODAY
response = authenticated_client.get(
reverse("finding-list"),
{f"filter[{filter_name}]": filter_value},
filters,
)
assert response.status_code == status.HTTP_200_OK
@@ -2466,9 +2687,7 @@ class TestFindingViewSet:
def test_finding_filter_by_scan_id(self, authenticated_client, findings_fixture):
response = authenticated_client.get(
reverse("finding-list"),
{
"filter[scan]": findings_fixture[0].scan.id,
},
{"filter[scan]": findings_fixture[0].scan.id},
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 2
@@ -2491,6 +2710,7 @@ class TestFindingViewSet:
reverse("finding-list"),
{
"filter[provider]": findings_fixture[0].scan.provider.id,
"filter[inserted_at]": TODAY,
},
)
assert response.status_code == status.HTTP_200_OK
@@ -2505,7 +2725,8 @@ class TestFindingViewSet:
"filter[provider.in]": [
findings_fixture[0].scan.provider.id,
findings_fixture[1].scan.provider.id,
]
],
"filter[inserted_at]": TODAY,
},
)
assert response.status_code == status.HTTP_200_OK
@@ -2539,13 +2760,13 @@ class TestFindingViewSet:
)
def test_findings_sort(self, authenticated_client, sort_field):
response = authenticated_client.get(
reverse("finding-list"), {"sort": sort_field}
reverse("finding-list"), {"sort": sort_field, "filter[inserted_at]": TODAY}
)
assert response.status_code == status.HTTP_200_OK
def test_findings_sort_invalid(self, authenticated_client):
response = authenticated_client.get(
reverse("finding-list"), {"sort": "invalid"}
reverse("finding-list"), {"sort": "invalid", "filter[inserted_at]": TODAY}
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json()["errors"][0]["code"] == "invalid"
@@ -2582,59 +2803,74 @@ class TestFindingViewSet:
)
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_findings_services_regions_retrieve(
self, authenticated_client, findings_fixture
):
def test_findings_metadata_retrieve(self, authenticated_client, findings_fixture):
finding_1, *_ = findings_fixture
response = authenticated_client.get(
reverse("finding-findings_services_regions"),
reverse("finding-metadata"),
{"filter[inserted_at]": finding_1.updated_at.strftime("%Y-%m-%d")},
)
data = response.json()
expected_services = {"ec2", "s3"}
expected_regions = {"eu-west-1", "us-east-1"}
# Temporarily disabled until we implement tag filtering in the UI
# expected_tags = {"key": ["value"], "key2": ["value2"]}
expected_resource_types = {"prowler-test"}
assert data["data"]["type"] == "finding-dynamic-filters"
assert data["data"]["type"] == "findings-metadata"
assert data["data"]["id"] is None
assert set(data["data"]["attributes"]["services"]) == expected_services
assert set(data["data"]["attributes"]["regions"]) == expected_regions
assert (
set(data["data"]["attributes"]["resource_types"]) == expected_resource_types
)
# assert data["data"]["attributes"]["tags"] == expected_tags
def test_findings_services_regions_severity_retrieve(
def test_findings_metadata_severity_retrieve(
self, authenticated_client, findings_fixture
):
finding_1, *_ = findings_fixture
response = authenticated_client.get(
reverse("finding-findings_services_regions"),
reverse("finding-metadata"),
{
"filter[severity__in]": ["low", "medium"],
"filter[inserted_at]": finding_1.updated_at.strftime("%Y-%m-%d"),
"filter[inserted_at]": finding_1.inserted_at.strftime("%Y-%m-%d"),
},
)
data = response.json()
expected_services = {"s3"}
expected_regions = {"eu-west-1"}
# Temporary disabled until we implement tag filtering in the UI
# expected_tags = {"key": ["value"], "key2": ["value2"]}
expected_resource_types = {"prowler-test"}
assert data["data"]["type"] == "finding-dynamic-filters"
assert data["data"]["type"] == "findings-metadata"
assert data["data"]["id"] is None
assert set(data["data"]["attributes"]["services"]) == expected_services
assert set(data["data"]["attributes"]["regions"]) == expected_regions
assert (
set(data["data"]["attributes"]["resource_types"]) == expected_resource_types
)
# assert data["data"]["attributes"]["tags"] == expected_tags
def test_findings_services_regions_future_date(self, authenticated_client):
def test_findings_metadata_future_date(self, authenticated_client):
response = authenticated_client.get(
reverse("finding-findings_services_regions"),
reverse("finding-metadata"),
{"filter[inserted_at]": "2048-01-01"},
)
data = response.json()
assert data["data"]["type"] == "finding-dynamic-filters"
assert data["data"]["type"] == "findings-metadata"
assert data["data"]["id"] is None
assert data["data"]["attributes"]["services"] == []
assert data["data"]["attributes"]["regions"] == []
# Temporary disabled until we implement tag filtering in the UI
# assert data["data"]["attributes"]["tags"] == {}
assert data["data"]["attributes"]["resource_types"] == []
def test_findings_services_regions_invalid_date(self, authenticated_client):
def test_findings_metadata_invalid_date(self, authenticated_client):
response = authenticated_client.get(
reverse("finding-findings_services_regions"),
reverse("finding-metadata"),
{"filter[inserted_at]": "2048-01-011"},
)
assert response.json() == {
@@ -4249,18 +4485,15 @@ class TestOverviewViewSet:
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_overview_providers_list(
self, authenticated_client, findings_fixture, resources_fixture
self, authenticated_client, scan_summaries_fixture, resources_fixture
):
response = authenticated_client.get(reverse("overview-providers"))
assert response.status_code == status.HTTP_200_OK
# Only findings from one provider
assert len(response.json()["data"]) == 1
assert response.json()["data"][0]["attributes"]["findings"]["total"] == len(
findings_fixture
)
assert response.json()["data"][0]["attributes"]["findings"]["pass"] == 0
assert response.json()["data"][0]["attributes"]["findings"]["fail"] == 2
assert response.json()["data"][0]["attributes"]["findings"]["manual"] == 0
assert response.json()["data"][0]["attributes"]["findings"]["total"] == 4
assert response.json()["data"][0]["attributes"]["findings"]["pass"] == 2
assert response.json()["data"][0]["attributes"]["findings"]["fail"] == 1
assert response.json()["data"][0]["attributes"]["findings"]["muted"] == 1
assert response.json()["data"][0]["attributes"]["resources"]["total"] == len(
resources_fixture
)

View File

@@ -1,15 +1,25 @@
from datetime import datetime, timezone
from allauth.socialaccount.providers.oauth2.client import OAuth2Client
from rest_framework.exceptions import NotFound, ValidationError
from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Invitation, Provider
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.common.models import Connection
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from rest_framework.exceptions import ValidationError, NotFound
from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Provider, Invitation
class CustomOAuth2Client(OAuth2Client):
def __init__(self, client_id, secret, *args, **kwargs):
# Remove any duplicate "scope_delimiter" from kwargs
# Bug present in dj-rest-auth after version v7.0.1
# https://github.com/iMerica/dj-rest-auth/issues/673
kwargs.pop("scope_delimiter", None)
super().__init__(client_id, secret, *args, **kwargs)
def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:

View File

@@ -106,7 +106,7 @@ def uuid7_end(uuid_obj: UUID, offset_months: int = 1) -> UUID:
Args:
uuid_obj: A UUIDv7 object.
offset_days: Number of months to offset from the given UUID's date. Defaults to 1 to handle if
offset_months: Number of months to offset from the given UUID's date. Defaults to 1 to handle if
partitions are not being used, if so the value will be the one set at FINDINGS_TABLE_PARTITION_MONTHS.
Returns:

View File

@@ -38,7 +38,65 @@ from api.rls import Tenant
# Tokens
class TokenSerializer(TokenObtainPairSerializer):
def generate_tokens(user: User, tenant_id: str) -> dict:
try:
refresh = RefreshToken.for_user(user)
except InvalidKeyError:
# Handle invalid key error
raise ValidationError(
{
"detail": "Token generation failed due to invalid key configuration. Provide valid "
"DJANGO_TOKEN_SIGNING_KEY and DJANGO_TOKEN_VERIFYING_KEY in the environment."
}
)
except Exception as e:
raise ValidationError({"detail": str(e)})
# Post-process the tokens
# Set the tenant_id
refresh["tenant_id"] = tenant_id
# Set the nbf (not before) claim to the iat (issued at) claim. At this moment, simplejwt does not provide a
# way to set the nbf claim
refresh.payload["nbf"] = refresh["iat"]
# Get the access token
access = refresh.access_token
if settings.SIMPLE_JWT["UPDATE_LAST_LOGIN"]:
update_last_login(None, user)
return {"access": str(access), "refresh": str(refresh)}
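For reference, the claims written above can be checked by decoding the refresh token with signature verification disabled; a minimal sketch, assuming an existing `user` and `tenant_id` (PyJWT is already used by the API test suite):

# Sketch: inspect the claims set by generate_tokens (`user` and `tenant_id` assumed to exist).
import jwt  # PyJWT

tokens = generate_tokens(user, tenant_id)
claims = jwt.decode(tokens["refresh"], options={"verify_signature": False})
assert claims["tenant_id"] == tenant_id   # tenant is embedded in every token
assert claims["nbf"] == claims["iat"]     # "not before" mirrors "issued at"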
class BaseTokenSerializer(TokenObtainPairSerializer):
def custom_validate(self, attrs, social: bool = False):
email = attrs.get("email")
password = attrs.get("password")
tenant_id = str(attrs.get("tenant_id", ""))
# Authenticate user
user = (
User.objects.get(email=email)
if social
else authenticate(username=email, password=password)
)
if user is None:
raise ValidationError("Invalid credentials")
if tenant_id:
if not user.is_member_of_tenant(tenant_id):
raise ValidationError("Tenant does not exist or user is not a member.")
else:
first_membership = user.memberships.order_by("date_joined").first()
if first_membership is None:
raise ValidationError("User has no memberships.")
tenant_id = str(first_membership.tenant_id)
return generate_tokens(user, tenant_id)
class TokenSerializer(BaseTokenSerializer):
email = serializers.EmailField(write_only=True)
password = serializers.CharField(write_only=True)
tenant_id = serializers.UUIDField(
@@ -56,53 +114,25 @@ class TokenSerializer(TokenObtainPairSerializer):
resource_name = "tokens"
def validate(self, attrs):
email = attrs.get("email")
password = attrs.get("password")
tenant_id = str(attrs.get("tenant_id", ""))
return super().custom_validate(attrs)
# Authenticate user
user = authenticate(username=email, password=password)
if user is None:
raise ValidationError("Invalid credentials")
if tenant_id:
if not user.is_member_of_tenant(tenant_id):
raise ValidationError("Tenant does not exist or user is not a member.")
else:
first_membership = user.memberships.order_by("date_joined").first()
if first_membership is None:
raise ValidationError("User has no memberships.")
tenant_id = str(first_membership.tenant_id)
class TokenSocialLoginSerializer(BaseTokenSerializer):
email = serializers.EmailField(write_only=True)
# Generate tokens
try:
refresh = RefreshToken.for_user(user)
except InvalidKeyError:
# Handle invalid key error
raise ValidationError(
{
"detail": "Token generation failed due to invalid key configuration. Provide valid "
"DJANGO_TOKEN_SIGNING_KEY and DJANGO_TOKEN_VERIFYING_KEY in the environment."
}
)
except Exception as e:
raise ValidationError({"detail": str(e)})
# Output tokens
refresh = serializers.CharField(read_only=True)
access = serializers.CharField(read_only=True)
# Post-process the tokens
# Set the tenant_id
refresh["tenant_id"] = tenant_id
class JSONAPIMeta:
resource_name = "tokens"
# Set the nbf (not before) claim to the iat (issued at) claim. At this moment, simplejwt does not provide a
# way to set the nbf claim
refresh.payload["nbf"] = refresh["iat"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields.pop("password", None)
# Get the access token
access = refresh.access_token
if settings.SIMPLE_JWT["UPDATE_LAST_LOGIN"]:
update_last_login(None, user)
return {"access": str(access), "refresh": str(refresh)}
def validate(self, attrs):
return super().custom_validate(attrs, social=True)
# TODO: Check if we can change the parent class to TokenRefreshSerializer from rest_framework_simplejwt.serializers
@@ -140,6 +170,30 @@ class TokenRefreshSerializer(serializers.Serializer):
raise ValidationError({"refresh": "Invalid or expired token"})
class TokenSwitchTenantSerializer(serializers.Serializer):
tenant_id = serializers.UUIDField(
write_only=True, help_text="The tenant ID for which to request a new token."
)
access = serializers.CharField(read_only=True)
refresh = serializers.CharField(read_only=True)
class JSONAPIMeta:
resource_name = "tokens-switch-tenant"
def validate(self, attrs):
request = self.context["request"]
user = request.user
if not user.is_authenticated:
raise ValidationError("Invalid or expired token.")
tenant_id = str(attrs.get("tenant_id"))
if not user.is_member_of_tenant(tenant_id):
raise ValidationError("Tenant does not exist or user is not a member.")
return generate_tokens(user, tenant_id)
# Base
@@ -691,6 +745,43 @@ class ProviderSerializer(RLSSerializer):
}
class ProviderIncludeSerializer(RLSSerializer):
"""
Serializer for the Provider model.
"""
provider = ProviderEnumSerializerField()
connection = serializers.SerializerMethodField(read_only=True)
class Meta:
model = Provider
fields = [
"id",
"inserted_at",
"updated_at",
"provider",
"uid",
"alias",
"connection",
# "scanner_args",
]
@extend_schema_field(
{
"type": "object",
"properties": {
"connected": {"type": "boolean"},
"last_checked_at": {"type": "string", "format": "date-time"},
},
}
)
def get_connection(self, obj):
return {
"connected": obj.connected,
"last_checked_at": obj.connection_last_checked_at,
}
class ProviderCreateSerializer(RLSSerializer, BaseWriteSerializer):
class Meta:
model = Provider
@@ -753,6 +844,35 @@ class ScanSerializer(RLSSerializer):
]
class ScanIncludeSerializer(RLSSerializer):
trigger = serializers.ChoiceField(
choices=Scan.TriggerChoices.choices, read_only=True
)
state = StateEnumSerializerField(read_only=True)
class Meta:
model = Scan
fields = [
"id",
"name",
"trigger",
"state",
"unique_resource_count",
"progress",
# "scanner_args",
"duration",
"inserted_at",
"started_at",
"completed_at",
"scheduled_at",
"provider",
]
included_serializers = {
"provider": "api.v1.serializers.ProviderIncludeSerializer",
}
class ScanCreateSerializer(RLSSerializer, BaseWriteSerializer):
class Meta:
model = Scan
@@ -819,6 +939,14 @@ class ScanTaskSerializer(RLSSerializer):
]
class ScanReportSerializer(serializers.Serializer):
id = serializers.CharField(source="scan")
class Meta:
resource_name = "scan-reports"
fields = ["id"]
class ResourceTagSerializer(RLSSerializer):
"""
Serializer for the ResourceTag model
@@ -884,6 +1012,51 @@ class ResourceSerializer(RLSSerializer):
return fields
class ResourceIncludeSerializer(RLSSerializer):
"""
Serializer for the Resource model.
"""
tags = serializers.SerializerMethodField()
type_ = serializers.CharField(read_only=True)
class Meta:
model = Resource
fields = [
"id",
"inserted_at",
"updated_at",
"uid",
"name",
"region",
"service",
"type_",
"tags",
]
extra_kwargs = {
"id": {"read_only": True},
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
}
@extend_schema_field(
{
"type": "object",
"description": "Tags associated with the resource",
"example": {"env": "prod", "owner": "johndoe"},
}
)
def get_tags(self, obj):
return obj.get_tags(self.context.get("tenant_id"))
def get_fields(self):
"""`type` is a Python reserved keyword."""
fields = super().get_fields()
type_ = fields.pop("type_")
fields["type"] = type_
return fields
class FindingSerializer(RLSSerializer):
"""
Serializer for the Finding model.
@@ -905,6 +1078,7 @@ class FindingSerializer(RLSSerializer):
"raw_result",
"inserted_at",
"updated_at",
"first_seen_at",
"url",
# Relationships
"scan",
@@ -912,11 +1086,12 @@ class FindingSerializer(RLSSerializer):
]
included_serializers = {
"scan": ScanSerializer,
"resources": ResourceSerializer,
"scan": ScanIncludeSerializer,
"resources": ResourceIncludeSerializer,
}
# To be removed when the related endpoint is removed as well
class FindingDynamicFilterSerializer(serializers.Serializer):
services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
@@ -925,6 +1100,19 @@ class FindingDynamicFilterSerializer(serializers.Serializer):
resource_name = "finding-dynamic-filters"
class FindingMetadataSerializer(serializers.Serializer):
services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
resource_types = serializers.ListField(
child=serializers.CharField(), allow_empty=True
)
# Temporarily disabled until we implement tag filtering in the UI
# tags = serializers.JSONField(help_text="Tags are described as key-value pairs.")
class Meta:
resource_name = "findings-metadata"
# Provider secrets
class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
@staticmethod
@@ -997,7 +1185,7 @@ class KubernetesProviderSecret(serializers.Serializer):
class AWSRoleAssumptionProviderSecret(serializers.Serializer):
role_arn = serializers.CharField()
external_id = serializers.CharField(required=False)
external_id = serializers.CharField()
role_session_name = serializers.CharField(required=False)
session_duration = serializers.IntegerField(
required=False, min_value=900, max_value=43200
@@ -1044,6 +1232,10 @@ class AWSRoleAssumptionProviderSecret(serializers.Serializer):
"description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
"assumption.",
},
"external_id": {
"type": "string",
"description": "An identifier to enhance security for role assumption.",
},
"aws_access_key_id": {
"type": "string",
"description": "The AWS access key ID. Only required if the environment lacks pre-configured "
@@ -1065,11 +1257,6 @@ class AWSRoleAssumptionProviderSecret(serializers.Serializer):
"default": 3600,
"description": "The duration (in seconds) for the role session.",
},
"external_id": {
"type": "string",
"description": "An optional identifier to enhance security for role assumption; may be "
"required by the role administrator.",
},
"role_session_name": {
"type": "string",
"description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
@@ -1083,7 +1270,7 @@ class AWSRoleAssumptionProviderSecret(serializers.Serializer):
"pattern": "^[a-zA-Z0-9=,.@_-]+$",
},
},
"required": ["role_arn"],
"required": ["role_arn", "external_id"],
},
{
"type": "object",
@@ -1722,7 +1909,7 @@ class OverviewProviderSerializer(serializers.Serializer):
"properties": {
"pass": {"type": "integer"},
"fail": {"type": "integer"},
"manual": {"type": "integer"},
"muted": {"type": "integer"},
"total": {"type": "integer"},
},
}
@@ -1731,7 +1918,7 @@ class OverviewProviderSerializer(serializers.Serializer):
return {
"pass": obj["findings_passed"],
"fail": obj["findings_failed"],
"manual": obj["findings_manual"],
"muted": obj["findings_muted"],
"total": obj["total_findings"],
}

View File

@@ -3,28 +3,31 @@ from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers
from api.v1.views import (
ComplianceOverviewViewSet,
CustomTokenObtainView,
CustomTokenRefreshView,
CustomTokenSwitchTenantView,
FindingViewSet,
MembershipViewSet,
ProviderGroupViewSet,
ProviderGroupProvidersRelationshipView,
ProviderSecretViewSet,
InvitationViewSet,
GithubSocialLoginView,
GoogleSocialLoginView,
InvitationAcceptViewSet,
RoleViewSet,
RoleProviderGroupRelationshipView,
UserRoleRelationshipView,
InvitationViewSet,
MembershipViewSet,
OverviewViewSet,
ComplianceOverviewViewSet,
ProviderGroupProvidersRelationshipView,
ProviderGroupViewSet,
ProviderSecretViewSet,
ProviderViewSet,
ResourceViewSet,
RoleProviderGroupRelationshipView,
RoleViewSet,
ScanViewSet,
ScheduleViewSet,
SchemaView,
TaskViewSet,
TenantMembersViewSet,
TenantViewSet,
UserRoleRelationshipView,
UserViewSet,
)
@@ -56,6 +59,7 @@ users_router.register(r"memberships", MembershipViewSet, basename="user-membersh
urlpatterns = [
path("tokens", CustomTokenObtainView.as_view(), name="token-obtain"),
path("tokens/refresh", CustomTokenRefreshView.as_view(), name="token-refresh"),
path("tokens/switch", CustomTokenSwitchTenantView.as_view(), name="token-switch"),
path(
"providers/secrets",
ProviderSecretViewSet.as_view({"get": "list", "post": "create"}),
@@ -106,6 +110,8 @@ urlpatterns = [
),
name="provider_group-providers-relationship",
),
path("tokens/google", GoogleSocialLoginView.as_view(), name="token-google"),
path("tokens/github", GithubSocialLoginView.as_view(), name="token-github"),
path("", include(router.urls)),
path("", include(tenants_router.urls)),
path("", include(users_router.urls)),

View File

@@ -1,9 +1,23 @@
import glob
import os
from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
from celery.result import AsyncResult
from config.env import env
from config.settings.social_login import (
GITHUB_OAUTH_CALLBACK_URL,
GOOGLE_OAUTH_CALLBACK_URL,
)
from dj_rest_auth.registration.views import SocialLoginView
from django.conf import settings as django_settings
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.search import SearchQuery
from django.db import transaction
from django.db.models import Count, F, OuterRef, Prefetch, Q, Subquery, Sum
from django.db.models import Count, Exists, F, OuterRef, Prefetch, Q, Subquery, Sum
from django.db.models.functions import Coalesce
from django.http import HttpResponse
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
@@ -30,11 +44,11 @@ from rest_framework.permissions import SAFE_METHODS
from rest_framework_json_api.views import RelationshipView, Response
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from tasks.beat import schedule_provider_scan
from tasks.jobs.export import get_s3_client
from tasks.tasks import (
check_provider_connection_task,
delete_provider_task,
delete_tenant_task,
perform_scan_summary_task,
perform_scan_task,
)
@@ -67,13 +81,13 @@ from api.models import (
ProviderGroupMembership,
ProviderSecret,
Resource,
ResourceFindingMapping,
Role,
RoleProviderGroupRelationship,
Scan,
ScanSummary,
SeverityChoices,
StateChoices,
StatusChoices,
Task,
User,
UserRoleRelationship,
@@ -81,12 +95,12 @@ from api.models import (
from api.pagination import ComplianceOverviewPagination
from api.rbac.permissions import Permissions, get_providers, get_role
from api.rls import Tenant
from api.utils import validate_invitation
from api.uuid_utils import datetime_to_uuid7
from api.utils import CustomOAuth2Client, validate_invitation
from api.v1.serializers import (
ComplianceOverviewFullSerializer,
ComplianceOverviewSerializer,
FindingDynamicFilterSerializer,
FindingMetadataSerializer,
FindingSerializer,
InvitationAcceptSerializer,
InvitationCreateSerializer,
@@ -113,6 +127,7 @@ from api.v1.serializers import (
RoleSerializer,
RoleUpdateSerializer,
ScanCreateSerializer,
ScanReportSerializer,
ScanSerializer,
ScanUpdateSerializer,
ScheduleDailyCreateSerializer,
@@ -120,6 +135,8 @@ from api.v1.serializers import (
TenantSerializer,
TokenRefreshSerializer,
TokenSerializer,
TokenSocialLoginSerializer,
TokenSwitchTenantSerializer,
UserCreateSerializer,
UserRoleRelationshipSerializer,
UserSerializer,
@@ -186,13 +203,43 @@ class CustomTokenRefreshView(GenericAPIView):
)
@extend_schema(
tags=["Token"],
summary="Switch tenant using a valid tenant ID",
description="Switch tenant by providing a valid tenant ID. The authenticated user must belong to the tenant.",
)
class CustomTokenSwitchTenantView(GenericAPIView):
permission_classes = [permissions.IsAuthenticated]
resource_name = "tokens-switch-tenant"
serializer_class = TokenSwitchTenantSerializer
http_method_names = ["post"]
def post(self, request):
serializer = TokenSwitchTenantSerializer(
data=request.data, context={"request": request}
)
try:
serializer.is_valid(raise_exception=True)
except TokenError as e:
raise InvalidToken(e.args[0])
return Response(
data={
"type": "tokens-switch-tenant",
"attributes": serializer.validated_data,
},
status=status.HTTP_200_OK,
)
@extend_schema(exclude=True)
class SchemaView(SpectacularAPIView):
serializer_class = None
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.1.1"
spectacular_settings.VERSION = "1.5.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -252,6 +299,58 @@ class SchemaView(SpectacularAPIView):
return super().get(request, *args, **kwargs)
@extend_schema(exclude=True)
class GoogleSocialLoginView(SocialLoginView):
adapter_class = GoogleOAuth2Adapter
client_class = CustomOAuth2Client
callback_url = GOOGLE_OAUTH_CALLBACK_URL
def get_response(self):
original_response = super().get_response()
if self.user and self.user.is_authenticated:
serializer = TokenSocialLoginSerializer(data={"email": self.user.email})
try:
serializer.is_valid(raise_exception=True)
except TokenError as e:
raise InvalidToken(e.args[0])
return Response(
data={
"type": "google-social-tokens",
"attributes": serializer.validated_data,
},
status=status.HTTP_200_OK,
)
return original_response
@extend_schema(exclude=True)
class GithubSocialLoginView(SocialLoginView):
adapter_class = GitHubOAuth2Adapter
client_class = CustomOAuth2Client
callback_url = GITHUB_OAUTH_CALLBACK_URL
def get_response(self):
original_response = super().get_response()
if self.user and self.user.is_authenticated:
serializer = TokenSocialLoginSerializer(data={"email": self.user.email})
try:
serializer.is_valid(raise_exception=True)
except TokenError as e:
raise InvalidToken(e.args[0])
return Response(
data={
"type": "github-social-tokens",
"attributes": serializer.validated_data,
},
status=status.HTTP_200_OK,
)
return original_response
@extend_schema_view(
list=extend_schema(
tags=["User"],
@@ -275,8 +374,8 @@ class SchemaView(SpectacularAPIView):
),
destroy=extend_schema(
tags=["User"],
summary="Delete a user account",
description="Remove a user account from the system.",
summary="Delete the user account",
description="Remove the current user account from the system.",
),
me=extend_schema(
tags=["User"],
@@ -340,6 +439,12 @@ class UserViewSet(BaseUserViewset):
status=status.HTTP_200_OK,
)
def destroy(self, request, *args, **kwargs):
if kwargs["pk"] != str(self.request.user.id):
raise ValidationError("Only the current user can be deleted.")
return super().destroy(request, *args, **kwargs)
@extend_schema(
parameters=[
OpenApiParameter(
@@ -1018,6 +1123,18 @@ class ProviderViewSet(BaseRLSViewSet):
request=ScanCreateSerializer,
responses={202: OpenApiResponse(response=TaskSerializer)},
),
report=extend_schema(
tags=["Scan"],
summary="Download ZIP report",
description="Returns a ZIP file containing the requested report",
request=ScanReportSerializer,
responses={
200: OpenApiResponse(description="Report obtained successfully"),
202: OpenApiResponse(description="The task is in progress"),
403: OpenApiResponse(description="There is a problem with credentials"),
404: OpenApiResponse(description="The scan has no reports"),
},
),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
@@ -1044,7 +1161,7 @@ class ScanViewSet(BaseRLSViewSet):
"""
if self.request.method in SAFE_METHODS:
# No permissions required for GET requests
self.required_permissions = [Permissions.MANAGE_PROVIDERS]
self.required_permissions = []
else:
# Require permission for non-GET requests
self.required_permissions = [Permissions.MANAGE_SCANS]
@@ -1066,6 +1183,10 @@ class ScanViewSet(BaseRLSViewSet):
return ScanCreateSerializer
elif self.action == "partial_update":
return ScanUpdateSerializer
elif self.action == "report":
if hasattr(self, "response_serializer_class"):
return self.response_serializer_class
return ScanReportSerializer
return super().get_serializer_class()
def partial_update(self, request, *args, **kwargs):
@@ -1083,6 +1204,93 @@ class ScanViewSet(BaseRLSViewSet):
)
return Response(data=read_serializer.data, status=status.HTTP_200_OK)
@action(detail=True, methods=["get"], url_name="report")
def report(self, request, pk=None):
scan_instance = self.get_object()
if scan_instance.state == StateChoices.EXECUTING:
# If the scan is still running, return the task
prowler_task = Task.objects.get(id=scan_instance.task.id)
self.response_serializer_class = TaskSerializer
output_serializer = self.get_serializer(prowler_task)
return Response(
data=output_serializer.data,
status=status.HTTP_202_ACCEPTED,
headers={
"Content-Location": reverse(
"task-detail", kwargs={"pk": output_serializer.data["id"]}
)
},
)
try:
output_celery_task = Task.objects.get(
task_runner_task__task_name="scan-report",
task_runner_task__task_args__contains=pk,
)
self.response_serializer_class = TaskSerializer
output_serializer = self.get_serializer(output_celery_task)
if output_serializer.data["state"] == StateChoices.EXECUTING:
# If the task is still running, return the task
return Response(
data=output_serializer.data,
status=status.HTTP_202_ACCEPTED,
headers={
"Content-Location": reverse(
"task-detail", kwargs={"pk": output_serializer.data["id"]}
)
},
)
except Task.DoesNotExist:
# If the task does not exist, it has been removed from the database
pass
output_location = scan_instance.output_location
if not output_location:
return Response(
{"detail": "The scan has no reports."},
status=status.HTTP_404_NOT_FOUND,
)
if scan_instance.output_location.startswith("s3://"):
try:
s3_client = get_s3_client()
except (ClientError, NoCredentialsError, ParamValidationError):
return Response(
{"detail": "There is a problem with credentials."},
status=status.HTTP_403_FORBIDDEN,
)
bucket_name = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET")
key = output_location[len(f"s3://{bucket_name}/") :]
try:
s3_object = s3_client.get_object(Bucket=bucket_name, Key=key)
except ClientError as e:
error_code = e.response.get("Error", {}).get("Code")
if error_code == "NoSuchKey":
return Response(
{"detail": "The scan has no reports."},
status=status.HTTP_404_NOT_FOUND,
)
return Response(
{"detail": "There is a problem with credentials."},
status=status.HTTP_403_FORBIDDEN,
)
file_content = s3_object["Body"].read()
filename = os.path.basename(output_location.split("/")[-1])
else:
zip_files = glob.glob(output_location)
file_path = zip_files[0]
with open(file_path, "rb") as f:
file_content = f.read()
filename = os.path.basename(file_path)
response = HttpResponse(
file_content, content_type="application/x-zip-compressed"
)
response["Content-Disposition"] = f'attachment; filename="{filename}"'
return response
def create(self, request, *args, **kwargs):
input_serializer = self.get_serializer(data=request.data)
input_serializer.is_valid(raise_exception=True)
@@ -1097,10 +1305,6 @@ class ScanViewSet(BaseRLSViewSet):
# Disabled for now
# checks_to_execute=scan.scanner_args.get("checks_to_execute"),
},
link=perform_scan_summary_task.si(
tenant_id=self.request.tenant_id,
scan_id=str(scan.id),
),
)
scan.task_id = task.id
@@ -1264,6 +1468,14 @@ class ResourceViewSet(BaseRLSViewSet):
tags=["Finding"],
summary="List all findings",
description="Retrieve a list of all findings with options for filtering by various criteria.",
parameters=[
OpenApiParameter(
name="filter[inserted_at]",
description="At least one of the variations of the `filter[inserted_at]` filter must be provided.",
required=True,
type=OpenApiTypes.DATE,
)
],
),
retrieve=extend_schema(
tags=["Finding"],
@@ -1274,33 +1486,51 @@ class ResourceViewSet(BaseRLSViewSet):
tags=["Finding"],
summary="Retrieve the services and regions that are impacted by findings",
description="Fetch services and regions affected in findings.",
responses={201: OpenApiResponse(response=MembershipSerializer)},
filters=True,
deprecated=True,
),
metadata=extend_schema(
tags=["Finding"],
summary="Retrieve metadata values from findings",
description="Fetch unique metadata values from a set of findings. This is useful for dynamic filtering.",
parameters=[
OpenApiParameter(
name="filter[inserted_at]",
description="At least one of the variations of the `filter[inserted_at]` filter must be provided.",
required=True,
type=OpenApiTypes.DATE,
)
],
filters=True,
),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
class FindingViewSet(BaseRLSViewSet):
queryset = Finding.objects.all()
queryset = Finding.all_objects.all()
serializer_class = FindingSerializer
prefetch_for_includes = {
"__all__": [],
"resources": [
Prefetch("resources", queryset=Resource.objects.select_related("findings"))
],
"scan": [Prefetch("scan", queryset=Scan.objects.select_related("findings"))],
}
http_method_names = ["get"]
filterset_class = FindingFilter
ordering = ["-id"]
http_method_names = ["get"]
ordering = ["-inserted_at"]
ordering_fields = [
"id",
"status",
"severity",
"check_id",
"inserted_at",
"updated_at",
]
prefetch_for_includes = {
"__all__": [],
"resources": [
Prefetch(
"resources",
queryset=Resource.all_objects.prefetch_related("tags", "findings"),
)
],
"scan": [
Prefetch("scan", queryset=Scan.all_objects.select_related("findings"))
],
}
# RBAC required permissions (implicit -> MANAGE_PROVIDERS enable unlimited visibility or check the visibility of
# the provider through the provider group)
required_permissions = []
@@ -1308,52 +1538,65 @@ class FindingViewSet(BaseRLSViewSet):
def get_serializer_class(self):
if self.action == "findings_services_regions":
return FindingDynamicFilterSerializer
elif self.action == "metadata":
return FindingMetadataSerializer
return super().get_serializer_class()
def get_queryset(self):
tenant_id = self.request.tenant_id
user_roles = get_role(self.request.user)
if user_roles.unlimited_visibility:
# User has unlimited visibility, return all scans
queryset = Finding.objects.filter(tenant_id=self.request.tenant_id)
# User has unlimited visibility, return all findings
queryset = Finding.all_objects.filter(tenant_id=tenant_id)
else:
# User lacks permission, filter providers based on provider groups associated with the role
queryset = Finding.objects.filter(
# User lacks permission, filter findings based on provider groups associated with the role
queryset = Finding.all_objects.filter(
scan__provider__in=get_providers(user_roles)
)
search_value = self.request.query_params.get("filter[search]", None)
if search_value:
# Django's ORM will build a LEFT JOIN and OUTER JOIN on any "through" tables, resulting in duplicates
# The duplicates then require a `distinct` query
search_query = SearchQuery(
search_value, config="simple", search_type="plain"
)
resource_match = Resource.all_objects.filter(
text_search=search_query,
id__in=ResourceFindingMapping.objects.filter(
resource_id=OuterRef("pk"),
tenant_id=tenant_id,
).values("resource_id"),
)
queryset = queryset.filter(
Q(impact_extended__contains=search_value)
| Q(status_extended__contains=search_value)
| Q(check_id=search_value)
| Q(check_id__icontains=search_value)
| Q(text_search=search_query)
| Q(resources__uid=search_value)
| Q(resources__name=search_value)
| Q(resources__region=search_value)
| Q(resources__service=search_value)
| Q(resources__type=search_value)
| Q(resources__uid__contains=search_value)
| Q(resources__name__contains=search_value)
| Q(resources__region__contains=search_value)
| Q(resources__service__contains=search_value)
| Q(resources__tags__text_search=search_query)
| Q(resources__text_search=search_query)
).distinct()
Q(text_search=search_query) | Q(Exists(resource_match))
)
return queryset
def inserted_at_to_uuidv7(self, inserted_at):
if inserted_at is None:
return None
return datetime_to_uuid7(inserted_at)
def filter_queryset(self, queryset):
# Do not apply filters when retrieving specific finding
if self.action == "retrieve":
return queryset
return super().filter_queryset(queryset)
def list(self, request, *args, **kwargs):
base_qs = self.filter_queryset(self.get_queryset())
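# Paginate over bare finding ids first, then hydrate the page with full rows plus
# scan and resources; presumably this keeps the paginated query cheap on the large
# partitioned findings table (the motivation is an inference, not stated here).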
paginated_ids = self.paginate_queryset(base_qs.values_list("id", flat=True))
if paginated_ids is not None:
ids = list(paginated_ids)
findings = (
Finding.all_objects.filter(tenant_id=self.request.tenant_id, id__in=ids)
.select_related("scan")
.prefetch_related("resources")
)
# Re-sort in Python to preserve ordering:
findings = sorted(findings, key=lambda x: ids.index(x.id))
serializer = self.get_serializer(findings, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(base_qs, many=True)
return Response(serializer.data)
@action(detail=False, methods=["get"], url_name="findings_services_regions")
def findings_services_regions(self, request):
@@ -1376,6 +1619,38 @@ class FindingViewSet(BaseRLSViewSet):
return Response(data=serializer.data, status=status.HTTP_200_OK)
@action(detail=False, methods=["get"], url_name="metadata")
def metadata(self, request):
tenant_id = self.request.tenant_id
queryset = self.get_queryset()
filtered_queryset = self.filter_queryset(queryset)
filtered_ids = filtered_queryset.order_by().values("id")
relevant_resources = Resource.all_objects.filter(
tenant_id=tenant_id, findings__id__in=Subquery(filtered_ids)
).only("service", "region", "type")
aggregation = relevant_resources.aggregate(
services=ArrayAgg("service", flat=True),
regions=ArrayAgg("region", flat=True),
resource_types=ArrayAgg("type", flat=True),
)
services = sorted(set(aggregation["services"] or []))
regions = sorted({region for region in aggregation["regions"] or [] if region})
resource_types = sorted(set(aggregation["resource_types"] or []))
result = {
"services": services,
"regions": regions,
"resource_types": resource_types,
}
serializer = self.get_serializer(data=result)
serializer.is_valid(raise_exception=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@extend_schema_view(
list=extend_schema(
@@ -1938,68 +2213,53 @@ class OverviewViewSet(BaseRLSViewSet):
@action(detail=False, methods=["get"], url_name="providers")
def providers(self, request):
tenant_id = self.request.tenant_id
# Subquery to get the most recent finding for each uid
latest_finding_ids = (
Finding.objects.filter(
latest_scan_ids = (
Scan.objects.filter(
tenant_id=tenant_id,
uid=OuterRef("uid"),
scan__provider=OuterRef("scan__provider"),
state=StateChoices.COMPLETED,
)
.order_by("-id") # Most recent by id
.values("id")[:1]
.order_by("provider_id", "-inserted_at")
.distinct("provider_id")
.values_list("id", flat=True)
)
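# .distinct("provider_id") compiles to PostgreSQL DISTINCT ON (provider_id); the
# order_by must lead with provider_id (then -inserted_at) so each provider keeps
# only its most recent completed scan.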
# Filter findings to only include the most recent for each uid
recent_findings = Finding.objects.filter(
tenant_id=tenant_id, id__in=Subquery(latest_finding_ids)
)
# Aggregate findings by provider
findings_aggregated = (
recent_findings.values("scan__provider__provider")
ScanSummary.objects.filter(tenant_id=tenant_id, scan_id__in=latest_scan_ids)
.values("scan__provider__provider")
.annotate(
findings_passed=Count("id", filter=Q(status=StatusChoices.PASS.value)),
findings_failed=Count("id", filter=Q(status=StatusChoices.FAIL.value)),
findings_manual=Count(
"id", filter=Q(status=StatusChoices.MANUAL.value)
),
total_findings=Count("id"),
findings_passed=Coalesce(Sum("_pass"), 0),
findings_failed=Coalesce(Sum("fail"), 0),
findings_muted=Coalesce(Sum("muted"), 0),
total_findings=Coalesce(Sum("total"), 0),
)
.order_by("-findings_failed")
)
# Aggregate total resources by provider
resources_aggregated = (
Resource.objects.filter(tenant_id=tenant_id)
.values("provider__provider")
.annotate(total_resources=Count("id"))
)
resources_dict = {
row["provider__provider"]: row["total_resources"]
for row in resources_aggregated
}
# Combine findings and resources data
overview = []
for findings in findings_aggregated:
provider = findings["scan__provider__provider"]
total_resources = next(
(
res["total_resources"]
for res in resources_aggregated
if res["provider__provider"] == provider
),
0,
)
for row in findings_aggregated:
provider_type = row["scan__provider__provider"]
overview.append(
{
"provider": provider,
"total_resources": total_resources,
"total_findings": findings["total_findings"],
"findings_passed": findings["findings_passed"],
"findings_failed": findings["findings_failed"],
"findings_manual": findings["findings_manual"],
"provider": provider_type,
"total_resources": resources_dict.get(provider_type, 0),
"total_findings": row["total_findings"],
"findings_passed": row["findings_passed"],
"findings_failed": row["findings_failed"],
"findings_muted": row["findings_muted"],
}
)
serializer = OverviewProviderSerializer(overview, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@action(detail=False, methods=["get"], url_name="findings")
@@ -2014,7 +2274,7 @@ class OverviewViewSet(BaseRLSViewSet):
state=StateChoices.COMPLETED,
provider_id=OuterRef("scan__provider_id"),
)
.order_by("-id")
.order_by("-inserted_at")
.values("id")[:1]
)
@@ -2059,7 +2319,7 @@ class OverviewViewSet(BaseRLSViewSet):
state=StateChoices.COMPLETED,
provider_id=OuterRef("scan__provider_id"),
)
.order_by("-id")
.order_by("-inserted_at")
.values("id")[:1]
)
@@ -2095,7 +2355,7 @@ class OverviewViewSet(BaseRLSViewSet):
state=StateChoices.COMPLETED,
provider_id=OuterRef("scan__provider_id"),
)
.order_by("-id")
.order_by("-inserted_at")
.values("id")[:1]
)

View File

@@ -4,6 +4,8 @@ from config.custom_logging import LOGGING # noqa
from config.env import BASE_DIR, env # noqa
from config.settings.celery import * # noqa
from config.settings.partitions import * # noqa
from config.settings.sentry import * # noqa
from config.settings.social_login import * # noqa
SECRET_KEY = env("SECRET_KEY", default="secret")
DEBUG = env.bool("DJANGO_DEBUG", default=False)
@@ -29,6 +31,13 @@ INSTALLED_APPS = [
"django_celery_results",
"django_celery_beat",
"rest_framework_simplejwt.token_blacklist",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"allauth.socialaccount.providers.github",
"dj_rest_auth.registration",
"rest_framework.authtoken",
]
MIDDLEWARE = [
@@ -42,8 +51,11 @@ MIDDLEWARE = [
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"api.middleware.APILoggingMiddleware",
"allauth.account.middleware.AccountMiddleware",
]
SITE_ID = 1
CORS_ALLOWED_ORIGINS = ["http://localhost", "http://127.0.0.1"]
ROOT_URLCONF = "config.urls"
@@ -207,3 +219,20 @@ CACHE_STALE_WHILE_REVALIDATE = env.int("DJANGO_STALE_WHILE_REVALIDATE", 60)
TESTING = False
FINDINGS_MAX_DAYS_IN_RANGE = env.int("DJANGO_FINDINGS_MAX_DAYS_IN_RANGE", 7)
# API export settings
DJANGO_TMP_OUTPUT_DIRECTORY = env.str(
"DJANGO_TMP_OUTPUT_DIRECTORY", "/tmp/prowler_api_output"
)
DJANGO_FINDINGS_BATCH_SIZE = env.str("DJANGO_FINDINGS_BATCH_SIZE", 1000)
DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", "")
DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID = env.str("DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID", "")
DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY = env.str(
"DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY", ""
)
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN = env.str("DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN", "")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION = env.str("DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION", "")
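For local testing, the export settings above can be driven entirely through the environment before Django loads its settings; a sketch with placeholder values (none of these are project defaults except the tmp directory and batch size):

# Illustrative environment for the report export settings (placeholder values).
import os

os.environ.update(
    {
        "DJANGO_TMP_OUTPUT_DIRECTORY": "/tmp/prowler_api_output",
        "DJANGO_FINDINGS_BATCH_SIZE": "1000",
        "DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET": "my-prowler-reports",        # placeholder
        "DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID": "<access-key-id>",           # placeholder
        "DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY": "<secret-access-key>",   # placeholder
        "DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION": "eu-west-1",                # placeholder
    }
)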

View File

@@ -1,7 +1,6 @@
from config.django.base import * # noqa
from config.env import env
DEBUG = env.bool("DJANGO_DEBUG", default=True)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["*"])

View File

@@ -1,7 +1,6 @@
from config.django.base import * # noqa
from config.env import env
DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.1"])

View File

@@ -1,7 +1,6 @@
from config.django.base import * # noqa
from config.env import env
DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.1"])

View File

@@ -0,0 +1,54 @@
import sentry_sdk
from config.env import env
IGNORED_EXCEPTIONS = [
# Authentication Errors from AWS
"InvalidToken",
"AccessDeniedException",
"AuthorizationErrorException",
"UnrecognizedClientException",
"UnauthorizedOperation",
"AuthFailure",
"InvalidClientTokenId",
"AccessDenied",
# Shodan Check
"No Shodan API Key",
# For now we don't want to log the RequestLimitExceeded errors
"RequestLimitExceeded",
"ThrottlingException",
"Rate exceeded",
# The following comes from urllib3
# eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
"Pool is closed",
]
def before_send(event, hint):
"""
before_send filters Sentry events, deciding whether each one is sent or dropped
"""
# Ignore logs with the ignored_exceptions
# https://docs.python.org/3/library/logging.html#logrecord-objects
if "log_record" in hint:
log_msg = hint["log_record"].msg
log_lvl = hint["log_record"].levelno
# Drop ERROR-level (40) events whose message matches an ignored exception
if log_lvl == 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
return
return event
sentry_sdk.init(
dsn=env.str("DJANGO_SENTRY_DSN", ""),
# Add data like request headers and IP for users,
# see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
before_send=before_send,
send_default_pii=True,
_experiments={
# Set continuous_profiling_auto_start to True
# to automatically start the profiler on when
# possible.
"continuous_profiling_auto_start": True,
},
)
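As a rough illustration (not part of the diff), the hook above can be exercised with a fabricated log record; the helper name, the shortened ignore list and the sample message below are all hypothetical, but the behaviour mirrors the filter: an ERROR-level event whose message matches an ignored exception is dropped, anything else passes through.
import logging

# Hypothetical stand-in for the before_send hook above, with a shortened ignore list.
IGNORED = ["AccessDenied", "RequestLimitExceeded"]

def before_send_sketch(event, hint):
    record = hint.get("log_record")
    if record and record.levelno == logging.ERROR and any(i in record.msg for i in IGNORED):
        return None  # drop the event
    return event

record = logging.LogRecord("api", logging.ERROR, __file__, 1, "AccessDenied while listing buckets", None, None)
assert before_send_sketch({"event_id": "abc"}, {"log_record": record}) is None
assert before_send_sketch({"event_id": "abc"}, {}) == {"event_id": "abc"}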

View File

@@ -0,0 +1,53 @@
from config.env import env
# Google Oauth settings
GOOGLE_OAUTH_CLIENT_ID = env("DJANGO_GOOGLE_OAUTH_CLIENT_ID", default="")
GOOGLE_OAUTH_CLIENT_SECRET = env("DJANGO_GOOGLE_OAUTH_CLIENT_SECRET", default="")
GOOGLE_OAUTH_CALLBACK_URL = env("DJANGO_GOOGLE_OAUTH_CALLBACK_URL", default="")
GITHUB_OAUTH_CLIENT_ID = env("DJANGO_GITHUB_OAUTH_CLIENT_ID", default="")
GITHUB_OAUTH_CLIENT_SECRET = env("DJANGO_GITHUB_OAUTH_CLIENT_SECRET", default="")
GITHUB_OAUTH_CALLBACK_URL = env("DJANGO_GITHUB_OAUTH_CALLBACK_URL", default="")
# Allauth settings
ACCOUNT_LOGIN_METHODS = {"email"} # Use Email / Password authentication
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "none" # Do not require email confirmation
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
REST_AUTH = {
"TOKEN_MODEL": None,
"REST_USE_JWT": True,
}
# django-allauth (social)
# Authenticate if local account with this email address already exists
SOCIALACCOUNT_EMAIL_AUTHENTICATION = True
# Connect local account and social account if local account with that email address already exists
SOCIALACCOUNT_EMAIL_AUTHENTICATION_AUTO_CONNECT = True
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
SOCIALACCOUNT_PROVIDERS = {
"google": {
"APP": {
"client_id": GOOGLE_OAUTH_CLIENT_ID,
"secret": GOOGLE_OAUTH_CLIENT_SECRET,
"key": "",
},
"SCOPE": [
"email",
"profile",
],
"AUTH_PARAMS": {
"access_type": "online",
},
},
"github": {
"APP": {
"client_id": GITHUB_OAUTH_CLIENT_ID,
"secret": GITHUB_OAUTH_CLIENT_SECRET,
},
"SCOPE": [
"user",
"read:org",
],
},
}
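For context, the provider entries above only work once the corresponding DJANGO_*_OAUTH_* variables are set; the helper below is a hypothetical startup check (not in the diff) that flags providers whose credentials are still empty.
import os

# Hypothetical sanity check: list social providers whose OAuth credentials are empty.
def missing_social_credentials(providers: dict) -> list:
    missing = []
    for name, cfg in providers.items():
        app = cfg.get("APP", {})
        if not app.get("client_id") or not app.get("secret"):
            missing.append(name)
    return missing

providers = {
    "google": {
        "APP": {
            "client_id": os.getenv("DJANGO_GOOGLE_OAUTH_CLIENT_ID", ""),
            "secret": os.getenv("DJANGO_GOOGLE_OAUTH_CLIENT_SECRET", ""),
        }
    }
}
print(missing_social_credentials(providers))  # ['google'] until both variables are exported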

View File

@@ -486,7 +486,7 @@ def scans_fixture(tenants_fixture, providers_fixture):
name="Scan 1",
provider=provider,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.AVAILABLE,
state=StateChoices.COMPLETED,
tenant_id=tenant.id,
started_at="2024-01-02T00:00:00Z",
)
@@ -626,6 +626,7 @@ def findings_fixture(scans_fixture, resources_fixture):
"CheckId": "test_check_id",
"Description": "test description apple sauce",
},
first_seen_at="2024-01-02T00:00:00Z",
)
finding1.add_resources([resource1])
@@ -651,6 +652,7 @@ def findings_fixture(scans_fixture, resources_fixture):
"CheckId": "test_check_id",
"Description": "test description orange juice",
},
first_seen_at="2024-01-02T00:00:00Z",
)
finding2.add_resources([resource2])

View File

@@ -5,10 +5,14 @@ from django_celery_beat.models import IntervalSchedule, PeriodicTask
from rest_framework_json_api.serializers import ValidationError
from tasks.tasks import perform_scheduled_scan_task
from api.models import Provider
from api.db_utils import rls_transaction
from api.models import Provider, Scan, StateChoices
def schedule_provider_scan(provider_instance: Provider):
tenant_id = str(provider_instance.tenant_id)
provider_id = str(provider_instance.id)
schedule, _ = IntervalSchedule.objects.get_or_create(
every=24,
period=IntervalSchedule.HOURS,
@@ -17,23 +21,9 @@ def schedule_provider_scan(provider_instance: Provider):
# Create a unique name for the periodic task
task_name = f"scan-perform-scheduled-{provider_instance.id}"
# Schedule the task
_, created = PeriodicTask.objects.get_or_create(
interval=schedule,
name=task_name,
task="scan-perform-scheduled",
kwargs=json.dumps(
{
"tenant_id": str(provider_instance.tenant_id),
"provider_id": str(provider_instance.id),
}
),
one_off=False,
defaults={
"start_time": datetime.now(timezone.utc) + timedelta(hours=24),
},
)
if not created:
if PeriodicTask.objects.filter(
interval=schedule, name=task_name, task="scan-perform-scheduled"
).exists():
raise ValidationError(
[
{
@@ -45,9 +35,36 @@ def schedule_provider_scan(provider_instance: Provider):
]
)
with rls_transaction(tenant_id):
scheduled_scan = Scan.objects.create(
tenant_id=tenant_id,
name="Daily scheduled scan",
provider_id=provider_id,
trigger=Scan.TriggerChoices.SCHEDULED,
state=StateChoices.AVAILABLE,
scheduled_at=datetime.now(timezone.utc),
)
# Schedule the task
periodic_task_instance = PeriodicTask.objects.create(
interval=schedule,
name=task_name,
task="scan-perform-scheduled",
kwargs=json.dumps(
{
"tenant_id": tenant_id,
"provider_id": provider_id,
}
),
one_off=False,
start_time=datetime.now(timezone.utc) + timedelta(hours=24),
)
scheduled_scan.scheduler_task_id = periodic_task_instance.id
scheduled_scan.save()
return perform_scheduled_scan_task.apply_async(
kwargs={
"tenant_id": str(provider_instance.tenant_id),
"provider_id": str(provider_instance.id),
"provider_id": provider_id,
},
)

View File

@@ -0,0 +1,156 @@
import os
import zipfile
import boto3
import config.django.base as base
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
from celery.utils.log import get_task_logger
from django.conf import settings
from prowler.config.config import (
csv_file_suffix,
html_file_suffix,
json_ocsf_file_suffix,
output_file_timestamp,
)
from prowler.lib.outputs.csv.csv import CSV
from prowler.lib.outputs.html.html import HTML
from prowler.lib.outputs.ocsf.ocsf import OCSF
logger = get_task_logger(__name__)
# Predefined mapping for output formats and their configurations
OUTPUT_FORMATS_MAPPING = {
"csv": {
"class": CSV,
"suffix": csv_file_suffix,
"kwargs": {},
},
"json-ocsf": {"class": OCSF, "suffix": json_ocsf_file_suffix, "kwargs": {}},
"html": {"class": HTML, "suffix": html_file_suffix, "kwargs": {"stats": {}}},
}
def _compress_output_files(output_directory: str) -> str:
"""
Compress output files from all configured output formats into a ZIP archive.
Args:
output_directory (str): The directory where the output files are located.
The function looks up all known suffixes in OUTPUT_FORMATS_MAPPING
and compresses those files into a single ZIP.
Returns:
str: The full path to the newly created ZIP archive.
"""
zip_path = f"{output_directory}.zip"
with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
for suffix in [config["suffix"] for config in OUTPUT_FORMATS_MAPPING.values()]:
zipf.write(
f"{output_directory}{suffix}",
f"output/{output_directory.split('/')[-1]}{suffix}",
)
return zip_path
def get_s3_client():
"""
Create and return a boto3 S3 client using AWS credentials from environment variables.
This function attempts to initialize an S3 client by reading the AWS access key, secret key,
session token, and region from environment variables. It then validates the client by listing
available S3 buckets. If an error occurs during this process (for example, due to missing or
invalid credentials), it falls back to creating an S3 client without explicitly provided credentials,
which may rely on other configuration sources (e.g., IAM roles).
Returns:
boto3.client: A configured S3 client instance.
Raises:
ClientError, NoCredentialsError, or ParamValidationError if both attempts to create a client fail.
"""
s3_client = None
try:
s3_client = boto3.client(
"s3",
aws_access_key_id=settings.DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY,
aws_session_token=settings.DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN,
region_name=settings.DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION,
)
s3_client.list_buckets()
except (ClientError, NoCredentialsError, ParamValidationError, ValueError):
s3_client = boto3.client("s3")
s3_client.list_buckets()
return s3_client
def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str:
"""
Upload the specified ZIP file to an S3 bucket.
If the S3 bucket environment variables are not configured,
the function returns None without performing an upload.
Args:
tenant_id (str): The tenant identifier, used as part of the S3 key prefix.
zip_path (str): The local file system path to the ZIP file to be uploaded.
scan_id (str): The scan identifier, used as part of the S3 key prefix.
Returns:
str: The S3 URI of the uploaded file (e.g., "s3://<bucket>/<key>") if successful.
None: If the required environment variables for the S3 bucket are not set.
Raises:
botocore.exceptions.ClientError: If the upload attempt to S3 fails for any reason.
"""
if not base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET:
return
try:
s3 = get_s3_client()
s3_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
s3.upload_file(
Filename=zip_path,
Bucket=base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET,
Key=s3_key,
)
return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{s3_key}"
except (ClientError, NoCredentialsError, ParamValidationError, ValueError) as e:
logger.error(f"S3 upload failed: {str(e)}")
def _generate_output_directory(
output_directory, prowler_provider: object, tenant_id: str, scan_id: str
) -> str:
"""
Generate a file system path for the output directory of a prowler scan.
This function constructs the output directory path by combining a base
temporary output directory, the tenant ID, the scan ID, and details about
the prowler provider along with a timestamp. The resulting path is used to
store the output files of a prowler scan.
Note:
This function depends on one external variable:
- `output_file_timestamp`: A timestamp (as a string) used to uniquely identify the output.
Args:
output_directory (str): The base output directory.
prowler_provider (object): An identifier or descriptor for the prowler provider.
Typically, this is a string indicating the provider (e.g., "aws").
tenant_id (str): The unique identifier for the tenant.
scan_id (str): The unique identifier for the scan.
Returns:
str: The constructed file system path for the prowler scan output directory.
Example:
>>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
'/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56'
"""
path = (
f"{output_directory}/{tenant_id}/{scan_id}/prowler-output-"
f"{prowler_provider}-{output_file_timestamp}"
)
os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)
return path
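Putting the helpers together with hypothetical identifiers (the tenant, scan, bucket and timestamp values below are illustrative only): the generated directory becomes a ZIP alongside it, and the S3 key follows the tenant/scan prefix used by _upload_to_s3.
import os

# Hypothetical values showing how the directory, ZIP and S3 key relate.
tenant_id, scan_id, bucket = "tenant-1234", "scan-5678", "example-output-bucket"
output_directory = f"/tmp/prowler_api_output/{tenant_id}/{scan_id}/prowler-output-aws-20240102120000"
zip_path = f"{output_directory}.zip"                            # produced by _compress_output_files
s3_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"  # used by _upload_to_s3
print(f"s3://{bucket}/{s3_key}")
# s3://example-output-bucket/tenant-1234/scan-5678/prowler-output-aws-20240102120000.zip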

View File

@@ -152,6 +152,9 @@ def perform_prowler_scan(
for progress, findings in prowler_scan.scan():
for finding in findings:
if finding is None:
logger.error(f"None finding detected on scan {scan_id}.")
continue
for attempt in range(CELERY_DEADLOCK_ATTEMPTS):
try:
with rls_transaction(tenant_id):
@@ -176,7 +179,10 @@ def perform_prowler_scan(
# Update resource fields if necessary
updated_fields = []
if resource_instance.region != finding.region:
if (
finding.region
and resource_instance.region != finding.region
):
resource_instance.region = finding.region
updated_fields.append("region")
if resource_instance.service != finding.service_name:
@@ -219,24 +225,33 @@ def perform_prowler_scan(
# Process finding
with rls_transaction(tenant_id):
finding_uid = finding.uid
last_first_seen_at = None
if finding_uid not in last_status_cache:
most_recent_finding = (
Finding.objects.filter(uid=finding_uid)
.order_by("-id")
.values("status")
Finding.all_objects.filter(
tenant_id=tenant_id, uid=finding_uid
)
.order_by("-inserted_at")
.values("status", "first_seen_at")
.first()
)
last_status = (
most_recent_finding["status"]
if most_recent_finding
else None
)
last_status_cache[finding_uid] = last_status
last_status = None
if most_recent_finding:
last_status = most_recent_finding["status"]
last_first_seen_at = most_recent_finding["first_seen_at"]
last_status_cache[finding_uid] = last_status, last_first_seen_at
else:
last_status = last_status_cache[finding_uid]
last_status, last_first_seen_at = last_status_cache[finding_uid]
status = FindingStatus[finding.status]
delta = _create_finding_delta(last_status, status)
# For findings that existed before this change (delta != "new"), the first time a finding UID is
# seen it is assigned the current date as first_seen_at, and successive findings with the same UID
# inherit the date from the previous finding.
# For new findings (delta == "new"), the first occurrence is assigned the current date as
# first_seen_at and the following findings with the same UID get that same date.
if not last_first_seen_at:
last_first_seen_at = datetime.now(tz=timezone.utc)
# Create the finding
finding_instance = Finding.objects.create(
@@ -251,6 +266,7 @@ def perform_prowler_scan(
raw_result=finding.raw,
check_id=finding.check_id,
scan=scan_instance,
first_seen_at=last_first_seen_at,
)
finding_instance.add_resources([resource_instance])
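Condensed, the first_seen_at rule introduced here behaves like the following sketch (assumed behaviour, using plain datetimes instead of model instances and a hypothetical helper name).
from datetime import datetime, timezone

def resolve_first_seen_at(last_first_seen_at):
    # Reuse the previous finding's date when one exists, otherwise stamp "now".
    return last_first_seen_at or datetime.now(tz=timezone.utc)

previous = datetime(2024, 1, 2, tzinfo=timezone.utc)
assert resolve_first_seen_at(previous) == previous   # existing UID keeps its original date
assert resolve_first_seen_at(None) > previous        # first occurrence gets the current date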
@@ -328,9 +344,18 @@ def perform_prowler_scan(
total_requirements=compliance["total_requirements"],
)
)
with rls_transaction(tenant_id):
ComplianceOverview.objects.bulk_create(compliance_overview_objects)
try:
with rls_transaction(tenant_id):
ComplianceOverview.objects.bulk_create(
compliance_overview_objects, batch_size=100
)
except Exception as overview_exception:
import sentry_sdk
sentry_sdk.capture_exception(overview_exception)
logger.error(
f"Error storing compliance overview for scan {scan_id}: {overview_exception}"
)
if exception is not None:
raise exception
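For reference, and relying on standard Django ORM semantics rather than anything specific to this diff, batch_size=100 makes bulk_create split the insert into chunks of at most 100 rows instead of one oversized statement; the chunking is equivalent to the sketch below.
# Illustrative chunking equivalent to bulk_create(..., batch_size=100).
objects = [f"requirement-{i}" for i in range(250)]
batches = [objects[i:i + 100] for i in range(0, len(objects), 100)]
assert [len(b) for b in batches] == [100, 100, 50]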
@@ -367,7 +392,7 @@ def aggregate_findings(tenant_id: str, scan_id: str):
- muted_changed: Muted findings with a delta of 'changed'.
"""
with rls_transaction(tenant_id):
findings = Finding.objects.filter(scan_id=scan_id)
findings = Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)
aggregation = findings.values(
"check_id",

View File

@@ -1,15 +1,28 @@
from datetime import datetime, timedelta, timezone
from shutil import rmtree
from celery import shared_task
from celery import chain, shared_task
from celery.utils.log import get_task_logger
from config.celery import RLSTask
from config.django.base import DJANGO_FINDINGS_BATCH_SIZE, DJANGO_TMP_OUTPUT_DIRECTORY
from django_celery_beat.models import PeriodicTask
from tasks.jobs.connection import check_provider_connection
from tasks.jobs.deletion import delete_provider, delete_tenant
from tasks.jobs.export import (
OUTPUT_FORMATS_MAPPING,
_compress_output_files,
_generate_output_directory,
_upload_to_s3,
)
from tasks.jobs.scan import aggregate_findings, perform_prowler_scan
from tasks.utils import batched, get_next_execution_datetime
from api.db_utils import rls_transaction
from api.decorators import set_tenant
from api.models import Provider, Scan
from api.models import Finding, Provider, Scan, ScanSummary, StateChoices
from api.utils import initialize_prowler_provider
from prowler.lib.outputs.finding import Finding as FindingOutput
logger = get_task_logger(__name__)
@shared_task(base=RLSTask, name="provider-connection-check")
@@ -29,7 +42,7 @@ def check_provider_connection_task(provider_id: str):
return check_provider_connection(provider_id=provider_id)
@shared_task(base=RLSTask, name="provider-deletion")
@shared_task(base=RLSTask, name="provider-deletion", queue="deletion")
@set_tenant
def delete_provider_task(provider_id: str):
"""
@@ -69,13 +82,22 @@ def perform_scan_task(
Returns:
dict: The result of the scan execution, typically including the status and results of the performed checks.
"""
return perform_prowler_scan(
result = perform_prowler_scan(
tenant_id=tenant_id,
scan_id=scan_id,
provider_id=provider_id,
checks_to_execute=checks_to_execute,
)
chain(
perform_scan_summary_task.si(tenant_id, scan_id),
generate_outputs.si(
scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
),
).apply_async()
return result
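The chain built above relies on immutable signatures; the sketch below is roughly equivalent, with hypothetical task names and IDs, and nothing is actually dispatched.
from celery import chain, signature

tenant_id, scan_id, provider_id = "tenant-1234", "scan-5678", "provider-9"
workflow = chain(
    # .si() / immutable=True means each link ignores the previous task's return
    # value and only receives the arguments given here.
    signature("scan-summary", args=(tenant_id, scan_id), immutable=True),
    signature(
        "scan-report",
        kwargs={"scan_id": scan_id, "provider_id": provider_id, "tenant_id": tenant_id},
        immutable=True,
    ),
)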
@shared_task(base=RLSTask, bind=True, name="scan-perform-scheduled", queue="scans")
def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
@@ -100,34 +122,49 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
task_id = self.request.id
with rls_transaction(tenant_id):
provider_instance = Provider.objects.get(pk=provider_id)
periodic_task_instance = PeriodicTask.objects.get(
name=f"scan-perform-scheduled-{provider_id}"
)
next_scan_date = datetime.combine(
datetime.now(timezone.utc), periodic_task_instance.start_time.time()
) + timedelta(hours=24)
scan_instance = Scan.objects.create(
next_scan_datetime = get_next_execution_datetime(task_id, provider_id)
scan_instance, _ = Scan.objects.get_or_create(
tenant_id=tenant_id,
name="Daily scheduled scan",
provider=provider_instance,
provider_id=provider_id,
trigger=Scan.TriggerChoices.SCHEDULED,
next_scan_at=next_scan_date,
task_id=task_id,
state__in=(StateChoices.SCHEDULED, StateChoices.AVAILABLE),
scheduler_task_id=periodic_task_instance.id,
defaults={"state": StateChoices.SCHEDULED},
)
result = perform_prowler_scan(
tenant_id=tenant_id,
scan_id=str(scan_instance.id),
provider_id=provider_id,
)
perform_scan_summary_task.apply_async(
kwargs={
"tenant_id": tenant_id,
"scan_id": str(scan_instance.id),
}
)
scan_instance.task_id = task_id
scan_instance.save()
try:
result = perform_prowler_scan(
tenant_id=tenant_id,
scan_id=str(scan_instance.id),
provider_id=provider_id,
)
except Exception as e:
raise e
finally:
with rls_transaction(tenant_id):
Scan.objects.get_or_create(
tenant_id=tenant_id,
name="Daily scheduled scan",
provider_id=provider_id,
trigger=Scan.TriggerChoices.SCHEDULED,
state=StateChoices.SCHEDULED,
scheduled_at=next_scan_datetime,
scheduler_task_id=periodic_task_instance.id,
)
chain(
perform_scan_summary_task.si(tenant_id, scan_instance.id),
generate_outputs.si(
scan_id=str(scan_instance.id), provider_id=provider_id, tenant_id=tenant_id
),
).apply_async()
return result
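The try/finally above guarantees the follow-up scheduled Scan row is created even when the scan itself fails; stripped of the ORM calls, the control flow reduces to something like this (illustrative only, hypothetical names).
created_next_scan = []

def run_scheduled(fail: bool):
    try:
        if fail:
            raise RuntimeError("scan failed")
        return "ok"
    finally:
        # Mirrors the finally block above: always record the next scheduled scan.
        created_next_scan.append(True)

try:
    run_scheduled(fail=True)
except RuntimeError:
    pass
assert created_next_scan == [True]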
@@ -136,6 +173,107 @@ def perform_scan_summary_task(tenant_id: str, scan_id: str):
return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)
@shared_task(name="tenant-deletion")
@shared_task(name="tenant-deletion", queue="deletion")
def delete_tenant_task(tenant_id: str):
return delete_tenant(pk=tenant_id)
@shared_task(
base=RLSTask,
name="scan-report",
queue="scan-reports",
)
@set_tenant(keep_tenant=True)
def generate_outputs(scan_id: str, provider_id: str, tenant_id: str):
"""
Process findings in batches and generate output files in multiple formats.
This function retrieves findings associated with a scan, processes them
in batches of DJANGO_FINDINGS_BATCH_SIZE, and writes each batch to the
corresponding output files. It reuses output writer instances across
batches, updates them with each batch of transformed findings, and uses a
flag to indicate when the final batch is being processed. Finally, the
output files are compressed and uploaded to S3.
Args:
tenant_id (str): The tenant identifier.
scan_id (str): The scan identifier.
provider_id (str): The provider id used when generating outputs.
"""
# Initialize the prowler provider
prowler_provider = initialize_prowler_provider(Provider.objects.get(id=provider_id))
# Get the provider UID
provider_uid = Provider.objects.get(id=provider_id).uid
# Generate and ensure the output directory exists
output_directory = _generate_output_directory(
DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
)
# Define auxiliary variables
output_writers = {}
scan_summary = FindingOutput._transform_findings_stats(
ScanSummary.objects.filter(scan_id=scan_id)
)
# Retrieve findings queryset
findings_qs = Finding.all_objects.filter(scan_id=scan_id).order_by("uid")
# Process findings in batches
for batch, is_last_batch in batched(
findings_qs.iterator(), DJANGO_FINDINGS_BATCH_SIZE
):
finding_outputs = [
FindingOutput.transform_api_finding(finding, prowler_provider)
for finding in batch
]
# Generate output files
for mode, config in OUTPUT_FORMATS_MAPPING.items():
kwargs = dict(config.get("kwargs", {}))
if mode == "html":
kwargs["provider"] = prowler_provider
kwargs["stats"] = scan_summary
writer_class = config["class"]
if writer_class in output_writers:
writer = output_writers[writer_class]
writer.transform(finding_outputs)
writer.close_file = is_last_batch
else:
writer = writer_class(
findings=finding_outputs,
file_path=output_directory,
file_extension=config["suffix"],
from_cli=False,
)
writer.close_file = is_last_batch
output_writers[writer_class] = writer
# Write the current batch using the writer
writer.batch_write_data_to_file(**kwargs)
# TODO: Refactor the output classes to avoid this manual reset
writer._data = []
# Compress output files
output_directory = _compress_output_files(output_directory)
# Save to configured storage
uploaded = _upload_to_s3(tenant_id, output_directory, scan_id)
if uploaded:
output_directory = uploaded
uploaded = True
# Remove the local files after upload
rmtree(DJANGO_TMP_OUTPUT_DIRECTORY, ignore_errors=True)
else:
uploaded = False
# Update the scan instance with the output path
Scan.all_objects.filter(id=scan_id).update(output_location=output_directory)
logger.info(f"Scan output files generated, output location: {output_directory}")
return {"upload": uploaded}
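Stripped of the Prowler writer classes, the batching pattern used above looks like the sketch below; DummyWriter is a stand-in, but the transform / close_file / batch_write_data_to_file interaction mirrors the task.
class DummyWriter:
    # Stand-in for the CSV/OCSF/HTML writers used above.
    def __init__(self):
        self.rows, self.close_file, self._data = [], False, []

    def transform(self, findings):
        self._data.extend(findings)

    def batch_write_data_to_file(self):
        self.rows.extend(self._data)

writer = DummyWriter()
for batch, is_last in [([1, 2], False), ([3], True)]:
    writer.transform(batch)
    writer.close_file = is_last          # only the final batch closes the file
    writer.batch_write_data_to_file()
    writer._data = []                    # manual reset, see the TODO above

assert writer.rows == [1, 2, 3] and writer.close_file is True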

View File

@@ -6,6 +6,8 @@ from django_celery_beat.models import IntervalSchedule, PeriodicTask
from rest_framework_json_api.serializers import ValidationError
from tasks.beat import schedule_provider_scan
from api.models import Scan
@pytest.mark.django_db
class TestScheduleProviderScan:
@@ -15,9 +17,11 @@ class TestScheduleProviderScan:
with patch(
"tasks.tasks.perform_scheduled_scan_task.apply_async"
) as mock_apply_async:
assert Scan.all_objects.count() == 0
result = schedule_provider_scan(provider_instance)
assert result is not None
assert Scan.all_objects.count() == 1
mock_apply_async.assert_called_once_with(
kwargs={

View File

@@ -0,0 +1,102 @@
from datetime import datetime, timedelta, timezone
from unittest.mock import patch
import pytest
from django_celery_beat.models import IntervalSchedule, PeriodicTask
from django_celery_results.models import TaskResult
from tasks.utils import batched, get_next_execution_datetime
@pytest.mark.django_db
class TestGetNextExecutionDatetime:
@pytest.fixture
def setup_periodic_task(self, db):
# Create a periodic task with an hourly interval
interval = IntervalSchedule.objects.create(
every=1, period=IntervalSchedule.HOURS
)
periodic_task = PeriodicTask.objects.create(
name="scan-perform-scheduled-123",
task="scan-perform-scheduled",
interval=interval,
)
return periodic_task
@pytest.fixture
def setup_task_result(self, db):
# Create a task result record
task_result = TaskResult.objects.create(
task_id="abc123",
task_name="scan-perform-scheduled",
status="SUCCESS",
date_created=datetime.now(timezone.utc) - timedelta(hours=1),
result="Success",
)
return task_result
def test_get_next_execution_datetime_success(
self, setup_task_result, setup_periodic_task
):
task_result = setup_task_result
periodic_task = setup_periodic_task
# Mock periodic_task_name on TaskResult
with patch.object(
TaskResult, "periodic_task_name", return_value=periodic_task.name
):
next_execution = get_next_execution_datetime(
task_id=task_result.task_id, provider_id="123"
)
expected_time = task_result.date_created + timedelta(hours=1)
assert next_execution == expected_time
def test_get_next_execution_datetime_fallback_to_provider_id(
self, setup_task_result, setup_periodic_task
):
task_result = setup_task_result
# Simulate the case where `periodic_task_name` is missing
with patch.object(TaskResult, "periodic_task_name", return_value=None):
next_execution = get_next_execution_datetime(
task_id=task_result.task_id, provider_id="123"
)
expected_time = task_result.date_created + timedelta(hours=1)
assert next_execution == expected_time
def test_get_next_execution_datetime_periodic_task_does_not_exist(
self, setup_task_result
):
task_result = setup_task_result
with pytest.raises(PeriodicTask.DoesNotExist):
get_next_execution_datetime(
task_id=task_result.task_id, provider_id="nonexistent"
)
class TestBatchedFunction:
def test_empty_iterable(self):
result = list(batched([], 3))
assert result == [([], True)]
def test_exact_batches(self):
result = list(batched([1, 2, 3, 4], 2))
expected = [([1, 2], False), ([3, 4], False), ([], True)]
assert result == expected
def test_inexact_batches(self):
result = list(batched([1, 2, 3, 4, 5], 2))
expected = [([1, 2], False), ([3, 4], False), ([5], True)]
assert result == expected
def test_batch_size_one(self):
result = list(batched([1, 2, 3], 1))
expected = [([1], False), ([2], False), ([3], False), ([], True)]
assert result == expected
def test_batch_size_greater_than_length(self):
result = list(batched([1, 2, 3], 5))
expected = [([1, 2, 3], True)]
assert result == expected

View File

@@ -0,0 +1,50 @@
from datetime import datetime, timedelta, timezone
from django_celery_beat.models import PeriodicTask
from django_celery_results.models import TaskResult
def get_next_execution_datetime(task_id: int, provider_id: str) -> datetime:
task_instance = TaskResult.objects.get(task_id=task_id)
try:
periodic_task_instance = PeriodicTask.objects.get(
name=task_instance.periodic_task_name
)
except PeriodicTask.DoesNotExist:
periodic_task_instance = PeriodicTask.objects.get(
name=f"scan-perform-scheduled-{provider_id}"
)
interval = periodic_task_instance.interval
current_scheduled_time = datetime.combine(
datetime.now(timezone.utc).date(),
task_instance.date_created.time(),
tzinfo=timezone.utc,
)
return current_scheduled_time + timedelta(**{interval.period: interval.every})
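A worked example of the arithmetic above, with assumed values: an hourly IntervalSchedule and a task result created at 10:15 UTC yield the same time-of-day on the current date plus one hour.
from datetime import datetime, timedelta, timezone

interval_period, interval_every = "hours", 1          # e.g. IntervalSchedule(every=1, period=HOURS)
task_created = datetime(2024, 1, 2, 10, 15, tzinfo=timezone.utc)

current_scheduled_time = datetime.combine(
    datetime.now(timezone.utc).date(), task_created.time(), tzinfo=timezone.utc
)
next_execution = current_scheduled_time + timedelta(**{interval_period: interval_every})
assert next_execution.time() == datetime(2024, 1, 2, 11, 15).time()  # 11:15 UTC today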
def batched(iterable, batch_size):
"""
Yield successive batches from an iterable.
Args:
iterable: An iterable source of items.
batch_size (int): The number of items per batch.
Yields:
tuple: A pair (batch, is_last_batch) where:
- batch (list): A list of items (with length equal to batch_size,
except possibly for the last batch).
- is_last_batch (bool): True if this is the final batch, False otherwise.
"""
batch = []
for item in iterable:
batch.append(item)
if len(batch) == batch_size:
yield batch, False
batch = []
yield batch, True

View File

@@ -0,0 +1,24 @@
apiVersion: v2
name: prowler-api
description: A Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "5.1.1"

View File

@@ -0,0 +1,22 @@
1. Get the application URL by running these commands:
{{- if .Values.ingress.enabled }}
{{- range $host := .Values.ingress.hosts }}
{{- range .paths }}
http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
{{- end }}
{{- end }}
{{- else if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "prowler-api.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "prowler-api.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "prowler-api.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "prowler-api.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

View File

@@ -0,0 +1,62 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "prowler-api.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "prowler-api.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "prowler-api.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "prowler-api.labels" -}}
helm.sh/chart: {{ include "prowler-api.chart" . }}
{{ include "prowler-api.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "prowler-api.selectorLabels" -}}
app.kubernetes.io/name: {{ include "prowler-api.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "prowler-api.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "prowler-api.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View File

@@ -0,0 +1,9 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "prowler-api.fullname" . }}-config
labels:
{{- include "prowler-api.labels" . | nindent 4 }}
data:
config.yaml: |-
{{- toYaml .Values.mainConfig | nindent 4 }}

View File

@@ -0,0 +1,85 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "prowler-api.fullname" . }}
labels:
{{- include "prowler-api.labels" . | nindent 4 }}
spec:
{{- if not .Values.autoscaling.enabled }}
replicas: {{ .Values.replicaCount }}
{{- end }}
selector:
matchLabels:
{{- include "prowler-api.selectorLabels" . | nindent 6 }}
template:
metadata:
annotations:
checksum/secrets: {{ include (print $.Template.BasePath "/secrets.yaml") . | sha256sum }}
checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }}
{{- with .Values.podAnnotations }}
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "prowler-api.labels" . | nindent 8 }}
{{- with .Values.podLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "prowler-api.serviceAccountName" . }}
securityContext:
{{- toYaml .Values.podSecurityContext | nindent 8 }}
containers:
{{- range $name,$config := .Values.containers }}
{{- if $config.enabled }}
- name: {{ $name }}
securityContext:
{{- toYaml $config.securityContext | nindent 12 }}
image: "{{ $config.image.repository }}:{{ $config.image.tag | default $.Chart.AppVersion }}"
imagePullPolicy: {{ $config.image.pullPolicy }}
envFrom:
- secretRef:
name: {{ include "prowler-api.fullname" $ }}
command:
{{- toYaml $config.command | nindent 12 }}
{{- if $config.ports }}
ports:
{{- toYaml $config.ports | nindent 12 }}
{{- end }}
livenessProbe:
{{- toYaml $config.livenessProbe | nindent 12 }}
readinessProbe:
{{- toYaml $config.readinessProbe | nindent 12 }}
resources:
{{- toYaml $config.resources | nindent 12 }}
volumeMounts:
- name: {{ include "prowler-api.fullname" $ }}-config
mountPath: {{ $.Values.releaseConfigRoot }}{{ $.Values.releaseConfigPath }}
subPath: config.yaml
{{- with .volumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- end }}
{{- end }}
volumes:
- name: {{ include "prowler-api.fullname" . }}-config
configMap:
name: {{ include "prowler-api.fullname" . }}-config
{{- with .Values.volumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}

View File

@@ -0,0 +1,43 @@
{{- if .Values.ingress.enabled -}}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: {{ include "prowler-api.fullname" . }}
labels:
{{- include "prowler-api.labels" . | nindent 4 }}
{{- with .Values.ingress.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
{{- with .Values.ingress.className }}
ingressClassName: {{ . }}
{{- end }}
{{- if .Values.ingress.tls }}
tls:
{{- range .Values.ingress.tls }}
- hosts:
{{- range .hosts }}
- {{ . | quote }}
{{- end }}
secretName: {{ .secretName }}
{{- end }}
{{- end }}
rules:
{{- range .Values.ingress.hosts }}
- host: {{ .host | quote }}
http:
paths:
{{- range .paths }}
- path: {{ .path }}
{{- with .pathType }}
pathType: {{ . }}
{{- end }}
backend:
service:
name: {{ include "prowler-api.fullname" $ }}
port:
number: {{ $.Values.service.port }}
{{- end }}
{{- end }}
{{- end }}

View File

@@ -0,0 +1,11 @@
apiVersion: v1
kind: Secret
metadata:
name: {{ include "prowler-api.fullname" . }}
labels:
{{- include "prowler-api.labels" . | nindent 4 }}
type: Opaque
data:
{{- range $k, $v := .Values.secrets }}
{{ $k }}: {{ $v | toString | b64enc | quote }}
{{- end }}

View File

@@ -0,0 +1,21 @@
apiVersion: v1
kind: Service
metadata:
name: {{ include "prowler-api.fullname" . }}
labels:
{{- include "prowler-api.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
{{- range $name,$config := .Values.containers }}
{{- if $config.ports }}
{{- range $p := $config.ports }}
- port: {{ $p.containerPort }}
targetPort: {{ $p.containerPort }}
protocol: TCP
name: {{ $config.name }}
{{- end }}
{{- end }}
{{- end }}
selector:
{{- include "prowler-api.selectorLabels" . | nindent 4 }}

View File

@@ -0,0 +1,13 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "prowler-api.serviceAccountName" . }}
labels:
{{- include "prowler-api.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
automountServiceAccountToken: {{ .Values.serviceAccount.automount }}
{{- end }}

View File

@@ -0,0 +1,625 @@
# Default values for prowler-api.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
# This will set the replicaset count; more information can be found here: https://kubernetes.io/docs/concepts/workloads/controllers/replicaset/
replicaCount: 1
# This sets the container image; more information can be found here: https://kubernetes.io/docs/concepts/containers/images/
containers:
prowler-api:
enabled: true
image:
repository: prowlercloud/prowler-api
pullPolicy: IfNotPresent
ports:
- name: http
containerPort: 8080
protocol: TCP
command: ["/home/prowler/docker-entrypoint.sh", "prod"]
worker:
enabled: true
image:
repository: prowlercloud/prowler-api
pullPolicy: IfNotPresent
command: ["/home/prowler/docker-entrypoint.sh", "worker"]
worker-beat:
enabled: true
image:
repository: prowlercloud/prowler-api
pullPolicy: IfNotPresent
command: ["../docker-entrypoint.sh", "beat"]
secrets:
POSTGRES_HOST:
POSTGRES_PORT: 5432
POSTGRES_ADMIN_USER:
POSTGRES_ADMIN_PASSWORD:
POSTGRES_USER:
POSTGRES_PASSWORD:
POSTGRES_DB:
# Valkey settings
VALKEY_HOST: valkey-headless
VALKEY_PORT: "6379"
VALKEY_DB: "0"
# Django settings
DJANGO_ALLOWED_HOSTS: localhost,127.0.0.1,prowler-api
DJANGO_BIND_ADDRESS: 0.0.0.0
DJANGO_PORT: "8080"
DJANGO_DEBUG: False
DJANGO_SETTINGS_MODULE: config.django.production
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER: human_readable
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL: INFO
# Defaults to the maximum available based on CPU cores if not set.
DJANGO_WORKERS: 2
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME: "30"
# Token lifetime is in minutes
DJANGO_REFRESH_TOKEN_LIFETIME: "1440"
DJANGO_CACHE_MAX_AGE: "3600"
DJANGO_STALE_WHILE_REVALIDATE: "60"
DJANGO_MANAGE_DB_PARTITIONS: "False"
# openssl genrsa -out private.pem 2048
DJANGO_TOKEN_SIGNING_KEY:
# openssl rsa -in private.pem -pubout -out public.pem
DJANGO_TOKEN_VERIFYING_KEY:
# openssl rand -base64 32
DJANGO_SECRETS_ENCRYPTION_KEY:
DJANGO_BROKER_VISIBILITY_TIMEOUT: 86400
releaseConfigRoot: /home/prowler/.cache/pypoetry/virtualenvs/prowler-api-NnJNioq7-py3.12/lib/python3.12/site-packages/
releaseConfigPath: prowler/config/config.yaml
mainConfig:
# AWS Configuration
aws:
# AWS Global Configuration
# aws.mute_non_default_regions --> Set to True to mute failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
mute_non_default_regions: False
# If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`:
# Mutelist:
# Accounts:
# "*":
# Checks:
# "*":
# Regions:
# - "ap-southeast-1"
# - "ap-southeast-2"
# Resources:
# - "*"
# AWS IAM Configuration
# aws.iam_user_accesskey_unused --> CIS recommends 45 days
max_unused_access_keys_days: 45
# aws.iam_user_console_access_unused --> CIS recommends 45 days
max_console_access_days: 45
# AWS EC2 Configuration
# aws.ec2_elastic_ip_shodan
# TODO: create common config
shodan_api_key: null
# aws.ec2_securitygroup_with_many_ingress_egress_rules --> by default is 50 rules
max_security_group_rules: 50
# aws.ec2_instance_older_than_specific_days --> by default is 6 months (180 days)
max_ec2_instance_age_in_days: 180
# aws.ec2_securitygroup_allow_ingress_from_internet_to_any_port
# allowed network interface types for security groups open to the Internet
ec2_allowed_interface_types:
[
"api_gateway_managed",
"vpc_endpoint",
]
# allowed network interface owners for security groups open to the Internet
ec2_allowed_instance_owners:
[
"amazon-elb"
]
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
ec2_high_risk_ports:
[
25,
110,
135,
143,
445,
3000,
4333,
5000,
5500,
8080,
8088,
]
# AWS ECS Configuration
# aws.ecs_service_fargate_latest_platform_version
fargate_linux_latest_version: "1.4.0"
fargate_windows_latest_version: "1.0.0"
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
# AWS SSM Configuration (aws.ssm_documents_set_as_public)
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
# Multi account environment: Any additional trusted account numbers should be added to the list, e.g.
# trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
trusted_account_ids: []
# AWS Cloudwatch Configuration
# aws.cloudwatch_log_group_retention_policy_specific_days_enabled --> by default is 365 days
log_group_retention_days: 365
# AWS CloudFormation Configuration
# cloudformation_stack_cdktoolkit_bootstrap_version --> by default is 21
recommended_cdk_bootstrap_version: 21
# AWS AppStream Session Configuration
# aws.appstream_fleet_session_idle_disconnect_timeout
max_idle_disconnect_timeout_in_seconds: 600 # 10 Minutes
# aws.appstream_fleet_session_disconnect_timeout
max_disconnect_timeout_in_seconds: 300 # 5 Minutes
# aws.appstream_fleet_maximum_session_duration
max_session_duration_seconds: 36000 # 10 Hours
# AWS Lambda Configuration
# aws.awslambda_function_using_supported_runtimes
obsolete_lambda_runtimes:
[
"java8",
"go1.x",
"provided",
"python3.6",
"python2.7",
"python3.7",
"nodejs4.3",
"nodejs4.3-edge",
"nodejs6.10",
"nodejs",
"nodejs8.10",
"nodejs10.x",
"nodejs12.x",
"nodejs14.x",
"nodejs16.x",
"dotnet5.0",
"dotnet7",
"dotnetcore1.0",
"dotnetcore2.0",
"dotnetcore2.1",
"dotnetcore3.1",
"ruby2.5",
"ruby2.7",
]
# aws.awslambda_function_vpc_is_in_multi_azs
lambda_min_azs: 2
# AWS Organizations
# aws.organizations_scp_check_deny_regions
# aws.organizations_enabled_regions: [
# "eu-central-1",
# "eu-west-1",
# "us-east-1"
# ]
organizations_enabled_regions: []
organizations_trusted_delegated_administrators: []
# AWS ECR
# aws.ecr_repositories_scan_vulnerabilities_in_latest_image
# CRITICAL
# HIGH
# MEDIUM
ecr_repository_vulnerability_minimum_severity: "MEDIUM"
# AWS Trusted Advisor
# aws.trustedadvisor_premium_support_plan_subscribed
verify_premium_support_plans: True
# AWS CloudTrail Configuration
# aws.cloudtrail_threat_detection_privilege_escalation
threat_detection_privilege_escalation_threshold: 0.2 # Percentage of actions found to decide if it is a privilege escalation attack event, by default is 0.2 (20%)
threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
threat_detection_privilege_escalation_actions:
[
"AddPermission",
"AddRoleToInstanceProfile",
"AddUserToGroup",
"AssociateAccessPolicy",
"AssumeRole",
"AttachGroupPolicy",
"AttachRolePolicy",
"AttachUserPolicy",
"ChangePassword",
"CreateAccessEntry",
"CreateAccessKey",
"CreateDevEndpoint",
"CreateEventSourceMapping",
"CreateFunction",
"CreateGroup",
"CreateJob",
"CreateKeyPair",
"CreateLoginProfile",
"CreatePipeline",
"CreatePolicyVersion",
"CreateRole",
"CreateStack",
"DeleteRolePermissionsBoundary",
"DeleteRolePolicy",
"DeleteUserPermissionsBoundary",
"DeleteUserPolicy",
"DetachRolePolicy",
"DetachUserPolicy",
"GetCredentialsForIdentity",
"GetId",
"GetPolicyVersion",
"GetUserPolicy",
"Invoke",
"ModifyInstanceAttribute",
"PassRole",
"PutGroupPolicy",
"PutPipelineDefinition",
"PutRolePermissionsBoundary",
"PutRolePolicy",
"PutUserPermissionsBoundary",
"PutUserPolicy",
"ReplaceIamInstanceProfileAssociation",
"RunInstances",
"SetDefaultPolicyVersion",
"UpdateAccessKey",
"UpdateAssumeRolePolicy",
"UpdateDevEndpoint",
"UpdateEventSourceMapping",
"UpdateFunctionCode",
"UpdateJob",
"UpdateLoginProfile",
]
# aws.cloudtrail_threat_detection_enumeration
threat_detection_enumeration_threshold: 0.3 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.3 (30%)
threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
threat_detection_enumeration_actions:
[
"DescribeAccessEntry",
"DescribeAccountAttributes",
"DescribeAvailabilityZones",
"DescribeBundleTasks",
"DescribeCarrierGateways",
"DescribeClientVpnRoutes",
"DescribeCluster",
"DescribeDhcpOptions",
"DescribeFlowLogs",
"DescribeImages",
"DescribeInstanceAttribute",
"DescribeInstanceInformation",
"DescribeInstanceTypes",
"DescribeInstances",
"DescribeInstances",
"DescribeKeyPairs",
"DescribeLogGroups",
"DescribeLogStreams",
"DescribeOrganization",
"DescribeRegions",
"DescribeSecurityGroups",
"DescribeSnapshotAttribute",
"DescribeSnapshotTierStatus",
"DescribeSubscriptionFilters",
"DescribeTransitGatewayMulticastDomains",
"DescribeVolumes",
"DescribeVolumesModifications",
"DescribeVpcEndpointConnectionNotifications",
"DescribeVpcs",
"GetAccount",
"GetAccountAuthorizationDetails",
"GetAccountSendingEnabled",
"GetBucketAcl",
"GetBucketLogging",
"GetBucketPolicy",
"GetBucketReplication",
"GetBucketVersioning",
"GetCallerIdentity",
"GetCertificate",
"GetConsoleScreenshot",
"GetCostAndUsage",
"GetDetector",
"GetEbsDefaultKmsKeyId",
"GetEbsEncryptionByDefault",
"GetFindings",
"GetFlowLogsIntegrationTemplate",
"GetIdentityVerificationAttributes",
"GetInstances",
"GetIntrospectionSchema",
"GetLaunchTemplateData",
"GetLaunchTemplateData",
"GetLogRecord",
"GetParameters",
"GetPolicyVersion",
"GetPublicAccessBlock",
"GetQueryResults",
"GetRegions",
"GetSMSAttributes",
"GetSMSSandboxAccountStatus",
"GetSendQuota",
"GetTransitGatewayRouteTableAssociations",
"GetUserPolicy",
"HeadObject",
"ListAccessKeys",
"ListAccounts",
"ListAllMyBuckets",
"ListAssociatedAccessPolicies",
"ListAttachedUserPolicies",
"ListClusters",
"ListDetectors",
"ListDomains",
"ListFindings",
"ListHostedZones",
"ListIPSets",
"ListIdentities",
"ListInstanceProfiles",
"ListObjects",
"ListOrganizationalUnitsForParent",
"ListOriginationNumbers",
"ListPolicyVersions",
"ListRoles",
"ListRoles",
"ListRules",
"ListServiceQuotas",
"ListSubscriptions",
"ListTargetsByRule",
"ListTopics",
"ListUsers",
"LookupEvents",
"Search",
]
# aws.cloudtrail_threat_detection_llm_jacking
threat_detection_llm_jacking_threshold: 0.4 # Percentage of actions found to decide if it is an LLM Jacking attack event, by default is 0.4 (40%)
threat_detection_llm_jacking_minutes: 1440 # Past minutes to search from now for LLM Jacking attacks, by default is 1440 minutes (24 hours)
threat_detection_llm_jacking_actions:
[
"PutUseCaseForModelAccess", # Submits a use case for model access, providing justification (Write).
"PutFoundationModelEntitlement", # Grants entitlement for accessing a foundation model (Write).
"PutModelInvocationLoggingConfiguration", # Configures logging for model invocations (Write).
"CreateFoundationModelAgreement", # Creates a new agreement to use a foundation model (Write).
"InvokeModel", # Invokes a specified Bedrock model for inference using provided prompt and parameters (Read).
"InvokeModelWithResponseStream", # Invokes a Bedrock model for inference with real-time token streaming (Read).
"GetUseCaseForModelAccess", # Retrieves an existing use case for model access (Read).
"GetModelInvocationLoggingConfiguration", # Fetches the logging configuration for model invocations (Read).
"GetFoundationModelAvailability", # Checks the availability of a foundation model for use (Read).
"ListFoundationModelAgreementOffers", # Lists available agreement offers for accessing foundation models (List).
"ListFoundationModels", # Lists the available foundation models in Bedrock (List).
"ListProvisionedModelThroughputs", # Lists the provisioned throughput for previously created models (List).
]
# AWS RDS Configuration
# aws.rds_instance_backup_enabled
# Whether to check RDS instance replicas or not
check_rds_instance_replicas: False
# AWS ACM Configuration
# aws.acm_certificates_expiration_check
days_to_expire_threshold: 7
# aws.acm_certificates_with_secure_key_algorithms
insecure_key_algorithms:
[
"RSA-1024",
"P-192",
"SHA-1",
]
# AWS EKS Configuration
# aws.eks_control_plane_logging_all_types_enabled
# EKS control plane logging types that must be enabled
eks_required_log_types:
[
"api",
"audit",
"authenticator",
"controllerManager",
"scheduler",
]
# aws.eks_cluster_uses_a_supported_version
# EKS clusters must be version 1.28 or higher
eks_cluster_oldest_version_supported: "1.28"
# AWS CodeBuild Configuration
# aws.codebuild_project_no_secrets_in_variables
# CodeBuild sensitive variables that are excluded from the check
excluded_sensitive_environment_variables:
[
]
# AWS ELB Configuration
# aws.elb_is_in_multiple_az
# Minimum number of Availability Zones that a CLB must be in
elb_min_azs: 2
# AWS ELBv2 Configuration
# aws.elbv2_is_in_multiple_az
# Minimum number of Availability Zones that an ELBv2 must be in
elbv2_min_azs: 2
# AWS Secrets Configuration
# Patterns to ignore in the secrets checks
secrets_ignore_patterns: []
# AWS Secrets Manager Configuration
# aws.secretsmanager_secret_unused
# Maximum number of days a secret can be unused
max_days_secret_unused: 90
# aws.secretsmanager_secret_rotated_periodically
# Maximum number of days a secret should be rotated
max_days_secret_unrotated: 90
# AWS Kinesis Configuration
# Minimum retention period in hours for Kinesis streams
min_kinesis_stream_retention_hours: 168 # 7 days
# Azure Configuration
azure:
# Azure Network Configuration
# azure.network_public_ip_shodan
# TODO: create common config
shodan_api_key: null
# Azure App Service
# azure.app_ensure_php_version_is_latest
php_latest_version: "8.2"
# azure.app_ensure_python_version_is_latest
python_latest_version: "3.12"
# azure.app_ensure_java_version_is_latest
java_latest_version: "17"
# Azure SQL Server
# azure.sqlserver_minimal_tls_version
recommended_minimal_tls_versions:
[
"1.2",
"1.3",
]
# GCP Configuration
gcp:
# GCP Compute Configuration
# gcp.compute_public_address_shodan
shodan_api_key: null
# Kubernetes Configuration
kubernetes:
# Kubernetes API Server
# kubernetes.apiserver_audit_log_maxbackup_set
audit_log_maxbackup: 10
# kubernetes.apiserver_audit_log_maxsize_set
audit_log_maxsize: 100
# kubernetes.apiserver_audit_log_maxage_set
audit_log_maxage: 30
# kubernetes.apiserver_strong_ciphers_only
apiserver_strong_ciphers:
[
"TLS_AES_128_GCM_SHA256",
"TLS_AES_256_GCM_SHA384",
"TLS_CHACHA20_POLY1305_SHA256",
]
# Kubelet
# kubernetes.kubelet_strong_ciphers_only
kubelet_strong_ciphers:
[
"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305",
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
"TLS_RSA_WITH_AES_256_GCM_SHA384",
"TLS_RSA_WITH_AES_128_GCM_SHA256",
]
# This is for the secrets used to pull an image from a private repository; more information can be found here: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
imagePullSecrets: []
# This is to override the chart name.
nameOverride: ""
fullnameOverride: ""
# This section builds out the service account; more information can be found here: https://kubernetes.io/docs/concepts/security/service-accounts/
serviceAccount:
# Specifies whether a service account should be created
create: true
# Automatically mount a ServiceAccount's API credentials?
automount: true
# Annotations to add to the service account
annotations: {}
# The name of the service account to use.
# If not set and create is true, a name is generated using the fullname template
name: ""
# This is for setting Kubernetes Annotations to a Pod.
# For more information check out: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/
podAnnotations: {}
# This is for setting Kubernetes Labels to a Pod.
# For more information check out: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/
podLabels: {}
podSecurityContext: {}
# fsGroup: 2000
securityContext: {}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
# This is for setting up a service more information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/
service:
# This sets the service type more information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types
type: ClusterIP
# This sets the ports more information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/#field-spec-ports
port: 80
# This block is for setting up the ingress for more information can be found here: https://kubernetes.io/docs/concepts/services-networking/ingress/
ingress:
enabled: false
className: ""
annotations: {}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
hosts:
- host: chart-example.local
paths:
- path: /
pathType: ImplementationSpecific
tls: []
# - secretName: chart-example-tls
# hosts:
# - chart-example.local
resources: {}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
# This is to setup the liveness and readiness probes more information can be found here: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/
livenessProbe:
httpGet:
path: /
port: http
readinessProbe:
httpGet:
path: /
port: http
# This section is for setting up autoscaling; more information can be found here: https://kubernetes.io/docs/concepts/workloads/autoscaling/
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
# targetMemoryUtilizationPercentage: 80
# Additional volumes on the output Deployment definition.
volumes: []
# - name: foo
# secret:
# secretName: mysecret
# optional: false
# Additional volumeMounts on the output Deployment definition.
volumeMounts: []
# - name: foo
# mountPath: "/etc/foo"
# readOnly: true
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -39,4 +39,3 @@ spec:
path: {{ $value }}
{{- end }}
{{- end }}

View File

@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -0,0 +1,6 @@
apiVersion: v2
name: prowler-ui
description: A Helm chart for Kubernetes
type: application
version: 0.1.0
appVersion: "5.1.1"

View File

@@ -0,0 +1,22 @@
1. Get the application URL by running these commands:
{{- if .Values.ingress.enabled }}
{{- range $host := .Values.ingress.hosts }}
{{- range .paths }}
http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
{{- end }}
{{- end }}
{{- else if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "prowler-ui.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "prowler-ui.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "prowler-ui.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "prowler-ui.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

View File

@@ -0,0 +1,62 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "prowler-ui.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "prowler-ui.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "prowler-ui.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "prowler-ui.labels" -}}
helm.sh/chart: {{ include "prowler-ui.chart" . }}
{{ include "prowler-ui.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "prowler-ui.selectorLabels" -}}
app.kubernetes.io/name: {{ include "prowler-ui.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "prowler-ui.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "prowler-ui.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}
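A quick way to sanity-check the naming helpers above is to render the chart locally. A minimal sketch, assuming the chart directory is ./prowler-ui and using an illustrative release name:

# Without overrides, the fullname is "<release>-<chart>", truncated to 63 characters:
helm template my-prowler ./prowler-ui | grep "name: my-prowler-prowler-ui"
# With fullnameOverride set, that value wins (still truncated to 63 characters):
helm template my-prowler ./prowler-ui --set fullnameOverride=prowler-console | grep "name: prowler-console"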

View File

@@ -0,0 +1,72 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "prowler-ui.fullname" . }}
  labels:
    {{- include "prowler-ui.labels" . | nindent 4 }}
spec:
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "prowler-ui.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      annotations:
        checksum/config: {{ include (print $.Template.BasePath "/secrets.yaml") . | sha256sum }}
        {{- with .Values.podAnnotations }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
      labels:
        {{- include "prowler-ui.labels" . | nindent 8 }}
        {{- with .Values.podLabels }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      serviceAccountName: {{ include "prowler-ui.serviceAccountName" . }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          envFrom:
            - secretRef:
                name: {{ include "prowler-ui.fullname" $ }}
          ports:
            - name: http
              containerPort: {{ .Values.service.port }}
              protocol: TCP
          livenessProbe:
            {{- toYaml .Values.livenessProbe | nindent 12 }}
          readinessProbe:
            {{- toYaml .Values.readinessProbe | nindent 12 }}
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
          {{- with .Values.volumeMounts }}
          volumeMounts:
            {{- toYaml . | nindent 12 }}
          {{- end }}
      {{- with .Values.volumes }}
      volumes:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
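Because the pod template carries a checksum/config annotation computed from secrets.yaml, any change to the values under secrets rolls the Deployment. A minimal sketch, assuming the release is named prowler-ui and lives in a prowler namespace (both are illustrative):

helm upgrade prowler-ui ./prowler-ui -n prowler --reuse-values \
  --set secrets.SITE_URL=https://prowler.example.com   # placeholder value
# The changed checksum annotation forces the pods to be re-created:
kubectl -n prowler rollout status deployment/prowler-ui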

View File

@@ -0,0 +1,43 @@
{{- if .Values.ingress.enabled -}}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: {{ include "prowler-ui.fullname" . }}
  labels:
    {{- include "prowler-ui.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- with .Values.ingress.className }}
  ingressClassName: {{ . }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    {{- range .Values.ingress.hosts }}
    - host: {{ .host | quote }}
      http:
        paths:
          {{- range .paths }}
          - path: {{ .path }}
            {{- with .pathType }}
            pathType: {{ . }}
            {{- end }}
            backend:
              service:
                name: {{ include "prowler-ui.fullname" $ }}
                port:
                  number: {{ $.Values.service.port }}
          {{- end }}
    {{- end }}
{{- end }}
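Enabling this template only requires flipping the flag and supplying a host in the values. The snippet below is a sketch with a placeholder hostname, ingress class, and TLS secret name:

ingress:
  enabled: true
  className: nginx
  hosts:
    - host: prowler.example.com          # placeholder hostname
      paths:
        - path: /
          pathType: Prefix
  tls:
    - secretName: prowler-ui-tls         # assumes a pre-created TLS secret
      hosts:
        - prowler.example.com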

View File

@@ -0,0 +1,11 @@
apiVersion: v1
kind: Secret
metadata:
  name: {{ include "prowler-ui.fullname" . }}
  labels:
    {{- include "prowler-ui.labels" . | nindent 4 }}
type: Opaque
data:
  {{- range $k, $v := .Values.secrets }}
  {{ $k }}: {{ $v | toString | b64enc | quote }}
  {{- end }}
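Each key under .Values.secrets is stringified and base64-encoded into this Secret. A sketch of the round trip for the default SITE_URL value:

# values.yaml (input)
secrets:
  SITE_URL: http://localhost:3000

# rendered Secret (output)
data:
  SITE_URL: "aHR0cDovL2xvY2FsaG9zdDozMDAw"   # base64 of http://localhost:3000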

View File

@@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
  name: {{ include "prowler-ui.fullname" . }}
  labels:
    {{- include "prowler-ui.labels" . | nindent 4 }}
spec:
  type: {{ .Values.service.type }}
  ports:
    - port: {{ .Values.service.port }}
      targetPort: http
      protocol: TCP
      name: http
  selector:
    {{- include "prowler-ui.selectorLabels" . | nindent 4 }}

View File

@@ -0,0 +1,13 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
  name: {{ include "prowler-ui.serviceAccountName" . }}
  labels:
    {{- include "prowler-ui.labels" . | nindent 4 }}
  {{- with .Values.serviceAccount.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
automountServiceAccountToken: {{ .Values.serviceAccount.automount }}
{{- end }}

View File

@@ -0,0 +1,132 @@
# Default values for prowler-ui.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

# This sets the replica count. More information can be found here: https://kubernetes.io/docs/concepts/workloads/controllers/replicaset/
replicaCount: 1

# This sets the container image. More information can be found here: https://kubernetes.io/docs/concepts/containers/images/
image:
  repository: prowlercloud/prowler-ui
  # This sets the pull policy for images.
  pullPolicy: IfNotPresent
  # Overrides the image tag whose default is the chart appVersion.
  tag: ""

# This is for the secrets used to pull an image from a private repository. More information can be found here: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
imagePullSecrets: []
# This is to override the chart name.
nameOverride: ""
fullnameOverride: ""

secrets:
  SITE_URL: http://localhost:3000
  API_BASE_URL: http://prowler-api:8080/api/v1
  NEXT_PUBLIC_API_DOCS_URL: http://prowler-api:8080/api/v1/docs
  AUTH_TRUST_HOST: True
  UI_PORT: 3000
  # openssl rand -base64 32
  AUTH_SECRET:

# This section builds out the service account. More information can be found here: https://kubernetes.io/docs/concepts/security/service-accounts/
serviceAccount:
  # Specifies whether a service account should be created
  create: true
  # Automatically mount a ServiceAccount's API credentials?
  automount: true
  # Annotations to add to the service account
  annotations: {}
  # The name of the service account to use.
  # If not set and create is true, a name is generated using the fullname template
  name: ""

# This is for setting Kubernetes annotations on a Pod.
# For more information, check out: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/
podAnnotations: {}
# This is for setting Kubernetes labels on a Pod.
# For more information, check out: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/
podLabels: {}

podSecurityContext: {}
  # fsGroup: 2000

securityContext: {}
  # capabilities:
  #   drop:
  #   - ALL
  # readOnlyRootFilesystem: true
  # runAsNonRoot: true
  # runAsUser: 1000

# This is for setting up a service. More information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/
service:
  # This sets the service type. More information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types
  type: ClusterIP
  # This sets the ports. More information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/#field-spec-ports
  port: 3000

# This block is for setting up the ingress. More information can be found here: https://kubernetes.io/docs/concepts/services-networking/ingress/
ingress:
  enabled: false
  className: ""
  annotations: {}
    # kubernetes.io/ingress.class: nginx
    # kubernetes.io/tls-acme: "true"
  hosts:
    - host: chart-example.local
      paths:
        - path: /
          pathType: ImplementationSpecific
  tls: []
  #  - secretName: chart-example-tls
  #    hosts:
  #      - chart-example.local

resources: {}
  # We usually recommend not to specify default resources and to leave this as a conscious
  # choice for the user. This also increases the chances that charts run in environments with little
  # resources, such as Minikube. If you do want to specify resources, uncomment the following
  # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
  # limits:
  #   cpu: 100m
  #   memory: 128Mi
  # requests:
  #   cpu: 100m
  #   memory: 128Mi

# This is to set up the liveness and readiness probes. More information can be found here: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/
livenessProbe:
  httpGet:
    path: /
    port: http
readinessProbe:
  httpGet:
    path: /
    port: http

# This section is for setting up autoscaling. More information can be found here: https://kubernetes.io/docs/concepts/workloads/autoscaling/
autoscaling:
  enabled: false
  minReplicas: 1
  maxReplicas: 100
  targetCPUUtilizationPercentage: 80
  # targetMemoryUtilizationPercentage: 80

# Additional volumes on the output Deployment definition.
volumes: []
# - name: foo
#   secret:
#     secretName: mysecret
#     optional: false

# Additional volumeMounts on the output Deployment definition.
volumeMounts: []
# - name: foo
#   mountPath: "/etc/foo"
#   readOnly: true

nodeSelector: {}

tolerations: []

affinity: {}
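AUTH_SECRET is intentionally left empty above, so it has to be supplied at deploy time. A minimal sketch, assuming the chart is installed from a local ./prowler-ui directory into a prowler namespace (both names are illustrative):

helm install prowler-ui ./prowler-ui \
  --namespace prowler --create-namespace \
  --set secrets.AUTH_SECRET="$(openssl rand -base64 32)" \
  --set secrets.SITE_URL=http://localhost:3000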

View File

@@ -0,0 +1,24 @@
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
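For context, the compliance pages hand get_table a pre-filtered findings DataFrame. The sketch below uses a single made-up row just to show the expected column shape (it assumes the dashboard package is importable):

import pandas as pd

sample = pd.DataFrame(
    [
        {
            "REQUIREMENTS_ID": "1.1",
            "REQUIREMENTS_DESCRIPTION": "Maintain current contact details",
            "REQUIREMENTS_ATTRIBUTES_SECTION": "1. Identity and Access Management",
            "CHECKID": "account_maintain_current_contact_details",
            "STATUS": "PASS",
            "REGION": "us-east-1",
            "ACCOUNTID": "123456789012",
            "RESOURCEID": "arn:aws:iam::123456789012:root",
        }
    ]
)

# Groups the rows by requirement and section and returns the containers that
# the compliance page renders.
containers = get_table(sample)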

View File

@@ -0,0 +1,25 @@
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )

View File

@@ -0,0 +1,24 @@
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )

View File

@@ -0,0 +1,23 @@
import warnings

from dashboard.common_methods import get_section_container_iso

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ATTRIBUTES_CATEGORY",
            "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID",
            "REQUIREMENTS_ATTRIBUTES_OBJETIVE_NAME",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]

    return get_section_container_iso(
        aux, "REQUIREMENTS_ATTRIBUTES_CATEGORY", "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID"
    )

View File

@@ -0,0 +1,24 @@
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )

View File

@@ -0,0 +1,23 @@
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )

View File

@@ -0,0 +1,23 @@
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )

View File

@@ -0,0 +1,24 @@
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )

View File

@@ -76,7 +76,6 @@ def load_csv_files(csv_files):
result = result.replace("_AZURE", " - AZURE")
if "KUBERNETES" in result:
result = result.replace("_KUBERNETES", " - KUBERNETES")
result = result[result.find("CIS_") :]
results.append(result)
unique_results = set(results)
@@ -532,8 +531,8 @@ def get_bar_graph(df, column_name):
# Cut the text if it is too long
for i in range(len(colums)):
if len(colums[i]) > 15:
colums[i] = colums[i][:15] + "..."
if len(colums[i]) > 43:
colums[i] = colums[i][:43] + "..."
fig = px.bar(
df,

View File

@@ -165,9 +165,21 @@ else:
)
# For the timestamp, remove the two columns and keep only the date
data["TIMESTAMP"] = pd.to_datetime(data["TIMESTAMP"])
data["ASSESSMENT_TIME"] = data["TIMESTAMP"].dt.strftime("%Y-%m-%d %H:%M:%S")
# Handle findings from v3 outputs
if "FINDING_UNIQUE_ID" in data.columns:
data.rename(columns={"FINDING_UNIQUE_ID": "FINDING_UID"}, inplace=True)
if "ACCOUNT_ID" in data.columns:
data.rename(columns={"ACCOUNT_ID": "ACCOUNT_UID"}, inplace=True)
if "ASSESSMENT_START_TIME" in data.columns:
data.rename(columns={"ASSESSMENT_START_TIME": "TIMESTAMP"}, inplace=True)
if "RESOURCE_ID" in data.columns:
data.rename(columns={"RESOURCE_ID": "RESOURCE_UID"}, inplace=True)
# Remove duplicates on the FINDING_UID column, keeping only the most recent entry according to the timestamp
data = data.sort_values("TIMESTAMP").drop_duplicates("FINDING_UID", keep="last")
data["ASSESSMENT_TIME"] = data["TIMESTAMP"].dt.strftime("%Y-%m-%d")
data_valid = pd.DataFrame()
for account in data["ACCOUNT_UID"].unique():
all_times = data[data["ACCOUNT_UID"] == account]["ASSESSMENT_TIME"].unique()
@@ -1720,7 +1732,7 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
[
html.P(
html.Strong(
"Recomendation: ",
"Recommendation: ",
style={
"margin-right": "5px"
},
@@ -1744,7 +1756,7 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
[
html.P(
html.Strong(
"RecomendationUrl: ",
"RecommendationUrl: ",
style={
"margin-right": "5px"
},
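The renaming block added above normalises Prowler v3 column names and then keeps only the newest row per finding UID. A self-contained sketch of that de-duplication step, with made-up data:

import pandas as pd

data = pd.DataFrame(
    {
        "FINDING_UID": ["f-1", "f-1", "f-2"],
        "TIMESTAMP": pd.to_datetime(["2025-03-01", "2025-03-03", "2025-03-02"]),
        "STATUS": ["FAIL", "PASS", "PASS"],
    }
)

# Sorting by TIMESTAMP and keeping the last occurrence leaves the most recent
# result per finding: f-1 survives as PASS from 2025-03-03.
data = data.sort_values("TIMESTAMP").drop_duplicates("FINDING_UID", keep="last")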

Some files were not shown because too many files have changed in this diff.