Compare commits

..

341 Commits

Author SHA1 Message Date
github-actions
95d64399f3 chore(release): 3.14.0 2024-02-20 15:49:19 +00:00
Rubén De la Torre Vico
da1f266d1b feat(azure): new checks related with VMs service. (#3408)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-02-20 15:34:34 +00:00
Pedro Martín
9a22c2de8b feat(azure): Add new checks related to Network service (#3402)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-02-20 14:08:19 +00:00
Nacho Rivera
7d3c6a4a5e chore(release): update Prowler Version to 3.13.1 (#3420)
Co-authored-by: github-actions <noreply@github.com>
2024-02-20 13:13:27 +00:00
Pepe Fagoaga
753f32b4cb fix(inspector2): Report must have status field (#3419) 2024-02-20 12:58:03 +01:00
dependabot[bot]
bdf3236350 build(deps): bump google-api-python-client from 2.117.0 to 2.118.0 (#3417)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-20 10:51:49 +00:00
dependabot[bot]
d8a505b87c build(deps): bump mkdocs-material from 9.5.9 to 9.5.10 (#3416)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-20 09:52:19 +00:00
dependabot[bot]
caf021a7a6 build(deps): bump slack-sdk from 3.26.2 to 3.27.0 (#3415)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-20 09:50:48 +01:00
dependabot[bot]
3776856a6c build(deps-dev): bump pytest from 8.0.0 to 8.0.1 (#3414)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-20 09:28:44 +01:00
dependabot[bot]
c9f87b907c build(deps-dev): bump moto from 5.0.1 to 5.0.2 (#3413)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-20 08:30:33 +01:00
dependabot[bot]
ae378b6d50 build(deps): bump trufflesecurity/trufflehog from 3.67.5 to 3.67.6 (#3412)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-20 08:30:17 +01:00
Pedro Martín
f7afd7d1d6 feat(azure): Add new checks related to PostgreSQL service (#3409) 2024-02-19 11:33:59 +00:00
Rubén De la Torre Vico
c92a99baaf fix(azure): Typo in appinsights service (#3407)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2024-02-19 11:05:28 +00:00
Pepe Fagoaga
3c82d89aa4 fix(labeler): Work on forks too (#3410) 2024-02-19 11:04:37 +00:00
Nacho Rivera
69aedb8490 chore(regions_update): Changes in regions for AWS services. (#3406)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-02-16 10:45:17 +01:00
Rubén De la Torre Vico
af00c5382b feat(azure): checks related with MySQL service (#3385)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-02-16 10:40:41 +01:00
Pepe Fagoaga
8e93493d2b test(aws): Add default Boto3 credentials (#3404) 2024-02-16 09:13:51 +01:00
Pepe Fagoaga
ac439060a3 fix(labeler): Add right path for testing (#3405) 2024-02-16 09:13:25 +01:00
Pepe Fagoaga
d6f28be8f2 chore(pull-request): Add automatic labeler (#3398) 2024-02-15 14:26:41 +01:00
Nacho Rivera
d3946840de chore(regions_update): Changes in regions for AWS services. (#3401)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-02-15 14:25:37 +01:00
Pedro Martín
355f589e5a feat(azure): New Azure checks related to CosmosDB (#3386)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-02-13 13:53:36 +01:00
Rubén De la Torre Vico
4740a7b930 feat(azure): check related with App Insights service (#3395) 2024-02-13 13:27:12 +01:00
Hugo966
cc71249e21 fix(storage): update metadata with CIS 2.0 in storage_default_network_access_rule_is_denied (#3387) 2024-02-13 12:05:39 +01:00
dependabot[bot]
ccd9e27823 build(deps): bump google-api-python-client from 2.116.0 to 2.117.0 (#3391)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-13 10:52:21 +01:00
Sergio Garcia
9f16e4dc81 fix(backup): handle if last_attempted_execution_date is None (#3394) 2024-02-13 10:25:49 +01:00
dependabot[bot]
eca7f7be61 build(deps): bump mkdocs-material from 9.5.6 to 9.5.9 (#3392)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-13 10:25:35 +01:00
dependabot[bot]
409675e0c0 build(deps-dev): bump bandit from 1.7.6 to 1.7.7 (#3390)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-13 09:50:44 +01:00
dependabot[bot]
f9c839bfdc build(deps): bump trufflesecurity/trufflehog from 3.67.2 to 3.67.5 (#3393)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-13 09:24:11 +01:00
dependabot[bot]
47e212ee17 build(deps-dev): bump black from 24.1.1 to 24.2.0 (#3389)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-13 09:23:55 +01:00
Nacho Rivera
042976fac3 chore(regions_update): Changes in regions for AWS services. (#3384)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-02-09 13:44:26 +01:00
Sergio Garcia
5b45bbb1a5 chore(list): list compliance and categories sorted (#3381) 2024-02-08 16:54:47 +01:00
Sergio Garcia
9bb702076a chore(release): update Prowler Version to 3.13.0 (#3380)
Co-authored-by: github-actions <noreply@github.com>
2024-02-08 15:09:13 +01:00
Sergio Garcia
8ed97810a8 feat(cis): add new CIS AWS v3.0.0 (#3379)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2024-02-08 13:31:12 +01:00
Sergio Garcia
c5af9605ee fix(alias): allow multiple check aliases (#3378) 2024-02-08 12:21:42 +01:00
Iain Wallace
f5a18dce56 fix(cis): update CIS AWS v2.0 Section 2.1 refs (#3375)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-02-08 12:09:49 +01:00
Sergio Garcia
d14d8f5e02 chore(regions_update): Changes in regions for AWS services. (#3377) 2024-02-08 10:42:19 +01:00
Pepe Fagoaga
eadc66f53b fix(allowlist): Handle tags and resources (#3376) 2024-02-08 10:06:02 +01:00
Sergio Garcia
5f946d08cb chore(regions_update): Changes in regions for AWS services. (#3370) 2024-02-07 17:57:29 +01:00
Rubén De la Torre Vico
3f7c37abb9 feat(defender): New Terraform URL for metadata checks (#3374) 2024-02-07 16:02:56 +01:00
Pedro Martín
b60b48b948 feat(Azure): Add 4 new checks related to SQLServer and Vulnerability Assessment (#3372) 2024-02-07 16:01:52 +01:00
Sergio Garcia
68ecf939d9 feat(python): support Python 3.12 (#3371) 2024-02-07 15:16:02 +01:00
Rubén De la Torre Vico
a50d093679 fix(defender): Manage 404 exception for "default" security contacts (#3373) 2024-02-07 13:38:20 +01:00
Rubén De la Torre Vico
740e829e4f feat(azure): Defender check defender_ensure_iot_hub_defender_is_on (#3367) 2024-02-07 12:46:02 +01:00
Pedro Martín
f7051351ec fix(azure): Fix check sqlserver_auditing_retention_90_days (#3365) 2024-02-06 17:17:10 +01:00
dependabot[bot]
a1018ad683 build(deps): bump aiohttp from 3.9.1 to 3.9.2 (#3366)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-06 13:52:05 +01:00
dependabot[bot]
a912189e51 build(deps): bump msgraph-core from 0.2.2 to 1.0.0 (#3309)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-02-06 13:35:22 +01:00
Sergio Garcia
7298f64e5c fix(s3): add s3:Get* case to s3_bucket_policy_public_write_access (#3364) 2024-02-06 13:04:55 +01:00
Rubén De la Torre Vico
fcf902eb1f feat(azure): Defender checks related to defender settings (#3347)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-02-06 12:23:36 +01:00
Sergio Garcia
89c71a068b chore(pre-commit): remove pytest from pre-commit (#3363) 2024-02-06 11:22:00 +01:00
dependabot[bot]
8946145070 build(deps-dev): bump coverage from 7.4.0 to 7.4.1 (#3357)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-06 10:50:20 +01:00
Sergio Garcia
db15c0de9e fix(rds): verify SGs in rds_instance_no_public_access (#3341)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-02-06 10:49:58 +01:00
dependabot[bot]
643a918034 build(deps-dev): bump moto from 5.0.0 to 5.0.1 (#3358)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-06 10:33:51 +01:00
Sergio Garcia
f21dcd8122 chore(inspector): refactor inspector2_findings_exist check into two (#3338) 2024-02-06 10:32:19 +01:00
dependabot[bot]
ac44d4a27b build(deps-dev): bump black from 22.12.0 to 24.1.1 (#3356)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-02-06 10:17:01 +01:00
dependabot[bot]
9c898c34f6 build(deps): bump cryptography from 41.0.6 to 42.0.0 (#3362)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-06 10:05:34 +01:00
dependabot[bot]
c0e0ddbc1c build(deps): bump trufflesecurity/trufflehog from 3.66.1 to 3.67.2 (#3361)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-06 10:05:14 +01:00
dependabot[bot]
6c756ea52f build(deps): bump codecov/codecov-action from 3 to 4 (#3360)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-06 10:04:56 +01:00
dependabot[bot]
0a413b6fd2 build(deps): bump peter-evans/create-pull-request from 5 to 6 (#3359)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-06 09:58:55 +01:00
dependabot[bot]
7ac7d9c9a8 build(deps): bump google-api-python-client from 2.113.0 to 2.116.0 (#3355)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-06 09:58:28 +01:00
Toni de la Fuente
7322d0bd30 chore(docs): Update README.md (#3353)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-02-05 17:52:46 +01:00
Pedro Martín
469cc749d8 feat(readme): Update readme with new numbers for Prowler checks (#3354) 2024-02-05 17:49:43 +01:00
Toni de la Fuente
e91a694b46 chore(docs): update CODE_OF_CONDUCT.md (#3352) 2024-02-05 17:27:12 +01:00
Pedro Martín
4587a9f651 refactor(azure): Change class names from azure services and fix typing error (#3350) 2024-02-05 15:43:04 +01:00
Rubén De la Torre Vico
8c51094df1 fix(storage): Manage None type for key_expiration_period_in_days (#3351) 2024-02-05 15:42:03 +01:00
Rubén De la Torre Vico
c795d76fe9 feat(azure): Defender checks related to security contacts and notifications (#3344) 2024-02-05 13:51:56 +01:00
Pepe Fagoaga
c6e8a0b6d3 fix(organizations): Handle non existent policy (#3319) 2024-02-05 12:37:08 +01:00
dependabot[bot]
b23be4164f build(deps-dev): bump moto from 4.2.13 to 5.0.0 (#3329)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-02-05 12:06:16 +01:00
Pedro Martín
de77f3ff13 feat(azure): new check sqlserver_vulnerability_assessment_enabled (#3349) 2024-02-05 11:39:05 +01:00
Pedro Martín
7c0ff1ff6a feat(azure): New Azure SQLServer related check sqlserver_auditing_retention_90_days (#3345) 2024-02-05 10:58:44 +01:00
Sergio Garcia
888cb92987 chore(regions_update): Changes in regions for AWS services. (#3342)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-02-05 09:37:02 +01:00
Sergio Garcia
9a038f7bed chore(regions_update): Changes in regions for AWS services. (#3348)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-02-05 09:36:48 +01:00
Sergio Garcia
b98f245bf2 chore(regions_update): Changes in regions for AWS services. (#3339)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-02-05 09:20:26 +01:00
Sergio Garcia
e59b5caaf9 chore(regions_update): Changes in regions for AWS services. (#3333)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-02-05 09:20:09 +01:00
Sergio Garcia
5a602d7adb chore(regions_update): Changes in regions for AWS services. (#3325)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-02-05 09:18:49 +01:00
Pedro Martín
14aa7a3f67 feat(azure): SQLServer checks related to TDE encryption (#3343) 2024-02-02 11:35:18 +01:00
Pedro Martín
6e991107e7 feat(azure): New check storage_ensure_soft_delete_is_enabled (#3334) 2024-01-31 13:29:20 +01:00
Rubén De la Torre Vico
622bce9c52 feat(azure): Add check defender_ensure_system_updates_are_applied and defender_auto_provisioning_vulnerabilty_assessments_machines_on (#3327) 2024-01-31 12:29:45 +01:00
Pedro Martín
48587bd034 feat(compliance): account security onboarding compliance framework (#3286)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-01-31 10:18:31 +01:00
Rubén De la Torre Vico
19d6352950 fix(GuardDuty): fix class name (#3337) 2024-01-30 14:43:55 +01:00
dependabot[bot]
2c4b5c99ce build(deps): bump mkdocs-material from 9.5.4 to 9.5.6 (#3330)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-30 10:59:15 +01:00
dependabot[bot]
15a194c9b0 build(deps-dev): bump pytest from 7.4.4 to 8.0.0 (#3331)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-30 10:15:07 +01:00
dependabot[bot]
e94e3cead9 build(deps): bump trufflesecurity/trufflehog from 3.63.11 to 3.66.1 (#3332)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-30 10:14:51 +01:00
dependabot[bot]
ee2ed92fb5 build(deps-dev): bump vulture from 2.10 to 2.11 (#3328)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-30 09:46:17 +01:00
Pedro Martín
db4579435a feat(azure): add new check storage_ensure_private_endpoints_in_storage_accounts (#3326)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-01-29 13:55:19 +01:00
Pedro Martín
ae1ab1d957 feat(azure): Add new check storage_key_rotation_90_days (#3323) 2024-01-29 12:57:19 +01:00
Rubén De la Torre Vico
a8edd03e65 feat(azure): Add check defender_auto_provisioning_log_analytics_agent_vms_on (#3322)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2024-01-29 11:02:49 +01:00
Pepe Fagoaga
8768b4cc31 chore(actions): Add AWS tag to the update regions bot (#3321) 2024-01-29 10:15:16 +01:00
Pedro Martín
cd9c192208 chore(azure): Remove all unnecessary init methods in @dataclass (#3324) 2024-01-26 13:15:42 +01:00
Sergio Garcia
dcd97e7d26 chore(regions_update): Changes in regions for AWS services. (#3320)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-26 10:50:14 +01:00
Pedro Martín
8a6ae68b9a feat(azure): Add new check "iam_custom_role_permits_administering_resource_locks" (#3317)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-01-25 14:29:29 +01:00
Sergio Garcia
dff3e72e7d chore(regions_update): Changes in regions for AWS services. (#3318)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-25 14:14:27 +01:00
Sergio Garcia
f0ac440146 chore(regions_update): Changes in regions for AWS services. (#3316)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-24 11:57:11 +01:00
dependabot[bot]
7d7e5f4e1d build(deps): bump azure-mgmt-security from 5.0.0 to 6.0.0 (#3312)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-23 13:55:28 +01:00
Antoine Ansari
a21dd4a2ed feat(quick-inventory): custom output file in quick inventory (#3306)
Co-authored-by: antoinea <antoinea@padok.fr>
2024-01-23 10:05:45 +01:00
dependabot[bot]
7f4e5bf435 build(deps-dev): bump safety from 2.3.5 to 3.0.1 (#3313)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-23 09:13:19 +01:00
dependabot[bot]
dad590f070 build(deps): bump pydantic from 1.10.13 to 1.10.14 (#3311)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-23 08:57:26 +01:00
dependabot[bot]
f22b81fe3b build(deps): bump trufflesecurity/trufflehog from 3.63.9 to 3.63.11 (#3307)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-23 08:40:55 +01:00
dependabot[bot]
68c1acbc7a build(deps): bump tj-actions/changed-files from 41 to 42 (#3308)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-23 08:40:37 +01:00
dependabot[bot]
e5412404ca build(deps): bump jsonschema from 4.20.0 to 4.21.1 (#3310)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-23 08:40:13 +01:00
Sergio Garcia
5e733f6217 chore(regions_update): Changes in regions for AWS services. (#3303)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-22 09:23:39 +01:00
Pepe Fagoaga
c830e4e399 docs(security-hub): Add integration steps and images (#3304) 2024-01-22 09:13:24 +01:00
Pepe Fagoaga
c3ecd2b3e5 docs(security-hub): improve documentation and clarify steps (#3301) 2024-01-18 13:55:07 +01:00
Sergio Garcia
fd4d2db467 fix(BadRequest): add BadRequest exception to WellArchitected (#3300) 2024-01-18 10:42:27 +01:00
Sergio Garcia
49b76ab050 chore(docs): update documentation (#3297) 2024-01-18 10:40:06 +01:00
Sergio Garcia
c53f931d09 fix(NoSuchEntity): add NoSuchEntity exception to IAM (#3299) 2024-01-18 10:39:09 +01:00
Sergio Garcia
f344dbbc07 chore(regions_update): Changes in regions for AWS services. (#3298)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-18 10:35:23 +01:00
Esteban Mendoza
c617c10ffa fix(acm): adding more details on remaining expiration days (#3293)
Co-authored-by: Esteban <mendoza@versprite.com>
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2024-01-17 09:42:19 +01:00
Sergio Garcia
4a15625bf9 chore(compliance): make SocType attribute general (#3287) 2024-01-16 13:41:08 +01:00
dependabot[bot]
c5def6d736 build(deps): bump mkdocs-material from 9.5.3 to 9.5.4 (#3285)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-16 08:07:11 +01:00
dependabot[bot]
b232b675a7 build(deps): bump actions/checkout from 3 to 4 (#3284)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-16 08:06:54 +01:00
dependabot[bot]
6c03683c20 build(deps): bump peter-evans/create-pull-request from 4 to 5 (#3283)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-16 08:06:37 +01:00
dependabot[bot]
2da57db5a8 build(deps): bump docker/login-action from 2 to 3 (#3282)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-16 08:05:59 +01:00
dependabot[bot]
c7b794c1c4 build(deps): bump docker/build-push-action from 2 to 5 (#3281)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-16 08:05:44 +01:00
dependabot[bot]
5154cec7d2 build(deps): bump slack-sdk from 3.26.1 to 3.26.2 (#3280)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-15 11:44:57 +01:00
dependabot[bot]
e4cbb3c90e build(deps): bump actions/setup-python from 2 to 5 (#3277)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-15 11:36:01 +01:00
dependabot[bot]
17f5cbeac2 build(deps): bump docker/setup-buildx-action from 2 to 3 (#3276)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-15 11:30:25 +01:00
dependabot[bot]
90a4924508 build(deps): bump github/codeql-action from 2 to 3 (#3279)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-15 11:29:58 +01:00
dependabot[bot]
d499053016 build(deps): bump aws-actions/configure-aws-credentials from 1 to 4 (#3278)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-15 11:29:39 +01:00
dependabot[bot]
d343a67d6a build(deps): bump trufflesecurity/trufflehog from 3.4.4 to 3.63.9 (#3275)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-15 11:29:30 +01:00
Pepe Fagoaga
8435ab48b0 chore(dependabot): Run for GHA (#3274) 2024-01-15 11:19:44 +01:00
Sergio Garcia
27edf0f55a chore(regions_update): Changes in regions for AWS services. (#3273)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-15 10:53:55 +01:00
Sergio Garcia
3d00554332 chore(README): update syntax of supported Python versions (#3271) 2024-01-12 12:59:56 +01:00
Toni de la Fuente
2631709abf docs(README): Update Kubernetes development status and Python supported versions (#3270) 2024-01-12 12:17:06 +01:00
Sergio Garcia
4b0102b309 chore(release): update Prowler Version to 3.12.1 (#3269)
Co-authored-by: github-actions <noreply@github.com>
2024-01-12 11:52:02 +01:00
Nacho Rivera
b9a24e0338 fix(fms): handle list compliance status error (#3259) 2024-01-12 11:00:07 +01:00
Sergio Garcia
f127d4a8b1 chore(regions_update): Changes in regions for AWS services. (#3268)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-12 10:15:16 +01:00
Pepe Fagoaga
73780682a1 fix(allowlist): Handle empty exceptions (#3266) 2024-01-12 09:54:03 +01:00
dependabot[bot]
9a1c034a51 build(deps): bump jinja2 from 3.1.2 to 3.1.3 (#3267)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-12 08:31:37 +01:00
Sergio Garcia
94179f27ec chore(readme): remove deprecated library name (#3251) 2024-01-11 17:55:44 +01:00
Pepe Fagoaga
6797b5a93d fix(apigatewayv2_api_access_logging_enabled): Finding ID should be unique (#3263) 2024-01-11 15:15:48 +01:00
Nacho Rivera
874a131ec9 chore(precommit): set trufflehog as command (#3262) 2024-01-11 11:47:19 +01:00
Nacho Rivera
641727ee0e fix(rds): handle api call error response (#3258) 2024-01-11 09:50:44 +01:00
dependabot[bot]
f50075257c build(deps-dev): bump gitpython from 3.1.37 to 3.1.41 (#3257) 2024-01-11 09:50:16 +01:00
Sergio Garcia
4d1de8f75c chore(regions_update): Changes in regions for AWS services. (#3256)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-10 10:20:50 +01:00
Pepe Fagoaga
b76d0153eb chore(s3): Update log not to duplicate it (#3255) 2024-01-10 10:00:02 +01:00
Sergio Garcia
f82789b99f chore(regions_update): Changes in regions for AWS services. (#3249)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-09 10:31:05 +01:00
dependabot[bot]
89c789ce10 build(deps-dev): bump flake8 from 6.1.0 to 7.0.0 (#3246)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-09 10:02:54 +01:00
Pepe Fagoaga
6dba54b028 docs: Add Codecov badge (#3248) 2024-01-09 09:54:30 +01:00
dependabot[bot]
d852cb4ed6 build(deps): bump google-api-python-client from 2.111.0 to 2.113.0 (#3245)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-09 09:44:47 +01:00
dependabot[bot]
4c666fa1fe build(deps-dev): bump moto from 4.2.12 to 4.2.13 (#3244)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-09 09:01:42 +01:00
Sergio Garcia
98adc1872d chore(release): update Prowler Version to 3.12.0 (#3242)
Co-authored-by: github-actions <noreply@github.com>
2024-01-08 15:05:17 +01:00
Sergio Garcia
1df84ef6e4 chore(role arguments): enhance role arguments validation (#3240) 2024-01-08 14:41:52 +01:00
Sergio Garcia
80b88a9365 chore(exception): handle error in describing regions (#3241) 2024-01-08 14:16:27 +01:00
Fennerr
558b7a54c7 feat(aws): Added AWS role session name parameter (#3234)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-01-08 12:49:13 +01:00
Sergio Garcia
9522d0c733 fix(organizations_scp_check_deny_regions): enhance check logic (#3239) 2024-01-08 12:20:39 +01:00
dependabot[bot]
396d6e5c0e build(deps-dev): bump coverage from 7.3.4 to 7.4.0 (#3233)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-03 18:21:12 +01:00
Sergio Garcia
a69d7471b3 chore(regions_update): Changes in regions for AWS services. (#3236)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2024-01-03 13:34:14 +01:00
dependabot[bot]
eb56e1417c build(deps-dev): bump pytest from 7.4.3 to 7.4.4 (#3232)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-03 13:33:48 +01:00
dependabot[bot]
3d032a8efe build(deps): bump tj-actions/changed-files from 39 to 41 in /.github/workflows (#3235)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-03 13:30:21 +01:00
Sergio Garcia
d712470047 chore(regions_update): Changes in regions for AWS services. (#3231) 2023-12-29 10:56:24 +01:00
Pepe Fagoaga
423f96b95f fix(fms): Handle PolicyComplianceStatusList key error (#3230)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-12-28 18:25:21 +01:00
Sergio Garcia
d1bd097079 chore(regions_update): Changes in regions for AWS services. (#3228)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-12-28 10:24:10 +01:00
Evgenii
ceabe8ecba chore: changed concatenation of strings to f-strings to improve readability (#3227) 2023-12-28 08:51:00 +01:00
Pepe Fagoaga
0fff0568fa fix(allowlist): Analyse single and multi account allowlist if present (#3210)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-12-27 11:02:31 +01:00
dependabot[bot]
10e822238e build(deps): bump google-api-python-client from 2.110.0 to 2.111.0 (#3224)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-26 10:26:13 +01:00
dependabot[bot]
1cf1c827f1 build(deps-dev): bump freezegun from 1.3.1 to 1.4.0 (#3222)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-26 09:33:12 +01:00
dependabot[bot]
5bada440fa build(deps-dev): bump coverage from 7.3.3 to 7.3.4 (#3223)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-26 09:09:29 +01:00
Sergio Garcia
04bb95e044 chore(ENS): add missing ENS mappings (#3218) 2023-12-26 09:08:54 +01:00
dependabot[bot]
819140bc59 build(deps): bump shodan from 1.30.1 to 1.31.0 (#3221)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-26 08:54:01 +01:00
Sergio Garcia
d490bcc955 chore(regions_update): Changes in regions for AWS services. (#3219)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-12-26 08:49:41 +01:00
dependabot[bot]
cb94960178 build(deps): bump mkdocs-material from 9.5.2 to 9.5.3 (#3220)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-26 08:39:31 +01:00
Sergio Garcia
7361c10cb9 fix(s3): handle NoSuchBucketPolicy error (#3217) 2023-12-22 10:57:55 +01:00
Sergio Garcia
b47408e94e fix(trustedadvisor): solve trustedadvisor check metadata (#3216) 2023-12-22 10:56:21 +01:00
Sergio Garcia
806a3590aa chore(regions_update): Changes in regions for AWS services. (#3215)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-12-22 10:49:04 +01:00
Sergio Garcia
e953fe021d chore(regions_update): Changes in regions for AWS services. (#3214) 2023-12-21 11:34:33 +01:00
Sergio Garcia
e570d94a6e chore(regions_update): Changes in regions for AWS services. (#3213)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-20 14:08:52 +01:00
Nacho Rivera
78505cb0a8 chore(sqs_...not_publicly_accessible): less restrictive condition test (#3211)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-19 16:53:19 +01:00
dependabot[bot]
f8d77d9a30 build(deps): bump google-auth-httplib2 from 0.1.1 to 0.2.0 (#3207)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-19 13:05:30 +01:00
Sergio Garcia
1a4887f028 chore(regions_update): Changes in regions for AWS services. (#3209)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-12-19 12:39:19 +01:00
dependabot[bot]
71042b5919 build(deps): bump mkdocs-material from 9.4.14 to 9.5.2 (#3206)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-19 12:39:10 +01:00
dependabot[bot]
435976800a build(deps-dev): bump moto from 4.2.11 to 4.2.12 (#3205)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-19 10:14:04 +01:00
dependabot[bot]
18f4c7205b build(deps-dev): bump coverage from 7.3.2 to 7.3.3 (#3204)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-19 08:55:14 +01:00
dependabot[bot]
06eeefb8bf build(deps-dev): bump pylint from 3.0.2 to 3.0.3 (#3203)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-19 08:30:45 +01:00
Sergio Garcia
1737d7cf42 fix(gcp): fix UnknownApiNameOrVersion error (#3202) 2023-12-18 14:32:33 +01:00
dependabot[bot]
cd03fa6d46 build(deps): bump jsonschema from 4.18.0 to 4.20.0 (#3057)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-18 13:00:43 +01:00
Sergio Garcia
a10a73962e chore(regions_update): Changes in regions for AWS services. (#3200) 2023-12-18 07:21:18 +01:00
Pepe Fagoaga
99d6fee7a0 fix(iam): Handle NoSuchEntity in list_group_policies (#3197) 2023-12-15 14:04:59 +01:00
Nacho Rivera
c8831f0f50 chore(s3 bucket input validation): validates input bucket (#3198) 2023-12-15 13:37:41 +01:00
Pepe Fagoaga
fdeb523581 feat(securityhub): Send only FAILs but storing all in the output files (#3195) 2023-12-15 13:31:55 +01:00
Sergio Garcia
9a868464ee chore(regions_update): Changes in regions for AWS services. (#3196)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-12-15 10:15:54 +01:00
Alexandros Gidarakos
051ec75e01 docs(cloudshell): Update AWS CloudShell installation steps (#3192)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-14 08:35:23 +01:00
Alexandros Gidarakos
fc3909491a docs(cloudshell): Add missing steps to workaround (#3191) 2023-12-14 08:18:24 +01:00
Pepe Fagoaga
2437fe270c docs(cloudshell): Add workaround to clone from github (#3190) 2023-12-13 17:19:30 +01:00
Nacho Rivera
c937b193d0 fix(apigw_restapi_auth check): add method auth testing (#3183) 2023-12-13 16:20:09 +01:00
Fennerr
8b5c995486 fix(lambda): memory leakage with lambda function code (#3167)
Co-authored-by: Justin Moorcroft <justin.moorcroft@mwrcybersec.com>
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-13 15:15:13 +01:00
Sergio Garcia
4410f2a582 chore(regions_update): Changes in regions for AWS services. (#3189)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-12-13 10:32:10 +01:00
Fennerr
bbb816868e docs(aws): Added debug information to inspect retries in API calls (#3186)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-12 14:07:33 +01:00
Fennerr
2441cca810 fix(threading): Improved threading for the AWS Service (#3175)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-12 12:50:26 +01:00
Sergio Garcia
3c3dfb380b fix(gcp): improve logging messages (#3185) 2023-12-12 12:38:50 +01:00
Nacho Rivera
0f165f0bf0 chore(actions): add prowler 4.0 branch to actions (#3184) 2023-12-12 11:40:01 +01:00
Sergio Garcia
7fcff548eb chore(regions_update): Changes in regions for AWS services. (#3182)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-12-12 10:28:01 +01:00
dependabot[bot]
8fa7b9ba00 build(deps-dev): bump docker from 6.1.3 to 7.0.0 (#3180)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-12 10:27:49 +01:00
dependabot[bot]
b101e15985 build(deps-dev): bump bandit from 1.7.5 to 1.7.6 (#3179)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-12 09:53:03 +01:00
dependabot[bot]
b4e412a37f build(deps-dev): bump pylint from 3.0.2 to 3.0.3 (#3181)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-12 09:33:27 +01:00
dependabot[bot]
ac0e2bbdb2 build(deps): bump google-api-python-client from 2.109.0 to 2.110.0 (#3178)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-12 08:07:30 +01:00
Sergio Garcia
ba16330e20 feat(cognito): add Amazon Cognito service (#3060) 2023-12-11 14:35:00 +01:00
Pepe Fagoaga
c9cb9774c6 fix(aws_regions): Get enabled regions (#3095) 2023-12-11 14:09:39 +01:00
Pepe Fagoaga
7b5b14dbd0 refactor(cloudwatch): simplify logic (#3172) 2023-12-11 11:23:24 +01:00
Fennerr
bd13973cf5 docs(parallel-execution): Combining the output files (#3096) 2023-12-11 11:11:53 +01:00
Fennerr
a7f8656e89 chore(elb): Improve status in elbv2_insecure_ssl_ciphers (#3169) 2023-12-11 11:04:37 +01:00
Sergio Garcia
1be52fab06 chore(ens): do not apply recomendation type to score (#3058) 2023-12-11 10:53:26 +01:00
Pepe Fagoaga
c9baff1a7f fix(generate_regional_clients): Global is not needed anymore (#3162) 2023-12-11 10:50:15 +01:00
Pepe Fagoaga
d1bc68086d fix(access-analyzer): Handle ValidationException (#3165) 2023-12-11 09:40:12 +01:00
Pepe Fagoaga
44a4c0670b fix(cloudtrail): Handle UnsupportedOperationException (#3166) 2023-12-11 09:38:23 +01:00
Pepe Fagoaga
4785056740 fix(elasticache): Handle CacheClusterNotFound (#3174) 2023-12-11 09:37:01 +01:00
Pepe Fagoaga
694aa448a4 fix(s3): Handle NoSuchBucket in the service (#3173) 2023-12-11 09:36:26 +01:00
Sergio Garcia
ee215b1ced chore(regions_update): Changes in regions for AWS services. (#3168) 2023-12-11 08:04:48 +01:00
Nacho Rivera
018e87884c test(audit_info): missing workspace test (#3164) 2023-12-05 16:05:39 +01:00
Nacho Rivera
a81cbbc325 test(audit_info): refactor iam (#3163) 2023-12-05 15:59:53 +01:00
Pepe Fagoaga
3962c9d816 test(audit_info): refactor acm, account and access analyzer (#3097) 2023-12-05 15:09:14 +01:00
Pepe Fagoaga
e187875da5 test(audit_info): refactor guardduty (#3160) 2023-12-05 15:00:46 +01:00
Pepe Fagoaga
f0d1a799a2 test(audit_info): refactor cloudtrail (#3111) 2023-12-05 14:59:42 +01:00
Pepe Fagoaga
5452d535d7 test(audit_info): refactor ec2 (#3132) 2023-12-05 14:58:58 +01:00
Pepe Fagoaga
7a776532a8 test(aws_account_id): refactor (#3161) 2023-12-05 14:58:42 +01:00
Nacho Rivera
e704d57957 test(audit_info): refactor inspector2 (#3159) 2023-12-05 14:19:40 +01:00
Pepe Fagoaga
c9a6eb5a1a test(audit_info): refactor globalaccelerator (#3154) 2023-12-05 14:13:02 +01:00
Pepe Fagoaga
c071812160 test(audit_info): refactor glue (#3158) 2023-12-05 14:12:44 +01:00
Pepe Fagoaga
3f95ad9ada test(audit_info): refactor glacier (#3153) 2023-12-05 14:09:04 +01:00
Nacho Rivera
250f59c9f5 test(audit_info): refactor kms (#3157) 2023-12-05 14:05:56 +01:00
Nacho Rivera
c17bbea2c7 test(audit_info): refactor macie (#3156) 2023-12-05 13:59:08 +01:00
Nacho Rivera
0262f8757a test(audit_info): refactor neptune (#3155) 2023-12-05 13:48:32 +01:00
Nacho Rivera
dbc2c481dc test(audit_info): refactor networkfirewall (#3152) 2023-12-05 13:20:52 +01:00
Pepe Fagoaga
e432c39eec test(audit_info): refactor fms (#3151) 2023-12-05 13:18:28 +01:00
Pepe Fagoaga
7383ae4f9c test(audit_info): refactor elbv2 (#3148) 2023-12-05 13:18:06 +01:00
Pepe Fagoaga
d217e33678 test(audit_info): refactor emr (#3149) 2023-12-05 13:17:42 +01:00
Nacho Rivera
d1daceff91 test(audit_info): refactor opensearch (#3150) 2023-12-05 13:17:28 +01:00
Nacho Rivera
dbbd556830 test(audit_info): refactor organizations (#3147) 2023-12-05 12:59:22 +01:00
Nacho Rivera
d483f1d90f test(audit_info): refactor rds (#3146) 2023-12-05 12:51:22 +01:00
Nacho Rivera
80684a998f test(audit_info): refactor redshift (#3144) 2023-12-05 12:42:08 +01:00
Pepe Fagoaga
0c4f0fde48 test(audit_info): refactor elb (#3145) 2023-12-05 12:41:37 +01:00
Pepe Fagoaga
071115cd52 test(audit_info): refactor elasticache (#3142) 2023-12-05 12:41:11 +01:00
Nacho Rivera
9136a755fe test(audit_info): refactor resourceexplorer2 (#3143) 2023-12-05 12:28:38 +01:00
Nacho Rivera
6ff864fc04 test(audit_info): refactor route53 (#3141) 2023-12-05 12:28:12 +01:00
Nacho Rivera
828a6f4696 test(audit_info): refactor s3 (#3140) 2023-12-05 12:13:21 +01:00
Pepe Fagoaga
417aa550a6 test(audit_info): refactor eks (#3139) 2023-12-05 12:07:41 +01:00
Pepe Fagoaga
78ffc2e238 test(audit_info): refactor efs (#3138) 2023-12-05 12:07:21 +01:00
Pepe Fagoaga
c9f22db1b5 test(audit_info): refactor ecs (#3137) 2023-12-05 12:07:01 +01:00
Pepe Fagoaga
41da560b64 test(audit_info): refactor ecr (#3136) 2023-12-05 12:06:42 +01:00
Nacho Rivera
b49e0b95f7 test(audit_info): refactor shield (#3131) 2023-12-05 11:40:42 +01:00
Nacho Rivera
50ef2729e6 test(audit_info): refactor sagemaker (#3135) 2023-12-05 11:40:19 +01:00
Nacho Rivera
6a901bb7de test(audit_info): refactor secretsmanager (#3134) 2023-12-05 11:33:54 +01:00
Nacho Rivera
f0da63c850 test(audit_info): refactor shub (#3133) 2023-12-05 11:33:34 +01:00
Nacho Rivera
b861c1dd3c test(audit_info): refactor sns (#3128) 2023-12-05 11:05:27 +01:00
Nacho Rivera
45faa2e9e8 test(audit_info): refactor sqs (#3130) 2023-12-05 11:05:05 +01:00
Pepe Fagoaga
b2e1eed684 test(audit_info): refactor dynamodb (#3129) 2023-12-05 10:59:26 +01:00
Pepe Fagoaga
4018221da6 test(audit_info): refactor drs (#3127) 2023-12-05 10:59:09 +01:00
Pepe Fagoaga
28ec3886f9 test(audit_info): refactor documentdb (#3126) 2023-12-05 10:58:48 +01:00
Pepe Fagoaga
ed323f4602 test(audit_info): refactor dlm (#3124) 2023-12-05 10:58:31 +01:00
Pepe Fagoaga
f72d360384 test(audit_info): refactor directoryservice (#3123) 2023-12-05 10:58:09 +01:00
Nacho Rivera
682bba452b test(audit_info): refactor ssm (#3125) 2023-12-05 10:45:15 +01:00
Nacho Rivera
e2ce5ae2af test(audit_info): refactor ssmincidents (#3122) 2023-12-05 10:38:09 +01:00
Nacho Rivera
039a0da69e tests(audit_info): refactor trustedadvisor (#3120) 2023-12-05 10:30:54 +01:00
Pepe Fagoaga
c9ad12b87e test(audit_info): refactor config (#3121) 2023-12-05 10:30:13 +01:00
Pepe Fagoaga
094be2e2e6 test(audit_info): refactor codeartifact (#3117) 2023-12-05 10:17:08 +01:00
Pepe Fagoaga
1b3029d833 test(audit_info): refactor codebuild (#3118) 2023-12-05 10:17:02 +01:00
Nacho Rivera
d00d5e863b tests(audit_info): refactor vpc (#3119) 2023-12-05 10:16:51 +01:00
Pepe Fagoaga
3d19e89710 test(audit_info): refactor cloudwatch (#3116) 2023-12-05 10:04:45 +01:00
Pepe Fagoaga
247cd6fc44 test(audit_info): refactor cloudfront (#3110) 2023-12-05 10:04:07 +01:00
Pepe Fagoaga
ba244c887f test(audit_info): refactor cloudformation (#3105) 2023-12-05 10:03:50 +01:00
Pepe Fagoaga
f77d92492a test(audit_info): refactor backup (#3104) 2023-12-05 10:03:32 +01:00
Pepe Fagoaga
1b85af95c0 test(audit_info): refactor athena (#3101) 2023-12-05 10:03:11 +01:00
Pepe Fagoaga
9236f5d058 test(audit_info): refactor autoscaling (#3102) 2023-12-05 10:02:54 +01:00
dependabot[bot]
39ba8cd230 build(deps-dev): bump freezegun from 1.2.2 to 1.3.1 (#3109)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-05 09:51:57 +01:00
Nacho Rivera
e67328945f test(audit_info): refactor waf (#3115) 2023-12-05 09:51:37 +01:00
Nacho Rivera
bcee2b0b6d test(audit_info): refactor wafv2 (#3114) 2023-12-05 09:51:20 +01:00
Nacho Rivera
be9a1b2f9a test(audit_info): refactor wellarchitected (#3113) 2023-12-05 09:40:31 +01:00
dependabot[bot]
4f9c2aadc2 build(deps-dev): bump moto from 4.2.10 to 4.2.11 (#3108)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-05 09:34:13 +01:00
Pepe Fagoaga
25d419ac7f test(audit_info): refactor appstream (#3100) 2023-12-05 09:33:53 +01:00
Pepe Fagoaga
57cfb508f1 test(audit_info): refactor apigateway (#3098) 2023-12-05 09:33:20 +01:00
Pepe Fagoaga
c88445f90d test(audit_info): refactor apigatewayv2 (#3099) 2023-12-05 09:32:31 +01:00
Nacho Rivera
9b6d6c3a42 test(audit_info): refactor workspaces (#3112) 2023-12-05 09:32:13 +01:00
Pepe Fagoaga
d26c1405ce test(audit_info): refactor awslambda (#3103) 2023-12-05 09:18:23 +01:00
dependabot[bot]
4bb35ab92d build(deps): bump slack-sdk from 3.26.0 to 3.26.1 (#3107)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-05 08:39:26 +01:00
dependabot[bot]
cdd983aa04 build(deps): bump google-api-python-client from 2.108.0 to 2.109.0 (#3106)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-05 08:12:57 +01:00
Nacho Rivera
e83ce86eb3 fix(docs): typo in reporting/csv (#3094) 2023-12-04 10:20:57 +01:00
Nacho Rivera
bcc590a3ee chore(actions): not launch linters for mkdocs.yml (#3093) 2023-12-04 09:57:18 +01:00
Fennerr
5fdffb93d1 docs(parallel-execution): How to execute it in parallel (#3091)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-04 09:48:46 +01:00
Nacho Rivera
db20b2c04f fix(docs): csv fields (#3092)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-04 09:46:20 +01:00
Nacho Rivera
4e037c0f43 fix(send_to_s3_bucket): don't kill exec when fail (#3088) 2023-12-01 13:25:59 +01:00
Nacho Rivera
fdcc2ac5cb revert(clean local dirs): delete clean local dirs output feature (#3087) 2023-12-01 12:26:59 +01:00
William
9099bd79f8 fix(vpc_different_regions): Handle if there are no VPC (#3081)
Co-authored-by: William Brady <will@crofton.cloud>
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-12-01 11:44:23 +01:00
Pepe Fagoaga
a01683d8f6 refactor(severities): Define it in one place (#3086) 2023-12-01 11:39:35 +01:00
Pepe Fagoaga
6d2b2a9a93 refactor(load_checks_to_execute): Refactor function and add tests (#3066) 2023-11-30 17:41:14 +01:00
Sergio Garcia
de4166bf0d chore(regions_update): Changes in regions for AWS services. (#3079) 2023-11-29 11:21:06 +01:00
dependabot[bot]
1cbef30788 build(deps): bump cryptography from 41.0.4 to 41.0.6 (#3078)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-29 08:17:34 +01:00
Nacho Rivera
89c6e27489 fix(trustedadvisor): handle missing checks dict key (#3075) 2023-11-28 10:37:24 +01:00
Sergio Garcia
f74ffc530d chore(regions_update): Changes in regions for AWS services. (#3074)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-28 10:22:29 +01:00
dependabot[bot]
441d4d6a38 build(deps-dev): bump moto from 4.2.9 to 4.2.10 (#3073)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-28 09:57:56 +01:00
dependabot[bot]
3c6b9d63a6 build(deps): bump slack-sdk from 3.24.0 to 3.26.0 (#3072) 2023-11-28 09:21:46 +01:00
dependabot[bot]
254d8616b7 build(deps-dev): bump pytest-xdist from 3.4.0 to 3.5.0 (#3071) 2023-11-28 09:06:23 +01:00
dependabot[bot]
d3bc6fda74 build(deps): bump mkdocs-material from 9.4.10 to 9.4.14 (#3070)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-28 08:46:49 +01:00
Nacho Rivera
e4a5d9376f fix(clean local output dirs): change function description (#3068)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-11-27 14:55:34 +01:00
Nacho Rivera
523605e3e7 fix(set_azure_audit_info): assign correct logging when no auth (#3063) 2023-11-27 11:00:22 +01:00
Nacho Rivera
ed33fac337 fix(gcp provider): move generate_client for consistency (#3064) 2023-11-27 10:31:40 +01:00
Sergio Garcia
bf0e62aca5 chore(regions_update): Changes in regions for AWS services. (#3065)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-27 10:30:12 +01:00
Nacho Rivera
60c0b79b10 fix(outputs): initialize_file_descriptor is called dynamically (#3050) 2023-11-21 16:05:26 +01:00
Sergio Garcia
f9d2e7aa93 chore(regions_update): Changes in regions for AWS services. (#3059)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-21 11:07:08 +01:00
dependabot[bot]
0646748e24 build(deps): bump google-api-python-client from 2.107.0 to 2.108.0 (#3056)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-21 09:31:25 +01:00
dependabot[bot]
f6408e9df7 build(deps-dev): bump moto from 4.2.8 to 4.2.9 (#3055)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-21 08:14:00 +01:00
dependabot[bot]
5769bc815c build(deps): bump mkdocs-material from 9.4.8 to 9.4.10 (#3054)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-21 07:51:27 +01:00
dependabot[bot]
5a3e3e9b1f build(deps): bump slack-sdk from 3.23.0 to 3.24.0 (#3053) 2023-11-21 07:31:15 +01:00
Pepe Fagoaga
26cbafa204 fix(deps): Add missing jsonschema (#3052) 2023-11-20 18:41:39 +01:00
Sergio Garcia
d14541d1de fix(json-ocsf): add profile only for AWS provider (#3051) 2023-11-20 17:00:36 +01:00
Sergio Garcia
3955ebd56c chore(python): update python version constraint <3.12 (#3047) 2023-11-20 14:49:09 +01:00
Ignacio Dominguez
e212645cf0 fix(codeartifact): solve dependency confusion check (#2999)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-11-20 14:48:46 +01:00
Sergio Garcia
db9c1c24d3 chore(moto): install all moto dependencies (#3048) 2023-11-20 13:44:53 +01:00
Vajrala Venkateswarlu
0a305c281f feat(custom_checks_metadata): Add checks metadata overide for severity (#3038)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-11-20 10:44:47 +01:00
Sergio Garcia
43c96a7875 chore(regions_update): Changes in regions for AWS services. (#3045)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-20 10:15:32 +01:00
Sergio Garcia
3a93aba7d7 chore(release): update Prowler Version to 3.11.3 (#3044)
Co-authored-by: github-actions <noreply@github.com>
2023-11-16 17:07:14 +01:00
Sergio Garcia
3d563356e5 fix(json): check if profile is None (#3043) 2023-11-16 13:52:07 +01:00
Johnny Lu
9205ef30f8 fix(securityhub): findings not being imported or archived in non-aws partitions (#3040)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-11-16 11:27:28 +01:00
Sergio Garcia
19c2dccc6d chore(regions_update): Changes in regions for AWS services. (#3042)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-16 11:09:41 +01:00
Sergio Garcia
8f819048ed chore(release): update Prowler Version to 3.11.2 (#3037)
Co-authored-by: github-actions <noreply@github.com>
2023-11-15 09:07:57 +01:00
Sergio Garcia
3a3bb44f11 fix(GuardDuty): only execute checks if GuardDuty enabled (#3028) 2023-11-14 14:14:05 +01:00
Nacho Rivera
f8e713a544 feat(azure regions): support non default azure region (#3013)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-11-14 13:17:48 +01:00
Pepe Fagoaga
573f1eba56 fix(securityhub): Use enabled_regions instead of audited_regions (#3029) 2023-11-14 12:57:54 +01:00
simone ragonesi
a36be258d8 chore: modify latest version msg (#3036)
Signed-off-by: r3drun3 <simone.ragonesi@sighup.io>
2023-11-14 12:11:55 +01:00
Sergio Garcia
690ec057c3 fix(ec2_securitygroup_not_used): check if security group is associated (#3026) 2023-11-14 12:03:01 +01:00
dependabot[bot]
2681feb1f6 build(deps): bump azure-storage-blob from 12.18.3 to 12.19.0 (#3034)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 11:47:42 +01:00
Sergio Garcia
e662adb8c5 chore(regions_update): Changes in regions for AWS services. (#3035)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-14 11:47:24 +01:00
Sergio Garcia
c94bd96c93 chore(args): make compatible severity and services arguments (#3024) 2023-11-14 11:26:53 +01:00
dependabot[bot]
6d85433194 build(deps): bump alive-progress from 3.1.4 to 3.1.5 (#3033)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 09:41:32 +01:00
dependabot[bot]
7a6092a779 build(deps): bump google-api-python-client from 2.106.0 to 2.107.0 (#3032)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 09:16:00 +01:00
dependabot[bot]
4c84529aed build(deps-dev): bump pytest-xdist from 3.3.1 to 3.4.0 (#3031)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 08:48:02 +01:00
Sergio Garcia
512d3e018f chore(accessanalyzer): include service in allowlist_non_default_regions (#3025) 2023-11-14 08:00:17 +01:00
dependabot[bot]
c6aff985c9 build(deps-dev): bump moto from 4.2.7 to 4.2.8 (#3030)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 07:54:34 +01:00
Sergio Garcia
7fadf31a2b chore(release): update Prowler Version to 3.11.1 (#3021)
Co-authored-by: github-actions <noreply@github.com>
2023-11-10 12:53:07 +01:00
Sergio Garcia
e7d098ed1e chore(regions_update): Changes in regions for AWS services. (#3020)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-10 11:34:44 +01:00
Sergio Garcia
21fba27355 fix(iam): do not list tags for inline policies (#3014) 2023-11-10 09:51:19 +01:00
John Mastron
74e37307f7 fix(SQS): fix invalid SQS ARNs (#3016)
Co-authored-by: John Mastron <jmastron@jpl.nasa.gov>
2023-11-10 09:33:18 +01:00
Sergio Garcia
d9d7c009a5 fix(rds): check if engines exist in region (#3012) 2023-11-10 09:20:36 +01:00
Pepe Fagoaga
2220cf9733 refactor(allowlist): Simplify and handle corner cases (#3019) 2023-11-10 09:11:52 +01:00
Pepe Fagoaga
3325b72b86 fix(iam-sqs): Handle exceptions for non-existent resources (#3010) 2023-11-08 14:06:45 +01:00
Sergio Garcia
9182d56246 chore(regions_update): Changes in regions for AWS services. (#3011)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-08 10:42:23 +01:00
Nacho Rivera
299ece19a8 fix(clean local output dirs): clean dirs when output to s3 (#2997) 2023-11-08 10:05:24 +01:00
Sergio Garcia
0a0732d7c0 docs(gcp): update GCP permissions (#3008) 2023-11-07 14:06:22 +01:00
Sergio Garcia
28011d97a9 chore(regions_update): Changes in regions for AWS services. (#3007)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-07 11:04:45 +01:00
Sergio Garcia
e71b0d1b6a chore(regions_update): Changes in regions for AWS services. (#3001)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-07 11:04:36 +01:00
John Mastron
ec01b62a82 fix(aws): check all conditions in IAM policy parser (#3006)
Co-authored-by: John Mastron <jmastron@jpl.nasa.gov>
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-11-07 10:40:34 +01:00
dependabot[bot]
12b45c6896 build(deps): bump google-api-python-client from 2.105.0 to 2.106.0 (#3005)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-07 09:45:51 +01:00
dependabot[bot]
51c60dd4ee build(deps): bump mkdocs-material from 9.4.7 to 9.4.8 (#3004)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-07 09:02:02 +01:00
4183 changed files with 33521 additions and 340694 deletions

View File

@@ -1,14 +0,0 @@
{
"repoOwner": "prowler-cloud",
"repoName": "prowler",
"targetPRLabels": [
"backport"
],
"sourcePRLabels": [
"was-backported"
],
"copySourcePRLabels": false,
"copySourcePRReviewers": true,
"prTitle": "{{sourcePullRequest.title}}",
"commitConflicts": true
}

94
.env
View File

@@ -1,94 +0,0 @@
#### Important Note ####
# This file is used to store environment variables for the Prowler App.
# For production, it is recommended to use a secure method to store these variables and change the default secret keys.
#### Prowler UI Configuration ####
PROWLER_UI_VERSION="latest"
SITE_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
AUTH_TRUST_HOST=true
UI_PORT=3000
# openssl rand -base64 32
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
#### Prowler API Configuration ####
PROWLER_API_VERSION="stable"
# PostgreSQL settings
# If running Django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=postgres-db
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler_admin
POSTGRES_ADMIN_PASSWORD=postgres
POSTGRES_USER=prowler
POSTGRES_PASSWORD=postgres
POSTGRES_DB=prowler_db
# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=valkey
VALKEY_PORT=6379
VALKEY_DB=0
# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8080
DJANGO_DEBUG=False
DJANGO_SETTINGS_MODULE=config.django.production
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=human_readable
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
# Defaults to the maximum available based on CPU cores if not set.
DJANGO_WORKERS=4
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
# Token lifetime is in minutes
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_MANAGE_DB_PARTITIONS=True
# openssl genrsa -out private.pem 2048
DJANGO_TOKEN_SIGNING_KEY="-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDs4e+kt7SnUJek
6V5r9zMGzXCoU5qnChfPiqu+BgANyawz+MyVZPs6RCRfeo6tlCknPQtOziyXYM2I
7X+qckmuzsjqp8+u+o1mw3VvUuJew5k2SQLPYwsiTzuFNVJEOgRo3hywGiGwS2iv
/5nh2QAl7fq2qLqZEXQa5+/xJlQggS1CYxOJgggvLyra50QZlBvPve/AxKJ/EV/Q
irWTZU5lLNI8sH2iZR05vQeBsxZ0dCnGMT+vGl+cGkqrvzQzKsYbDmabMcfTYhYi
78fpv6A4uharJFHayypYBjE39PwhMyyeycrNXlpm1jpq+03HgmDuDMHydk1tNwuT
nEC7m7iNAgMBAAECggEAA2m48nJcJbn9SVi8bclMwKkWmbJErOnyEGEy2sTK3Of+
NWx9BB0FmqAPNxn0ss8K7cANKOhDD7ZLF9E2MO4/HgfoMKtUzHRbM7MWvtEepldi
nnvcUMEgULD8Dk4HnqiIVjt3BdmGiTv46OpBnRWrkSBV56pUL+7msZmMZTjUZvh2
ZWv0+I3gtDIjo2Zo/FiwDV7CfwRjJarRpYUj/0YyuSA4FuOUYl41WAX1I301FKMH
xo3jiAYi1s7IneJ16OtPpOA34Wg5F6ebm/UO0uNe+iD4kCXKaZmxYQPh5tfB0Qa3
qj1T7GNpFNyvtG7VVdauhkb8iu8X/wl6PCwbg0RCKQKBgQD9HfpnpH0lDlHMRw9K
X7Vby/1fSYy1BQtlXFEIPTN/btJ/asGxLmAVwJ2HAPXWlrfSjVAH7CtVmzN7v8oj
HeIHfeSgoWEu1syvnv2AMaYSo03UjFFlfc/GUxF7DUScRIhcJUPCP8jkAROz9nFv
DByNjUL17Q9r43DmDiRsy0IFqQKBgQDvlJ9Uhl+Sp7gRgKYwa/IG0+I4AduAM+Gz
Dxbm52QrMGMTjaJFLmLHBUZ/ot+pge7tZZGws8YR8ufpyMJbMqPjxhIvRRa/p1Tf
E3TQPW93FMsHUvxAgY3MV5MzXFPhlNAKb+akP/RcXUhetGAuZKLubtDCWa55ZQuL
wj2OS+niRQKBgE7K8zUqNi6/22S8xhy/2GPgB1qPObbsABUofK0U6CAGLo6te+gc
6Jo84IyzFtQbDNQFW2Fr+j1m18rw9AqkdcUhQndiZS9AfG07D+zFB86LeWHt4DS4
ymIRX8Kvaak/iDcu/n3Mf0vCrhB6aetImObTj4GgrwlFvtJOmrYnO8EpAoGAIXXP
Xt25gWD9OyyNiVu6HKwA/zN7NYeJcRmdaDhO7B1A6R0x2Zml4AfjlbXoqOLlvLAf
zd79vcoAC82nH1eOPiSOq51plPDI0LMF8IN0CtyTkn1Lj7LIXA6rF1RAvtOqzppc
SvpHpZK9pcRpXnFdtBE0BMDDtl6fYzCIqlP94UUCgYEAnhXbAQMF7LQifEm34Dx8
BizRMOKcqJGPvbO2+Iyt50O5X6onU2ITzSV1QHtOvAazu+B1aG9pEuBFDQ+ASxEu
L9ruJElkOkb/o45TSF6KCsHd55ReTZ8AqnRjf5R+lyzPqTZCXXb8KTcRvWT4zQa3
VxyT2PnaSqEcexWUy4+UXoQ=
-----END PRIVATE KEY-----"
# openssl rsa -in private.pem -pubout -out public.pem
DJANGO_TOKEN_VERIFYING_KEY="-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7OHvpLe0p1CXpOlea/cz
Bs1wqFOapwoXz4qrvgYADcmsM/jMlWT7OkQkX3qOrZQpJz0LTs4sl2DNiO1/qnJJ
rs7I6qfPrvqNZsN1b1LiXsOZNkkCz2MLIk87hTVSRDoEaN4csBohsEtor/+Z4dkA
Je36tqi6mRF0Gufv8SZUIIEtQmMTiYIILy8q2udEGZQbz73vwMSifxFf0Iq1k2VO
ZSzSPLB9omUdOb0HgbMWdHQpxjE/rxpfnBpKq780MyrGGw5mmzHH02IWIu/H6b+g
OLoWqyRR2ssqWAYxN/T8ITMsnsnKzV5aZtY6avtNx4Jg7gzB8nZNbTcLk5xAu5u4
jQIDAQAB
-----END PUBLIC KEY-----"
# openssl rand -base64 32
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400

7
.github/CODEOWNERS vendored
View File

@@ -1,6 +1 @@
/* @prowler-cloud/sdk
/.github/ @prowler-cloud/sdk
prowler @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
tests @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
api @prowler-cloud/api
ui @prowler-cloud/ui
* @prowler-cloud/prowler-oss

View File

@@ -1,5 +1,6 @@
name: 🐞 Bug Report
description: Create a report to help us improve
title: "[Bug]: "
labels: ["bug", "status/needs-triage"]
body:
@@ -26,7 +27,7 @@ body:
id: actual
attributes:
label: Actual Result with Screenshots or Logs
description: If applicable, add screenshots to help explain your problem. Also, you can add logs (anonymize them first!). Here a command that may help to share a log `prowler <your arguments> --log-level ERROR --log-file $(date +%F)_error.log` then attach here the log file.
description: If applicable, add screenshots to help explain your problem. Also, you can add logs (anonymize them first!). Here a command that may help to share a log `prowler <your arguments> --log-level DEBUG --log-file $(date +%F)_debug.log` then attach here the log file.
validations:
required: true
- type: dropdown

View File

@@ -1,6 +1,7 @@
name: 💡 Feature Request
name: 💡 Feature Request
description: Suggest an idea for this project
labels: ["feature-request", "status/needs-triage"]
labels: ["enhancement", "status/needs-triage"]
body:
- type: textarea

View File

@@ -1,3 +0,0 @@
name: "API - CodeQL Config"
paths:
- "api/"

View File

@@ -1,4 +0,0 @@
name: "SDK - CodeQL Config"
paths-ignore:
- "api/"
- "ui/"

View File

@@ -1,3 +0,0 @@
name: "UI - CodeQL Config"
paths:
- "ui/"

102
.github/dependabot.yml vendored
View File

@@ -5,112 +5,16 @@
version: 2
updates:
# v5
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "pip"
- package-ecosystem: "pip"
directory: "/api"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "pip"
- "component/api"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "github_actions"
- package-ecosystem: "npm"
directory: "/ui"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "npm"
- "component/ui"
- package-ecosystem: "docker"
directory: "/"
- package-ecosystem: "pip" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "docker"
# v4.6
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "pip"
- "v4"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "github_actions"
- "v4"
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "docker"
- "v4"
# v3
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "pip"
- "v3"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "github_actions"
- "v3"
target-branch: master

67
.github/labeler.yml vendored
View File

@@ -22,73 +22,6 @@ provider/kubernetes:
- any-glob-to-any-file: "prowler/providers/kubernetes/**"
- any-glob-to-any-file: "tests/providers/kubernetes/**"
provider/github:
- changed-files:
- any-glob-to-any-file: "prowler/providers/github/**"
- any-glob-to-any-file: "tests/providers/github/**"
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"
cli:
- changed-files:
- any-glob-to-any-file: "cli/**"
mutelist:
- changed-files:
- any-glob-to-any-file: "prowler/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "tests/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
integration/s3:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/lib/s3/**"
- any-glob-to-any-file: "tests/providers/aws/lib/s3/**"
integration/slack:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/slack/**"
- any-glob-to-any-file: "tests/lib/outputs/slack/**"
integration/security-hub:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/lib/security_hub/**"
- any-glob-to-any-file: "tests/providers/aws/lib/security_hub/**"
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
- any-glob-to-any-file: "tests/lib/outputs/asff/**"
output/html:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/html/**"
- any-glob-to-any-file: "tests/lib/outputs/html/**"
output/asff:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
- any-glob-to-any-file: "tests/lib/outputs/asff/**"
output/ocsf:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/ocsf/**"
- any-glob-to-any-file: "tests/lib/outputs/ocsf/**"
output/csv:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/csv/**"
- any-glob-to-any-file: "tests/lib/outputs/csv/**"
component/api:
- changed-files:
- any-glob-to-any-file: "api/**"
component/ui:
- changed-files:
- any-glob-to-any-file: "ui/**"

View File

@@ -2,20 +2,12 @@
Please include relevant motivation and context for this PR.
If fixes an issue please add it with `Fix #XXXX`
### Description
Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.
### Checklist
- Are there new checks included in this PR? Yes / No
- If so, do we need to update permissions for the provider? Please review this carefully.
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

View File

@@ -1,98 +0,0 @@
name: API - Build and Push containers
on:
push:
branches:
- "master"
paths:
- "api/**"
- ".github/workflows/api-build-lint-push-containers.yml"
# Uncomment the code below to test this action on PRs
# pull_request:
# branches:
# - "master"
# paths:
# - "api/**"
# - ".github/workflows/api-build-lint-push-containers.yml"
release:
types: [published]
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
STABLE_TAG: stable
WORKING_DIRECTORY: ./api
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api
jobs:
repository-check:
name: Repository check
runs-on: ubuntu-latest
outputs:
is_repo: ${{ steps.repository_check.outputs.is_repo }}
steps:
- name: Repository check
id: repository_check
working-directory: /tmp
run: |
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
then
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
else
echo "This action only runs for prowler-cloud/prowler"
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
fi
# Build Prowler OSS container
container-build-push:
needs: repository-check
if: needs.repository-check.outputs.is_repo == 'true'
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ env.WORKING_DIRECTORY }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -1,173 +0,0 @@
name: API - Pull Request
on:
push:
branches:
- "master"
- "v5.*"
paths:
- "api/**"
pull_request:
branches:
- "master"
- "v5.*"
paths:
- "api/**"
env:
POSTGRES_HOST: localhost
POSTGRES_PORT: 5432
POSTGRES_ADMIN_USER: prowler
POSTGRES_ADMIN_PASSWORD: S3cret
POSTGRES_USER: prowler_user
POSTGRES_PASSWORD: prowler
POSTGRES_DB: postgres-db
VALKEY_HOST: localhost
VALKEY_PORT: 6379
VALKEY_DB: 0
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.12"]
# Service containers to run with `test`
services:
# Label used to access the service container
postgres:
image: postgres
env:
POSTGRES_HOST: ${{ env.POSTGRES_HOST }}
POSTGRES_PORT: ${{ env.POSTGRES_PORT }}
POSTGRES_USER: ${{ env.POSTGRES_USER }}
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
POSTGRES_DB: ${{ env.POSTGRES_DB }}
# Set health checks to wait until postgres has started
ports:
- 5432:5432
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
valkey:
image: valkey/valkey:7-alpine3.19
env:
VALKEY_HOST: ${{ env.VALKEY_HOST }}
VALKEY_PORT: ${{ env.VALKEY_PORT }}
VALKEY_DB: ${{ env.VALKEY_DB }}
# Set health checks to wait until postgres has started
ports:
- 6379:6379
options: >-
--health-cmd "valkey-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
with:
files: api/**
files_ignore: |
api/.github/**
api/docs/**
api/permissions/**
api/README.md
api/mkdocs.yml
- name: Install poetry
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry==1.8.5
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install dependencies
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry install
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
sed -E 's/.*"v([^"]+)".*/\1/' \
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
&& chmod +x /tmp/hadolint
- name: Poetry check
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry check --lock
- name: Lint with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff check . --exclude contrib
- name: Check Format with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff format --check . --exclude contrib
- name: Lint with pylint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
- name: Bandit
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check --ignore 70612,66963
- name: Vulture
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
- name: Hadolint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest --cov=./src/backend --cov-report=xml src/backend
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: api

View File

@@ -1,47 +0,0 @@
name: Prowler - Automatic Backport
on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
env:
# The prefix of the label that triggers the backport must not contain the branch name
# so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_IGNORE: was-backported
jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
runs-on: ubuntu-latest
permissions:
id-token: write
pull-requests: write
contents: write
steps:
- name: Check labels
id: preview_label_check
uses: docker://agilepathway/pull-request-label-checker:v1.6.55
with:
allow_failure: true
prefix_mode: true
any_of: ${{ env.BACKPORT_LABEL_PREFIX }}
none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
repo_token: ${{ secrets.GITHUB_TOKEN }}
- name: Backport Action
if: steps.preview_label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
- name: Info log
if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
run: cat ~/.backport/backport.debug.log

View File

@@ -1,24 +0,0 @@
name: Prowler - Pull Request Documentation Link
on:
pull_request:
branches:
- 'master'
- 'v3'
paths:
- 'docs/**'
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
jobs:
documentation-link:
name: Documentation Link
runs-on: ubuntu-latest
steps:
- name: Leave PR comment with the Prowler Documentation URI
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_NUMBER }}
body: |
You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)

View File

@@ -0,0 +1,117 @@
name: build-lint-push-containers
on:
push:
branches:
- "master"
paths-ignore:
- ".github/**"
- "README.md"
- "docs/**"
release:
types: [published]
env:
AWS_REGION_STG: eu-west-1
AWS_REGION_PLATFORM: eu-west-1
AWS_REGION: us-east-1
IMAGE_NAME: prowler
LATEST_TAG: latest
STABLE_TAG: stable
TEMPORARY_TAG: temporary
DOCKERFILE_PATH: ./Dockerfile
PYTHON_VERSION: 3.9
jobs:
# Build Prowler OSS container
container-build-push:
# needs: dockerfile-linter
runs-on: ubuntu-latest
env:
POETRY_VIRTUALENVS_CREATE: "false"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup python (release)
if: github.event_name == 'release'
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install dependencies (release)
if: github.event_name == 'release'
run: |
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Update Prowler version (release)
if: github.event_name == 'release'
run: |
poetry version ${{ github.event.release.tag_name }}
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Public ECR
uses: docker/login-action@v3
with:
registry: public.ecr.aws
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
password: ${{ secrets.PUBLIC_ECR_AWS_SECRET_ACCESS_KEY }}
env:
AWS_REGION: ${{ env.AWS_REGION }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@v5
with:
push: true
tags: |
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
file: ${{ env.DOCKERFILE_PATH }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v5
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context
context: .
push: true
tags: |
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
file: ${{ env.DOCKERFILE_PATH }}
cache-from: type=gha
cache-to: type=gha,mode=max
dispatch-action:
needs: container-build-push
runs-on: ubuntu-latest
steps:
- name: Get latest commit info
if: github.event_name == 'push'
run: |
LATEST_COMMIT_HASH=$(echo ${{ github.event.after }} | cut -b -7)
echo "LATEST_COMMIT_HASH=${LATEST_COMMIT_HASH}" >> $GITHUB_ENV
- name: Dispatch event for latest
if: github.event_name == 'push'
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" -H "X-GitHub-Api-Version: 2022-11-28" --data '{"event_type":"dispatch","client_payload":{"version":"latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
- name: Dispatch event for release
if: github.event_name == 'release'
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" -H "X-GitHub-Api-Version: 2022-11-28" --data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ github.event.release.tag_name }}"}}'

View File

@@ -9,21 +9,14 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: API - CodeQL
name: "CodeQL"
on:
push:
branches:
- "master"
- "v5.*"
paths:
- "api/**"
branches: [ "master", "prowler-4.0-dev" ]
pull_request:
branches:
- "master"
- "v5.*"
paths:
- "api/**"
# The branches below must be a subset of the branches above
branches: [ "master", "prowler-4.0-dev" ]
schedule:
- cron: '00 12 * * *'
@@ -51,7 +44,12 @@ jobs:
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3

View File

@@ -1,4 +1,4 @@
name: Prowler - Find secrets
name: find-secrets
on: pull_request
@@ -11,9 +11,8 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.88.2
uses: trufflesecurity/trufflehog@v3.67.6
with:
path: ./
base: ${{ github.event.repository.default_branch }}
head: HEAD
extra_args: --only-verified

View File

@@ -1,11 +1,10 @@
name: Prowler - PR Labeler
name: "Pull Request Labeler"
on:
pull_request_target:
branches:
- "master"
- "v3"
- "v4.*"
- "prowler-4.0-dev"
jobs:
labeler:

View File

@@ -1,18 +1,14 @@
name: SDK - Pull Request
name: pr-lint-test
on:
push:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
- "prowler-4.0-dev"
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
- "prowler-4.0-dev"
jobs:
build:
runs-on: ubuntu-latest
@@ -22,37 +18,28 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v42
with:
files: ./**
files_ignore: |
.github/**
README.md
docs/**
permissions/**
api/**
ui/**
README.md
mkdocs.yml
.backportrc.json
.env
docker-compose*
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry==1.8.5
pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install dependencies
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
@@ -63,56 +50,44 @@ jobs:
sed -E 's/.*"v([^"]+)".*/\1/' \
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
&& chmod +x /tmp/hadolint
- name: Poetry check
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry check --lock
poetry lock --check
- name: Lint with flake8
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
- name: Checking format with black
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run black --exclude api ui --check .
poetry run black --check .
- name: Lint with pylint
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
- name: Bandit
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check --ignore 70612 -r pyproject.toml
poetry run safety check
- name: Vulture
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
poetry run vulture --exclude "contrib" --min-confidence 100 .
- name: Hadolint
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler

71
.github/workflows/pypi-release.yml vendored Normal file
View File

@@ -0,0 +1,71 @@
name: pypi-release
on:
release:
types: [published]
env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
GITHUB_BRANCH: master
jobs:
release-prowler-job:
runs-on: ubuntu-latest
env:
POETRY_VIRTUALENVS_CREATE: "false"
name: Release Prowler to PyPI
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v4
with:
ref: ${{ env.GITHUB_BRANCH }}
- name: Install dependencies
run: |
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: setup python
uses: actions/setup-python@v5
with:
python-version: 3.9
cache: 'poetry'
- name: Change version and Build package
run: |
poetry version ${{ env.RELEASE_TAG }}
git config user.name "github-actions"
git config user.email "<noreply@github.com>"
git add prowler/config/config.py pyproject.toml
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}"
git push -f origin ${{ env.RELEASE_TAG }}
poetry build
- name: Publish prowler package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
# Create pull request with new version
- name: Create Pull Request
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."
branch: release-${{ env.RELEASE_TAG }}
labels: "status/waiting-for-revision, severity/low"
title: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}"
body: |
### Description
This PR updates Prowler Version to ${{ env.RELEASE_TAG }}.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Replicate PyPi Package
run: |
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
pip install toml
python util/replicate_pypi_package.py
poetry build
- name: Publish prowler-cloud package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish

View File

@@ -1,6 +1,6 @@
# This is a basic workflow to help you get started with Actions
name: SDK - Refresh AWS services' regions
name: Refresh regions of AWS services
on:
schedule:
@@ -50,13 +50,13 @@ jobs:
# Create pull request
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services"
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
title: "chore(regions_update): Changes in regions for AWS services"
labels: "status/waiting-for-revision, severity/low, provider/aws"
title: "chore(regions_update): Changes in regions for AWS services."
body: |
### Description

View File

@@ -1,186 +0,0 @@
name: SDK - Build and Push containers
on:
push:
branches:
# For `v3-latest`
- "v3"
# For `v4-latest`
- "v4.6"
# For `latest`
- "master"
paths-ignore:
- ".github/**"
- "README.md"
- "docs/**"
- "ui/**"
- "api/**"
release:
types: [published]
env:
# AWS Configuration
AWS_REGION_STG: eu-west-1
AWS_REGION_PLATFORM: eu-west-1
AWS_REGION: us-east-1
# Container's configuration
IMAGE_NAME: prowler
DOCKERFILE_PATH: ./Dockerfile
# Tags
LATEST_TAG: latest
STABLE_TAG: stable
# The RELEASE_TAG is set during runtime in releases
RELEASE_TAG: ""
# The PROWLER_VERSION and PROWLER_VERSION_MAJOR are set during runtime in releases
PROWLER_VERSION: ""
PROWLER_VERSION_MAJOR: ""
# TEMPORARY_TAG: temporary
# Python configuration
PYTHON_VERSION: 3.12
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler
jobs:
# Build Prowler OSS container
container-build-push:
# needs: dockerfile-linter
runs-on: ubuntu-latest
outputs:
prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
env:
POETRY_VIRTUALENVS_CREATE: "false"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install Poetry
run: |
pipx install poetry==1.8.5
pipx inject poetry poetry-bumpversion
- name: Get Prowler version
id: get-prowler-version
run: |
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
# Store prowler version major just for the release
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
echo "PROWLER_VERSION_MAJOR=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION_MAJOR=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_OUTPUT}"
case ${PROWLER_VERSION_MAJOR} in
3)
echo "LATEST_TAG=v3-latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
;;
4)
echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
;;
5)
echo "LATEST_TAG=latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=stable" >> "${GITHUB_ENV}"
;;
*)
# Fallback if any other version is present
echo "Releasing another Prowler major version, aborting..."
exit 1
;;
esac
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Public ECR
uses: docker/login-action@v3
with:
registry: public.ecr.aws
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
password: ${{ secrets.PUBLIC_ECR_AWS_SECRET_ACCESS_KEY }}
env:
AWS_REGION: ${{ env.AWS_REGION }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@v6
with:
push: true
tags: |
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
file: ${{ env.DOCKERFILE_PATH }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context
context: .
push: true
tags: |
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.PROWLER_VERSION }}
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.PROWLER_VERSION }}
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.PROWLER_VERSION }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
file: ${{ env.DOCKERFILE_PATH }}
cache-from: type=gha
cache-to: type=gha,mode=max
dispatch-action:
needs: container-build-push
runs-on: ubuntu-latest
steps:
- name: Get latest commit info (latest)
if: github.event_name == 'push'
run: |
LATEST_COMMIT_HASH=$(echo ${{ github.event.after }} | cut -b -7)
echo "LATEST_COMMIT_HASH=${LATEST_COMMIT_HASH}" >> $GITHUB_ENV
- name: Dispatch event (latest)
if: github.event_name == 'push' && needs.container-build-push.outputs.prowler_version_major == '3'
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
- name: Dispatch event (release)
if: github.event_name == 'release' && needs.container-build-push.outputs.prowler_version_major == '3'
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'

View File

@@ -1,65 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: SDK - CodeQL
on:
push:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
paths-ignore:
- 'ui/**'
- 'api/**'
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
paths-ignore:
- 'ui/**'
- 'api/**'
schedule:
- cron: '00 12 * * *'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/sdk-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@@ -1,98 +0,0 @@
name: SDK - PyPI release
on:
release:
types: [published]
env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: 3.11
CACHE: "poetry"
jobs:
repository-check:
name: Repository check
runs-on: ubuntu-latest
outputs:
is_repo: ${{ steps.repository_check.outputs.is_repo }}
steps:
- name: Repository check
id: repository_check
working-directory: /tmp
run: |
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
then
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
else
echo "This action only runs for prowler-cloud/prowler"
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
fi
release-prowler-job:
runs-on: ubuntu-latest
needs: repository-check
if: needs.repository-check.outputs.is_repo == 'true'
env:
POETRY_VIRTUALENVS_CREATE: "false"
name: Release Prowler to PyPI
steps:
- name: Repository check
working-directory: /tmp
run: |
if [[ "${{ github.repository }}" != "prowler-cloud/prowler" ]]; then
echo "This action only runs for prowler-cloud/prowler"
exit 1
fi
- name: Get Prowler version
run: |
PROWLER_VERSION="${{ env.RELEASE_TAG }}"
case ${PROWLER_VERSION%%.*} in
3)
echo "Releasing Prowler v3 with tag ${PROWLER_VERSION}"
;;
4)
echo "Releasing Prowler v4 with tag ${PROWLER_VERSION}"
;;
5)
echo "Releasing Prowler v5 with tag ${PROWLER_VERSION}"
;;
*)
echo "Releasing another Prowler major version, aborting..."
exit 1
;;
esac
- uses: actions/checkout@v4
- name: Install dependencies
run: |
pipx install poetry==1.8.5
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
cache: ${{ env.CACHE }}
- name: Build Prowler package
run: |
poetry build
- name: Publish Prowler package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
- name: Replicate PyPI package
run: |
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
pip install toml
python util/replicate_pypi_package.py
poetry build
- name: Publish prowler-cloud package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish

View File

@@ -1,98 +0,0 @@
name: UI - Build and Push containers
on:
push:
branches:
- "master"
paths:
- "ui/**"
- ".github/workflows/ui-build-lint-push-containers.yml"
# Uncomment the below code to test this action on PRs
# pull_request:
# branches:
# - "master"
# paths:
# - "ui/**"
# - ".github/workflows/ui-build-lint-push-containers.yml"
release:
types: [published]
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
STABLE_TAG: stable
WORKING_DIRECTORY: ./ui
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
jobs:
repository-check:
name: Repository check
runs-on: ubuntu-latest
outputs:
is_repo: ${{ steps.repository_check.outputs.is_repo }}
steps:
- name: Repository check
id: repository_check
working-directory: /tmp
run: |
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
then
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
else
echo "This action only runs for prowler-cloud/prowler"
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
fi
# Build Prowler OSS container
container-build-push:
needs: repository-check
if: needs.repository-check.outputs.is_repo == 'true'
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ env.WORKING_DIRECTORY }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -1,59 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: UI - CodeQL
on:
push:
branches:
- "master"
- "v5.*"
paths:
- "ui/**"
pull_request:
branches:
- "master"
- "v5.*"
paths:
- "ui/**"
schedule:
- cron: "00 12 * * *"
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ["javascript"]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/ui-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@@ -1,41 +0,0 @@
name: UI - Pull Request
on:
push:
branches:
- "master"
- "v5.*"
paths:
- "ui/**"
pull_request:
branches:
- master
- "v5.*"
paths:
- 'ui/**'
jobs:
test-and-coverage:
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest]
node-version: [20.x]
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: Install dependencies
working-directory: ./ui
run: npm install
- name: Run Healthcheck
working-directory: ./ui
run: npm run healthcheck
- name: Build the application
working-directory: ./ui
run: npm run build

13
.gitignore vendored
View File

@@ -9,10 +9,8 @@
__pycache__
venv/
build/
/dist/
dist/
*.egg-info/
*/__pycache__/*.pyc
.idea/
# Session
Session.vim
@@ -47,16 +45,9 @@ junit-reports/
*.tfstate
# .env
ui/.env*
api/.env*
.env*
# Coverage
.coverage*
.coverage
coverage*
# Node
node_modules
# Persistent data
_data/

View File

@@ -1,7 +1,7 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
rev: v4.5.0
hooks:
- id: check-merge-conflict
- id: check-yaml
@@ -15,7 +15,7 @@ repos:
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.13.0
rev: v2.12.0
hooks:
- id: pretty-format-toml
args: [--autofix]
@@ -23,13 +23,12 @@ repos:
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.10.0
rev: v0.9.0
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/myint/autoflake
rev: v2.3.1
rev: v2.2.1
hooks:
- id: autoflake
args:
@@ -46,7 +45,7 @@ repos:
args: ["--profile", "black"]
- repo: https://github.com/psf/black
rev: 24.4.2
rev: 24.1.1
hooks:
- id: black
@@ -58,14 +57,14 @@ repos:
args: ["--ignore=E266,W503,E203,E501,W605"]
- repo: https://github.com/python-poetry/poetry
rev: 1.8.0
rev: 1.7.0
hooks:
- id: poetry-check
- id: poetry-lock
args: ["--no-update"]
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta
rev: v2.12.1-beta
hooks:
- id: hadolint
args: ["--ignore=DL3013"]
@@ -85,24 +84,24 @@ repos:
# For running trufflehog in docker, use the following entry instead:
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["pre-commit", "pre-push"]
stages: ["commit", "push"]
- id: bandit
name: bandit
description: "Bandit is a tool for finding common security issues in Python code"
entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/' -r .'
language: system
files: '.*\.py'
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
entry: bash -c 'safety check --ignore 70612,66963'
entry: bash -c 'safety check'
language: system
- id: vulture
name: vulture
description: "Vulture finds unused code in Python programs."
entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/" --min-confidence 100 .'
entry: bash -c 'vulture --exclude "contrib" --min-confidence 100 .'
language: system
files: '.*\.py'

View File

@@ -8,18 +8,16 @@ version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.11"
python: "3.9"
jobs:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- python -m pip install poetry
- pip install poetry
# Tell poetry to not use a virtual environment
- poetry config virtualenvs.create false
post_install:
# Install dependencies with 'docs' dependency group
# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
# VIRTUAL_ENV needs to be set manually for now.
# See https://github.com/readthedocs/readthedocs.org/pull/11152/
- VIRTUAL_ENV=${READTHEDOCS_VIRTUALENV_PATH} python -m poetry install --only=docs
- poetry install -E docs
mkdocs:
configuration: mkdocs.yml

View File

@@ -10,4 +10,4 @@
Want some swag as appreciation for your contribution?
# Prowler Developer Guide
https://docs.prowler.com/projects/prowler-open-source/en/latest/developer-guide/introduction/
https://docs.prowler.cloud/en/latest/tutorials/developer-guide/

View File

@@ -1,35 +1,31 @@
FROM python:3.12.8-alpine3.20
FROM python:3.11-alpine
LABEL maintainer="https://github.com/prowler-cloud/prowler"
# Update system dependencies and install essential tools
# Update system dependencies
#hadolint ignore=DL3018
RUN apk --no-cache upgrade && apk --no-cache add curl git
RUN apk --no-cache upgrade && apk --no-cache add curl
# Create non-root user
# Create nonroot user
RUN mkdir -p /home/prowler && \
echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
echo 'prowler:x:1000:' > /etc/group && \
chown -R prowler:prowler /home/prowler
USER prowler
# Copy necessary files
# Copy necessary files
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY prowler/ /home/prowler/prowler/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler
# Install Python dependencies
# Install dependencies
ENV HOME='/home/prowler'
ENV PATH="$HOME/.local/bin:$PATH"
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir .
# Remove deprecated dash dependencies
RUN pip uninstall dash-html-components -y && \
pip uninstall dash-core-components -y
# Remove Prowler directory and build files
USER 0
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info

View File

@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright @ 2024 Toni de la Fuente
Copyright 2018 Netflix, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

View File

@@ -27,7 +27,7 @@ lint: ## Lint Code
@echo "Running black... "
black --check .
@echo "Running pylint..."
pylint --disable=W,C,R,E -j 0 prowler util
pylint --disable=W,C,R,E -j 0 providers lib util config
##@ PyPI
pypi-clean: ## Delete the distribution files

358
README.md
View File

@@ -1,22 +1,17 @@
<p align="center">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png#gh-light-mode-only" width="50%" height="50%">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png?raw=True#gh-light-mode-only" width="350" height="115">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png?raw=True#gh-dark-mode-only" width="350" height="115">
</p>
<p align="center">
<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment theyre meant to protect. Trusted by the leaders in security.
<b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment theyre meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
</p>
<p align="center">
<a href="https://goto.prowler.com/slack"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
<br>
<a href="https://goto.prowler.com/slack">Join our Prowler community!</a>
</p>
<hr>
<p align="center">
<a href="https://goto.prowler.com/slack"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
@@ -29,7 +24,7 @@
<p align="center">
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler?include_prereleases"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>
@@ -37,211 +32,240 @@
<a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
</p>
<hr>
<p align="center">
<img align="center" src="/docs/img/prowler-cli-quick.gif" width="100%" height="100%">
</p>
# Description
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
## Prowler App
Prowler App is a web application that allows you to run Prowler in your cloud provider accounts and visualize the results in a user-friendly interface.
![Prowler App](docs/img/overview.png)
>More details at [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
## Prowler CLI
```console
prowler <provider>
```
![Prowler CLI Execution](docs/img/short-display.png)
## Prowler Dashboard
```console
prowler dashboard
```
![Prowler Dashboard](docs/img/dashboard.png)
`Prowler` is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.cloud/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.cloud/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 561 | 81 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 4 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 139 | 18 -> `prowler azure --list-services` | 4 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
| AWS | 302 | 61 -> `prowler aws --list-services` | 27 -> `prowler aws --list-compliance` | 6 -> `prowler aws --list-categories` |
| GCP | 73 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 37 | 4 -> `prowler azure --list-services` | CIS soon | 1 -> `prowler azure --list-categories` |
| Kubernetes | Work In Progress | - | CIS soon | - |
# 💻 Installation
# 📖 Documentation
## Prowler App
The full documentation can now be found at [https://docs.prowler.cloud](https://docs.prowler.cloud)
Prowler App can be installed in different ways, depending on your environment:
## Looking for Prowler v2 documentation?
For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.
> See how to use Prowler App in the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
# ⚙️ Install
### Docker Compose
**Requirements**
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
**Commands**
``` console
curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/docker-compose.yml
curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/.env
docker compose up -d
```
> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
### From GitHub
**Requirements**
* `git` installed.
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
**Commands to run the API**
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
set -a
source .env
docker compose up postgres valkey -d
cd src/backend
python manage.py migrate --database admin
gunicorn -c config/guniconf.py config.wsgi:application
```
> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.
**Commands to run the API Worker**
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
set -a
source .env
cd src/backend
python -m celery -A config.celery worker -l info -E
```
**Commands to run the API Scheduler**
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
set -a
source .env
cd src/backend
python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
**Commands to run the UI**
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/ui
npm install
npm run build
npm start
```
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
## Prowler CLI
### Pip package
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
## Pip package
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
```console
pip install prowler
prowler -v
```
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
More details at https://docs.prowler.cloud
### Containers
## Containers
The available versions of Prowler CLI are the following:
The available versions of Prowler are the following:
- `latest`: in sync with `master` branch (bear in mind that it is not a stable version)
- `v4-latest`: in sync with `v4` branch (bear in mind that it is not a stable version)
- `v3-latest`: in sync with `v3` branch (bear in mind that it is not a stable version)
- `latest`: in sync with master branch (bear in mind that it is not a stable version)
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
- `stable`: this tag always point to the latest release.
- `v4-stable`: this tag always point to the latest release for v4.
- `v3-stable`: this tag always point to the latest release for v3.
The container images are available here:
- Prowler CLI:
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
- Prowler App:
- [DockerHub - Prowler UI](https://hub.docker.com/r/prowlercloud/prowler-ui/tags)
- [DockerHub - Prowler API](https://hub.docker.com/r/prowlercloud/prowler-api/tags)
### From GitHub
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
## From Github
Python >= 3.9, < 3.13 is required with pip and poetry:
``` console
```
git clone https://github.com/prowler-cloud/prowler
cd prowler
poetry shell
poetry install
python prowler.py -v
```
> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture
## Prowler App
The **Prowler App** consists of three main components:
You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell and Cloud9.
- **Prowler UI**: A user-friendly web interface for running Prowler and viewing results, powered by Next.js.
- **Prowler API**: The backend API that executes Prowler scans and stores the results, built with Django REST Framework.
- **Prowler SDK**: A Python SDK that integrates with the Prowler CLI for advanced functionality.
![Architecture](https://github.com/prowler-cloud/prowler/assets/38561120/080261d9-773d-4af1-af79-217a273e3176)
![Prowler App Architecture](docs/img/prowler-app-architecture.png)
# 📝 Requirements
## Prowler CLI
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
Prowler has been written in Python using the [AWS SDK (Boto3)](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html#), [Azure SDK](https://azure.github.io/azure-sdk-for-python/) and [GCP API Python Client](https://github.com/googleapis/google-api-python-client/).
## AWS
![Architecture](docs/img/architecture.png)
Since Prowler uses AWS Credentials under the hood, you can follow any authentication method as described [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence).
Make sure you have properly configured your AWS-CLI with a valid Access Key and Region or declare AWS variables properly (or instance profile/role):
# Deprecations from v3
```console
aws configure
```
## General
- `Allowlist` now is called `Mutelist`.
- The `--quiet` option has been deprecated, now use the `--status` flag to select the finding's status you want to get from PASS, FAIL or MANUAL.
- All `INFO` finding's status has changed to `MANUAL`.
- The CSV output format is common for all the providers.
or
We have deprecated some of our outputs formats:
- The native JSON is replaced for the JSON [OCSF](https://schema.ocsf.io/) v1.1.0, common for all the providers.
```console
export AWS_ACCESS_KEY_ID="ASXXXXXXX"
export AWS_SECRET_ACCESS_KEY="XXXXXXXXX"
export AWS_SESSION_TOKEN="XXXXXXXXX"
```
Those credentials must be associated to a user or role with proper permissions to do all checks. To make sure, add the following AWS managed policies to the user or role being used:
- `arn:aws:iam::aws:policy/SecurityAudit`
- `arn:aws:iam::aws:policy/job-function/ViewOnlyAccess`
> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
## Azure
Prowler for Azure supports the following authentication types:
- Service principal authentication by environment variables (Enterprise Application)
- Current az cli credentials stored
- Interactive browser authentication
- Managed identity authentication
### Service Principal authentication
To allow Prowler assume the service principal identity to start the scan, it is needed to configure the following environment variables:
```console
export AZURE_CLIENT_ID="XXXXXXXXX"
export AZURE_TENANT_ID="XXXXXXXXX"
export AZURE_CLIENT_SECRET="XXXXXXX"
```
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
### AZ CLI / Browser / Managed Identity authentication
The other three cases do not need additional configuration, `--az-cli-auth` and `--managed-identity-auth` are automated options, `--browser-auth` needs the user to authenticate using the default browser to start the scan. Also `--browser-auth` needs the tenant id to be specified with `--tenant-id`.
### Permissions
To use each one, you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:
- **Azure Active Directory permissions**: Used to retrieve metadata from the identity assumed by Prowler and future AAD checks (not mandatory to have access to execute the tool)
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.
#### Azure Active Directory scope
Azure Active Directory (AAD) permissions required by the tool are the following:
- `Directory.Read.All`
- `Policy.Read.All`
#### Subscriptions scope
Regarding the subscription scope, Prowler by default scans all the subscriptions that is able to list, so it is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
- `Security Reader`
- `Reader`
## Google Cloud Platform
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
# 💻 Basic Usage
To run prowler, you will need to specify the provider (e.g aws or azure):
```console
prowler <provider>
```
![Prowler Execution](https://github.com/prowler-cloud/prowler/blob/b91b0103ff38e66a915c8a0ed84905a07e4aae1d/docs/img/short-display.png?raw=True)
> Running the `prowler` command without options will use your environment variable credentials.
By default, prowler will generate a CSV, a JSON and a HTML report, however you can generate JSON-ASFF (only for AWS Security Hub) report with `-M` or `--output-modes`:
```console
prowler <provider> -M csv json json-asff html
```
The html report will be located in the `output` directory as the other files and it will look like:
![Prowler Execution](https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/html-output.png?raw=True)
You can use `-l`/`--list-checks` or `--list-services` to list all available checks or services within the provider.
```console
prowler <provider> --list-checks
prowler <provider> --list-services
```
For executing specific checks or services you can use options `-c`/`--checks` or `-s`/`--services`:
```console
prowler aws --checks s3_bucket_public_access
prowler aws --services s3 ec2
```
Also, checks and services can be excluded with options `-e`/`--excluded-checks` or `--excluded-services`:
```console
prowler aws --excluded-checks s3_bucket_public_access
prowler aws --excluded-services s3 ec2
```
You can always use `-h`/`--help` to access to the usage information and all the possible options:
```console
prowler -h
```
## Checks Configurations
Several Prowler's checks have user configurable variables that can be modified in a common **configuration file**.
This file can be found in the following path:
```
prowler/config/config.yaml
```
## AWS
- Deprecate the AWS flag --sts-endpoint-region since we use AWS STS regional tokens.
- To send only FAILS to AWS Security Hub, now use either `--send-sh-only-fails` or `--security-hub --status FAIL`.
Use a custom AWS profile with `-p`/`--profile` and/or AWS regions which you want to audit with `-f`/`--filter-region`:
# 📖 Documentation
```console
prowler aws --profile custom-profile -f us-east-1 eu-south-2
```
> By default, `prowler` will scan all AWS regions.
Install, Usage, Tutorials and Developer Guide is at https://docs.prowler.com/
## Azure
With Azure you need to specify which auth method is going to be used:
```console
prowler azure [--sp-env-auth, --az-cli-auth, --browser-auth, --managed-identity-auth]
```
> By default, `prowler` will scan all Azure subscriptions.
## Google Cloud Platform
Optionally, you can provide the location of an application credential JSON file with the following argument:
```console
prowler gcp --credentials-file path
```
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
# 📃 License

View File

@@ -12,7 +12,7 @@ As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (F
## Reporting a Vulnerability
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting to https://support.prowler.com.
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting to help@prowler.pro.
The information you share with ProwlerPro as part of this process is kept confidential within ProwlerPro. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.

View File

@@ -1,41 +0,0 @@
# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8000
DJANGO_DEBUG=False
# Select one of [production|devel]
DJANGO_SETTINGS_MODULE=config.django.[production|devel]
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=[ndjson|human_readable]
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
DJANGO_WORKERS=4 # Defaults to the maximum available based on CPU cores if not set.
DJANGO_TOKEN_SIGNING_KEY=""
DJANGO_TOKEN_VERIFYING_KEY=""
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_SECRETS_ENCRYPTION_KEY=""
# Decide whether to allow Django manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
# PostgreSQL settings
# If running django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=[localhost|postgres-db]
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler
POSTGRES_ADMIN_PASSWORD=S3cret
POSTGRES_USER=prowler_user
POSTGRES_PASSWORD=S3cret
POSTGRES_DB=prowler_db
# Valkey settings
# If running django and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0

168
api/.gitignore vendored
View File

@@ -1,168 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.pyc
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
/_data/
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
*.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/
# VSCode
.vscode/

View File

@@ -1,91 +0,0 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-merge-conflict
- id: check-yaml
args: ["--unsafe"]
- id: check-json
- id: end-of-file-fixer
- id: trailing-whitespace
- id: no-commit-to-branch
- id: pretty-format-json
args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
exclude: 'src/backend/api/fixtures/dev/.*\.json$'
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.13.0
hooks:
- id: pretty-format-toml
args: [--autofix]
files: pyproject.toml
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.10.0
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.5.0
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format
- repo: https://github.com/python-poetry/poetry
rev: 1.8.0
hooks:
- id: poetry-check
args: ["--directory=src"]
- id: poetry-lock
args: ["--no-update", "--directory=src"]
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta
hooks:
- id: hadolint
args: ["--ignore=DL3013", "Dockerfile"]
- repo: local
hooks:
- id: pylint
name: pylint
entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
language: system
files: '.*\.py'
- id: trufflehog
name: TruffleHog
description: Detect secrets in your data.
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
# For running trufflehog in docker, use the following entry instead:
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["commit", "push"]
- id: bandit
name: bandit
description: "Bandit is a tool for finding common security issues in Python code"
entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
language: system
files: '.*\.py'
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
entry: bash -c 'poetry run safety check --ignore 70612,66963'
language: system
- id: vulture
name: vulture
description: "Vulture finds unused code in Python programs."
entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
language: system
files: '.*\.py'

View File

@@ -1,46 +0,0 @@
# Base build stage: system build deps, unprivileged user, Poetry install.
FROM python:3.12.8-alpine3.20 AS build
LABEL maintainer="https://github.com/prowler-cloud/api"
# Build headers needed to compile native wheels (psycopg2, etc.).
# hadolint ignore=DL3018
RUN apk --no-cache add gcc python3-dev musl-dev linux-headers curl-dev
# Patch the base image and create an unprivileged user/group (uid/gid 1000).
RUN apk --no-cache upgrade && \
addgroup -g 1000 prowler && \
adduser -D -u 1000 -G prowler prowler
USER prowler
WORKDIR /home/prowler
COPY pyproject.toml ./
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry
COPY src/backend/ ./backend/
# User-level pip installs (including poetry) land in ~/.local/bin.
ENV PATH="/home/prowler/.local/bin:$PATH"
RUN poetry install && \
rm -rf ~/.cache/pip
COPY docker-entrypoint.sh ./docker-entrypoint.sh
WORKDIR /home/prowler/backend
# Development image
# hadolint ignore=DL3006
FROM build AS dev
# Become root only to install debugging tools, then drop back.
USER 0
# hadolint ignore=DL3018
RUN apk --no-cache add curl vim
USER prowler
ENTRYPOINT ["../docker-entrypoint.sh", "dev"]
# Production image
FROM build
ENTRYPOINT ["../docker-entrypoint.sh", "prod"]

View File

@@ -1,271 +0,0 @@
# Description
This repository contains the JSON API and Task Runner components for Prowler, which facilitate a complete backend that interacts with the Prowler SDK and is used by the Prowler UI.
# Components
The Prowler API is composed of the following components:
- The JSON API, which is an API built with Django Rest Framework.
- The Celery worker, which is responsible for executing the background tasks that are defined in the JSON API.
- The PostgreSQL database, which is used to store the data.
- The Valkey database, which is an in-memory database which is used as a message broker for the Celery workers.
## Note about Valkey
[Valkey](https://valkey.io/) is an open source (BSD) high performance key/value datastore.
Valkey exposes a Redis 7.2 compliant API. Any service that exposes the Redis API can be used with Prowler API.
# Modify environment variables
Under the root path of the project, you can find a file called `.env.example`. This file shows all the environment variables that the project uses. You *must* create a new file called `.env` and set the values for the variables.
## Local deployment
Keep in mind if you export the `.env` file to use it with local deployment that you will have to do it within the context of the Poetry interpreter, not before. Otherwise, variables will not be loaded properly.
To do this, you can run:
```console
poetry shell
set -a
source .env
```
# 🚀 Production deployment
## Docker deployment
This method requires `docker` and `docker compose`.
### Clone the repository
```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git
# SSH
git clone git@github.com:prowler-cloud/api.git
```
### Build the base image
```console
docker compose --profile prod build
```
### Run the production service
This command will start the Django production server and the Celery worker and also the Valkey and PostgreSQL databases.
```console
docker compose --profile prod up -d
```
You can access the server in `http://localhost:8080`.
> **NOTE:** notice how the port is different. When developing using docker, the port will be `8080` to prevent conflicts.
### View the Production Server Logs
To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:
```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```
## Local deployment
To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.
### Clone the repository
```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git
# SSH
git clone git@github.com:prowler-cloud/api.git
```
### Install all dependencies with Poetry
```console
poetry install
poetry shell
```
## Start the PostgreSQL Database and Valkey
The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.
**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.
```console
docker compose up postgres valkey -d
```
## Deploy Django and the Celery worker
### Run migrations
For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:
```console
cd src/backend
python manage.py migrate --database admin
```
### Run the Celery worker
```console
cd src/backend
python -m celery -A config.celery worker -l info -E
```
### Run the Django server with Gunicorn
```console
cd src/backend
gunicorn -c config/guniconf.py config.wsgi:application
```
> By default, the Gunicorn server will try to use as many workers as your machine can handle. You can manually change that in the `src/backend/config/guniconf.py` file.
# 🧪 Development guide
## Local deployment
To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.
### Clone the repository
```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git
# SSH
git clone git@github.com:prowler-cloud/api.git
```
### Start the PostgreSQL Database and Valkey
The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.
**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.
```console
docker compose up postgres valkey -d
```
### Install the Python dependencies
> You must have Poetry installed
```console
poetry install
poetry shell
```
### Apply migrations
For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:
```console
cd src/backend
python manage.py migrate --database admin
```
### Run the Django development server
```console
cd src/backend
python manage.py runserver
```
You can access the server in `http://localhost:8000`.
All changes in the code will be automatically reloaded in the server.
### Run the Celery worker
```console
python -m celery -A config.celery worker -l info -E
```
The Celery worker does not detect and reload changes in the code, so you need to restart it manually when you make changes.
## Docker deployment
This method requires `docker` and `docker compose`.
### Clone the repository
```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git
# SSH
git clone git@github.com:prowler-cloud/api.git
```
### Build the base image
```console
docker compose --profile dev build
```
### Run the development service
This command will start the Django development server and the Celery worker and also the Valkey and PostgreSQL databases.
```console
docker compose --profile dev up -d
```
You can access the server in `http://localhost:8080`.
All changes in the code will be automatically reloaded in the server.
> **NOTE:** notice how the port is different. When developing using docker, the port will be `8080` to prevent conflicts.
### View the development server logs
To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:
```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```
## Applying migrations
For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:
```console
poetry shell
cd src/backend
python manage.py migrate --database admin
```
## Apply fixtures
Fixtures are used to populate the database with initial development data.
```console
poetry shell
cd src/backend
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
```
> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`
## Run tests
Note that the tests will fail if you use the same `.env` file as the development environment.
For best results, run in a new shell with no environment variables set.
```console
poetry shell
cd src/backend
pytest
```

View File

@@ -1,125 +0,0 @@
services:
api:
build:
dockerfile: Dockerfile
image: prowler-api
env_file:
- path: ./.env
required: false
ports:
- "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
profiles:
- prod
depends_on:
postgres:
condition: service_healthy
valkey:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "prod"
api-dev:
build:
dockerfile: Dockerfile
target: dev
image: prowler-api-dev
environment:
- DJANGO_SETTINGS_MODULE=config.django.devel
- DJANGO_LOGGING_FORMATTER=human_readable
env_file:
- path: ./.env
required: false
ports:
- "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
volumes:
- "./src/backend:/home/prowler/backend"
- "./pyproject.toml:/home/prowler/pyproject.toml"
profiles:
- dev
depends_on:
postgres:
condition: service_healthy
valkey:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "dev"
postgres:
image: postgres:16.3-alpine
ports:
- "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
hostname: "postgres-db"
volumes:
- ./_data/postgres:/var/lib/postgresql/data
environment:
- POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
- POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
- POSTGRES_DB=${POSTGRES_DB:-prowler_db}
env_file:
- path: ./.env
required: false
healthcheck:
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
interval: 5s
timeout: 5s
retries: 5
valkey:
image: valkey/valkey:7-alpine3.19
ports:
- "${VALKEY_PORT:-6379}:6379"
hostname: "valkey"
volumes:
- ./_data/valkey:/data
env_file:
- path: ./.env
required: false
healthcheck:
test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
interval: 10s
timeout: 5s
retries: 3
worker:
build:
dockerfile: Dockerfile
image: prowler-worker
environment:
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
env_file:
- path: ./.env
required: false
profiles:
- dev
- prod
depends_on:
valkey:
condition: service_healthy
postgres:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "worker"
worker-beat:
build:
dockerfile: Dockerfile
image: prowler-worker
environment:
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
env_file:
- path: ./.env
required: false
profiles:
- dev
- prod
depends_on:
valkey:
condition: service_healthy
postgres:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "beat"

View File

@@ -1,71 +0,0 @@
#!/bin/sh
# Container entrypoint for the Prowler API images. Dispatches on the first
# argument ("dev", "prod", "worker" or "beat") to start the matching process.
# Apply Django migrations through the privileged "admin" database router.
apply_migrations() {
echo "Applying database migrations..."
poetry run python manage.py migrate --database admin
}
# Load every JSON fixture under api/fixtures/dev/ (development data only).
apply_fixtures() {
echo "Applying Django fixtures..."
for fixture in api/fixtures/dev/*.json; do
if [ -f "$fixture" ]; then
echo "Loading $fixture"
poetry run python manage.py loaddata "$fixture" --database admin
fi
done
}
# Start the Django development server, bound to all interfaces.
start_dev_server() {
echo "Starting the development server..."
poetry run python manage.py runserver 0.0.0.0:"${DJANGO_PORT:-8080}"
}
# Start the production WSGI server (Gunicorn) with the project config.
start_prod_server() {
echo "Starting the Gunicorn server..."
poetry run gunicorn -c config/guniconf.py config.wsgi:application
}
# Start a Celery worker consuming the "celery" and "scans" queues (-E
# enables task events for monitoring).
start_worker() {
echo "Starting the worker..."
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E
}
# Start the Celery beat scheduler backed by django-celery-beat.
# NOTE(review): the sleep presumably lets the api/worker containers finish
# migrations before beat touches its database tables — confirm.
start_worker_beat() {
echo "Starting the worker-beat..."
sleep 15
poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
}
# When DJANGO_MANAGE_DB_PARTITIONS=True, create findings-table partitions.
manage_db_partitions() {
if [ "${DJANGO_MANAGE_DB_PARTITIONS}" = "True" ]; then
echo "Managing DB partitions..."
# For now we skip the deletion of partitions until we define the data retention policy
# --yes auto approves the operation without the need of an interactive terminal
poetry run python manage.py pgpartition --using admin --skip-delete --yes
fi
}
case "$1" in
dev)
apply_migrations
apply_fixtures
manage_db_partitions
start_dev_server
;;
prod)
apply_migrations
manage_db_partitions
start_prod_server
;;
worker)
start_worker
;;
beat)
start_worker_beat
;;
*)
echo "Usage: $0 {dev|prod|worker|beat}"
exit 1
;;
esac

View File

@@ -1,65 +0,0 @@
# Partitions
## Overview
Partitions are used to split the data in a table into smaller chunks, allowing for more efficient querying and storage.
The Prowler API uses partitions to store findings. The partitions are created based on the UUIDv7 `id` field.
You can use the Prowler API without ever creating additional partitions. This documentation is only relevant if you want to manage partitions to gain additional query performance.
### Required Postgres Configuration
There are 3 configuration options that need to be set in the `postgres.conf` file to get the most performance out of the partitioning:
- `enable_partition_pruning = on` (default is on)
- `enable_partitionwise_join = on` (default is off)
- `enable_partitionwise_aggregate = on` (default is off)
For more information on these options, see the [Postgres documentation](https://www.postgresql.org/docs/current/runtime-config-query.html).
## Partitioning Strategy
The partitioning strategy is defined in the `api.partitions` module. The strategy is responsible for creating and deleting partitions based on the provided configuration.
## Managing Partitions
The application will run without any extra work on your part. If you want to add or delete partitions, you can use the following commands:
To manage the partitions, run `python manage.py pgpartition --using admin`
This command will generate a list of partitions to create and delete based on the provided configuration.
By default, the command will prompt you to accept the changes before applying them.
```shell
Finding:
+ 2024_nov
name: 2024_nov
from_values: 0192e505-9000-72c8-a47c-cce719d8fb93
to_values: 01937f84-5418-7eb8-b2a6-e3be749e839d
size_unit: months
size_value: 1
+ 2024_dec
name: 2024_dec
from_values: 01937f84-5800-7b55-879c-9cdb46f023f6
to_values: 01941f29-7818-7f9f-b4be-20b05bb2f574
size_unit: months
size_value: 1
0 partitions will be deleted
2 partitions will be created
```
If you choose to apply the partitions, tables will be generated with the following format: `<table_name>_<year>_<month>`.
For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra
### Changing the Partitioning Parameters
There are 4 environment variables that can be used to change the partitioning parameters:
- `DJANGO_MANAGE_DB_PARTITIONS`: Allow Django to manage database partitions. By default it is set to `False`.
- `FINDINGS_TABLE_PARTITION_MONTHS`: Set the months for each partition. Setting the partition months to 1 will create partitions with a size of 1 natural month.
- `FINDINGS_TABLE_PARTITION_COUNT`: Set the number of partitions to create
- `FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS`: Set the number of months to keep partitions before deleting them. Setting this to `None` will keep partitions indefinitely.

5073
api/poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,55 +0,0 @@
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.poetry]
authors = ["Prowler Team"]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
version = "1.1.0"
[tool.poetry.dependencies]
celery = {extras = ["pytest"], version = "^5.4.0"}
django = "5.1.4"
django-celery-beat = "^2.7.0"
django-celery-results = "^2.5.1"
django-cors-headers = "4.4.0"
django-environ = "0.11.2"
django-filter = "24.3"
django-guid = "3.5.0"
django-postgres-extra = "^2.0.8"
djangorestframework = "3.15.2"
djangorestframework-jsonapi = "7.0.2"
djangorestframework-simplejwt = "^5.3.1"
drf-nested-routers = "^0.94.1"
drf-spectacular = "0.27.2"
drf-spectacular-jsonapi = "0.5.1"
gunicorn = "23.0.0"
prowler = "^5.0"
psycopg2-binary = "2.9.9"
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
# Needed for prowler compatibility
python = ">=3.11,<3.13"
uuid6 = "2024.7.10"
[tool.poetry.group.dev.dependencies]
bandit = "1.7.9"
coverage = "7.5.4"
docker = "7.1.0"
freezegun = "1.5.1"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "8.2.2"
pytest-cov = "5.0.0"
pytest-django = "4.8.0"
pytest-env = "1.1.3"
pytest-randomly = "3.15.0"
pytest-xdist = "3.6.1"
ruff = "0.5.0"
safety = "3.2.9"
vulture = "2.14"
[tool.poetry.scripts]
celery = "src.backend.config.settings.celery"

View File

@@ -1,3 +0,0 @@
# from django.contrib import admin
# Register your models here.

View File

@@ -1,12 +0,0 @@
from django.apps import AppConfig
class ApiConfig(AppConfig):
    """Django application configuration for the ``api`` app."""

    # Default primary-key field type for models in this app.
    default_auto_field = "django.db.models.BigAutoField"
    name = "api"

    def ready(self):
        """Run once the app registry is ready: register signal handlers and
        preload Prowler compliance data into module-level caches."""
        # Importing the module is enough to connect its signal receivers.
        from api import signals  # noqa: F401
        from api.compliance import load_prowler_compliance

        load_prowler_compliance()

View File

@@ -1,152 +0,0 @@
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from rest_framework import permissions
from rest_framework.exceptions import NotAuthenticated
from rest_framework.filters import SearchFilter
from rest_framework_json_api import filters
from rest_framework_json_api.views import ModelViewSet
from rest_framework_simplejwt.authentication import JWTAuthentication
from api.db_router import MainRouter
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
from api.filters import CustomDjangoFilterBackend
from api.models import Role, Tenant
from api.rbac.permissions import HasPermissions
class BaseViewSet(ModelViewSet):
    """Common base viewset: JWT authentication, RBAC permission checks and
    the standard JSON:API filter/search/ordering backends."""

    authentication_classes = [JWTAuthentication]
    # Permissions a subclass requires; evaluated by HasPermissions.
    required_permissions = []
    permission_classes = [permissions.IsAuthenticated, HasPermissions]
    filter_backends = [
        filters.QueryParameterValidationFilter,
        filters.OrderingFilter,
        CustomDjangoFilterBackend,
        SearchFilter,
    ]
    filterset_fields = []
    search_fields = []
    ordering_fields = "__all__"
    # Deterministic default ordering so pagination stays stable.
    ordering = ["id"]

    def initial(self, request, *args, **kwargs):
        """
        Sets required_permissions before permissions are checked.
        """
        self.set_required_permissions()
        super().initial(request, *args, **kwargs)

    def set_required_permissions(self):
        """This is an abstract method that must be implemented by subclasses."""
        # NOTE(review): bare `NotImplemented` is a no-op expression, not a
        # raise; subclasses that forget to override will silently keep an
        # empty required_permissions list — confirm this is intentional.
        NotImplemented

    def get_queryset(self):
        # Subclasses must define their own queryset.
        raise NotImplementedError
class BaseRLSViewSet(BaseViewSet):
    """Base viewset for tenant-scoped models protected by Postgres
    row-level security (RLS)."""

    def dispatch(self, request, *args, **kwargs):
        # Wrap the whole request in one transaction so the RLS session
        # variable set in `initial` remains in effect for every query.
        with transaction.atomic():
            return super().dispatch(request, *args, **kwargs)

    def initial(self, request, *args, **kwargs):
        # Ideally, this logic would be in the `.setup()` method but DRF view sets don't call it
        # https://docs.djangoproject.com/en/5.1/ref/class-based-views/base/#django.views.generic.base.View.setup
        if request.auth is None:
            raise NotAuthenticated
        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")
        # Scope the rest of the DRF pipeline to this tenant via RLS.
        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)

    def get_serializer_context(self):
        # Expose the tenant to serializers.
        context = super().get_serializer_context()
        context["tenant_id"] = self.request.tenant_id
        return context
class BaseTenantViewset(BaseViewSet):
    """Base viewset for the Tenant resource: creates the default "admin"
    role on tenant creation and rolls the tenant back on failure."""

    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            tenant = super().dispatch(request, *args, **kwargs)
            try:
                # If the request is a POST, create the admin role
                # NOTE(review): `tenant` here is a DRF Response, so
                # `isinstance(tenant, dict)` is normally False and the
                # short-circuit skips role creation — confirm intent.
                if request.method == "POST":
                    isinstance(tenant, dict) and self._create_admin_role(tenant.data["id"])
            except Exception as e:
                self._handle_creation_error(e, tenant)
                raise
            return tenant

    def _create_admin_role(self, tenant_id):
        # The admin role gets every management permission plus unlimited
        # visibility over the tenant's resources.
        Role.objects.using(MainRouter.admin_db).create(
            name="admin",
            tenant_id=tenant_id,
            manage_users=True,
            manage_account=True,
            manage_billing=True,
            manage_providers=True,
            manage_integrations=True,
            manage_scans=True,
            unlimited_visibility=True,
        )

    def _handle_creation_error(self, error, tenant):
        # Best-effort rollback: delete the tenant created earlier in this
        # request if role creation failed. `error` is kept for signature
        # symmetry; it is re-raised by the caller.
        if tenant.data.get("id"):
            try:
                Tenant.objects.using(MainRouter.admin_db).filter(
                    id=tenant.data["id"]
                ).delete()
            except ObjectDoesNotExist:
                pass  # Tenant might not exist, handle gracefully

    def initial(self, request, *args, **kwargs):
        # Non-detail, non-delete requests operate on the set of tenants the
        # user belongs to, so RLS is scoped by user id instead of tenant id.
        if (
            request.resolver_match.url_name != "tenant-detail"
            and request.method != "DELETE"
        ):
            user_id = str(request.user.id)
            with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
                return super().initial(request, *args, **kwargs)
        # TODO: DRY this when we have time
        if request.auth is None:
            raise NotAuthenticated
        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")
        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)
class BaseUserViewset(BaseViewSet):
    """Base viewset for user endpoints; POST (sign-up) is the only action
    allowed without an authenticated tenant context."""

    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            return super().dispatch(request, *args, **kwargs)

    def initial(self, request, *args, **kwargs):
        # TODO refactor after improving RLS on users
        # NOTE(review): this inspects request.stream.method — presumably to
        # let unauthenticated POST (user creation) through; confirm `stream`
        # is the intended attribute rather than `request.method`.
        if request.stream is not None and request.stream.method == "POST":
            return super().initial(request, *args, **kwargs)
        if request.auth is None:
            raise NotAuthenticated
        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")
        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)

View File

@@ -1,209 +0,0 @@
from types import MappingProxyType
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata
from api.models import Provider
# Module-level caches populated once by load_prowler_compliance(); both are
# replaced with read-only MappingProxyType views after loading.
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
PROWLER_CHECKS = {}
def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
    """Return every Prowler check ID for the given provider type.

    Args:
        provider_type (Provider.ProviderChoices): The provider type
            (e.g., 'aws', 'azure') for which to retrieve check IDs.

    Returns:
        Iterable[str]: An iterable of check IDs associated with the
        specified provider type.
    """
    bulk_check_metadata = CheckMetadata.get_bulk(provider_type)
    return bulk_check_metadata.keys()
def get_prowler_provider_compliance(provider_type: Provider.ProviderChoices) -> dict:
    """
    Retrieve the Prowler compliance data for a specified provider type.
    This function fetches the compliance frameworks and their associated
    requirements for the given cloud provider.
    Args:
        provider_type (Provider.ProviderChoices): The provider type
        (e.g., 'aws', 'azure') for which to retrieve compliance data.
    Returns:
        dict: A dictionary mapping compliance framework names to their respective
        Compliance objects for the specified provider.
    """
    # Thin delegation to the Prowler SDK; kept as a named function so the
    # compliance source can be swapped or mocked in one place.
    return Compliance.get_bulk(provider_type)
def load_prowler_compliance():
    """
    Load and initialize the Prowler compliance data and checks for all provider types.
    This function retrieves compliance data for all supported provider types,
    generates a compliance overview template, and populates the global variables
    `PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE` and `PROWLER_CHECKS` with read-only mappings
    of the compliance templates and checks, respectively.
    """
    global PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
    global PROWLER_CHECKS
    # Fetch compliance frameworks once per provider type.
    prowler_compliance = {
        provider_type: get_prowler_provider_compliance(provider_type)
        for provider_type in Provider.ProviderChoices.values
    }
    template = generate_compliance_overview_template(prowler_compliance)
    # Expose both caches as read-only mappings so callers cannot mutate them.
    PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = MappingProxyType(template)
    PROWLER_CHECKS = MappingProxyType(load_prowler_checks(prowler_compliance))
def load_prowler_checks(prowler_compliance):
    """Map each check to the compliance frameworks that include it.

    Args:
        prowler_compliance (dict): The compliance data for all provider types,
            as returned by `get_prowler_provider_compliance`.

    Returns:
        dict: A nested dictionary where the first-level keys are provider
        types, and the values are dictionaries mapping check IDs to sets of
        compliance names.
    """
    check_map = {}
    for provider_type in Provider.ProviderChoices.values:
        # Start with an empty set of compliance names for every known check.
        provider_checks = {
            check_id: set() for check_id in get_prowler_provider_checks(provider_type)
        }
        check_map[provider_type] = provider_checks
        provider_compliance = prowler_compliance[provider_type]
        for compliance_name, compliance_data in provider_compliance.items():
            for requirement in compliance_data.Requirements:
                for check in requirement.Checks:
                    # Ignore checks referenced by a framework but not
                    # present in the provider's check metadata.
                    if check in provider_checks:
                        provider_checks[check].add(compliance_name)
    return check_map
def generate_scan_compliance(
    compliance_overview, provider_type: str, check_id: str, status: str
):
    """
    Update the compliance overview with the status of a specific check.
    This function updates the compliance overview by setting the status of the given check
    within all compliance frameworks and requirements that include it. It then updates the
    requirement status to 'FAIL' if any of its checks have failed, and adjusts the counts
    of passed and failed requirements in the compliance overview.
    Args:
        compliance_overview (dict): The compliance overview data structure to update.
        provider_type (str): The provider type (e.g., 'aws', 'azure') associated with the check.
        check_id (str): The identifier of the check whose status is being updated.
        status (str): The status of the check (e.g., 'PASS', 'FAIL', 'MUTED').
    Returns:
        None: This function modifies the compliance_overview in place.
    """
    # PROWLER_CHECKS maps each check to the frameworks that include it.
    for compliance_id in PROWLER_CHECKS[provider_type][check_id]:
        for requirement in compliance_overview[compliance_id]["requirements"].values():
            if check_id in requirement["checks"]:
                requirement["checks"][check_id] = status
                # NOTE(review): checks_status is initialized with the keys
                # pass/fail/manual/total only (see the overview template);
                # a status such as "MUTED" would raise KeyError here —
                # confirm which statuses reach this function.
                requirement["checks_status"][status.lower()] += 1
                # Flip the requirement to FAIL exactly once: the guard on
                # the current status keeps the passed/failed counters from
                # being adjusted more than once per requirement.
                if requirement["status"] != "FAIL" and any(
                    value == "FAIL" for value in requirement["checks"].values()
                ):
                    requirement["status"] = "FAIL"
                    compliance_overview[compliance_id]["requirements_status"]["passed"] -= 1
                    compliance_overview[compliance_id]["requirements_status"]["failed"] += 1
def generate_compliance_overview_template(prowler_compliance: dict):
    """
    Generate a compliance overview template for all provider types.
    This function creates a nested dictionary structure representing the compliance
    overview template for each provider type, compliance framework, and requirement.
    It initializes the status of all checks and requirements, and calculates initial
    counts for requirements status.
    Args:
        prowler_compliance (dict): The compliance data for all provider types,
        as returned by `get_prowler_provider_compliance`.
    Returns:
        dict: A nested dictionary representing the compliance overview template,
        structured by provider type and compliance framework.
    """
    template = {}
    for provider_type in Provider.ProviderChoices.values:
        provider_compliance = template.setdefault(provider_type, {})
        compliance_data_dict = prowler_compliance[provider_type]
        for compliance_name, compliance_data in compliance_data_dict.items():
            compliance_requirements = {}
            requirements_status = {"passed": 0, "failed": 0, "manual": 0}
            total_requirements = 0
            for requirement in compliance_data.Requirements:
                total_requirements += 1
                total_checks = len(requirement.Checks)
                # Each check starts with no result (None) until a scan fills it in.
                checks_dict = {check: None for check in requirement.Checks}
                # Build requirement dictionary
                requirement_dict = {
                    "name": requirement.Name or requirement.Id,
                    "description": requirement.Description,
                    "attributes": [
                        dict(attribute) for attribute in requirement.Attributes
                    ],
                    "checks": checks_dict,
                    "checks_status": {
                        "pass": 0,
                        "fail": 0,
                        "manual": 0,
                        "total": total_checks,
                    },
                    "status": "PASS",
                }
                # Update requirements status
                # A requirement with no automated checks is a manual one.
                if total_checks == 0:
                    requirements_status["manual"] += 1
                # Add requirement to compliance requirements
                compliance_requirements[requirement.Id] = requirement_dict
            # Calculate pending requirements
            # All non-manual requirements start as "passed"; scans later move
            # failing ones into "failed" (see generate_scan_compliance).
            pending_requirements = total_requirements - requirements_status["manual"]
            requirements_status["passed"] = pending_requirements
            # Build compliance dictionary
            compliance_dict = {
                "framework": compliance_data.Framework,
                "version": compliance_data.Version,
                "provider": provider_type,
                "description": compliance_data.Description,
                "requirements": compliance_requirements,
                "requirements_status": requirements_status,
                "total_requirements": total_requirements,
            }
            # Add compliance to provider compliance
            provider_compliance[compliance_name] = compliance_dict
    return template

View File

@@ -1,18 +0,0 @@
class MainRouter:
    """Database router that pins Django's internal tables to the admin DB.

    Any model whose table name starts with ``django_`` (sessions,
    migrations, celery results, ...) is read from and written to the
    ``admin`` alias; every other model falls through to Django's default
    routing by returning ``None``. Migrations are only permitted on the
    admin database.
    """

    default_db = "default"
    admin_db = "admin"

    def _is_internal_table(self, model) -> bool:
        # Django's bookkeeping tables all share the "django_" prefix.
        return model._meta.db_table.startswith("django_")

    def db_for_read(self, model, **hints):  # noqa: F841
        """Route reads of Django-internal tables to the admin database."""
        return self.admin_db if self._is_internal_table(model) else None

    def db_for_write(self, model, **hints):  # noqa: F841
        """Route writes of Django-internal tables to the admin database."""
        return self.admin_db if self._is_internal_table(model) else None

    def allow_migrate(self, db, app_label, model_name=None, **hints):  # noqa: F841
        """Allow migrations only when targeting the admin database."""
        return db == self.admin_db

View File

@@ -1,320 +0,0 @@
import secrets
import uuid
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
from django.conf import settings
from django.contrib.auth.models import BaseUserManager
from django.db import connection, models, transaction
from psycopg2 import connect as psycopg2_connect
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
from rest_framework_json_api.serializers import ValidationError
# Credentials for the privileged "default" (admin) database connection.
# During test runs fixed "test" values are used instead of real settings.
DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
DB_PASSWORD = (
    settings.DATABASES["default"]["PASSWORD"] if not settings.TESTING else "test"
)
# Credentials for the unprivileged application role ("prowler_user" alias).
DB_PROWLER_USER = (
    settings.DATABASES["prowler_user"]["USER"] if not settings.TESTING else "test"
)
DB_PROWLER_PASSWORD = (
    settings.DATABASES["prowler_user"]["PASSWORD"] if not settings.TESTING else "test"
)
# Table written by django-celery-results for task state/results.
TASK_RUNNER_DB_TABLE = "django_celery_results_taskresult"
# Postgres session-config parameter names consumed by RLS policies.
POSTGRES_TENANT_VAR = "api.tenant_id"
POSTGRES_USER_VAR = "api.user_id"
# set_config with is_local=TRUE: the value lives only for the current transaction.
SET_CONFIG_QUERY = "SELECT set_config(%s, %s::text, TRUE);"
@contextmanager
def psycopg_connection(database_alias: str):
    """Yield a raw psycopg2 connection for the given Django database alias.

    Bypasses Django's connection handling and connects directly with the
    credentials from ``settings.DATABASES[database_alias]``. The connection
    is always closed on exit, even if the body raises.

    Args:
        database_alias (str): Key into ``settings.DATABASES``.

    Yields:
        psycopg2 connection object.
    """
    psycopg2_connection = None
    try:
        admin_db = settings.DATABASES[database_alias]
        psycopg2_connection = psycopg2_connect(
            dbname=admin_db["NAME"],
            user=admin_db["USER"],
            password=admin_db["PASSWORD"],
            host=admin_db["HOST"],
            port=admin_db["PORT"],
        )
        yield psycopg2_connection
    finally:
        if psycopg2_connection is not None:
            psycopg2_connection.close()
@contextmanager
def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
    """
    Creates a new database transaction setting the given configuration value for Postgres RLS.
    It validates that the value is a valid UUID before applying it.

    Args:
        value (str): Database configuration parameter value.
        parameter (str): Database configuration parameter name, by default is 'api.tenant_id'.

    Yields:
        Database cursor with the configuration value set for the transaction.

    Raises:
        ValidationError: If ``value`` is not a valid UUID.
    """
    with transaction.atomic():
        with connection.cursor() as cursor:
            try:
                # just in case the value is an UUID object
                uuid.UUID(str(value))
            except ValueError:
                raise ValidationError("Must be a valid UUID")
            # SET_CONFIG_QUERY uses is_local=TRUE, so the setting expires
            # with this transaction.
            cursor.execute(SET_CONFIG_QUERY, [parameter, value])
            yield cursor
class CustomUserManager(BaseUserManager):
    """User manager for an email-keyed user model (no username field)."""

    def create_user(self, email, password=None, **extra_fields):
        """Create and persist a user identified by email.

        Raises:
            ValueError: If ``email`` is falsy.
        """
        if not email:
            raise ValueError("The email field must be set")
        email = self.normalize_email(email)
        user = self.model(email=email, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def get_by_natural_key(self, email):
        # Case-insensitive lookup so authentication ignores email casing.
        return self.get(email__iexact=email)
def enum_to_choices(enum_class):
    """Build Django ``choices`` tuples from a Python Enum.

    Each member becomes ``(value, label)``, where the label is the member
    name with underscores replaced by spaces and title-cased — the shape
    Django's ``choices`` attribute expects.
    """
    choices = []
    for member in enum_class:
        label = member.name.replace("_", " ").title()
        choices.append((member.value, label))
    return choices
def one_week_from_now():
    """Return the current UTC time advanced by exactly seven days."""
    now = datetime.now(timezone.utc)
    return now + timedelta(days=7)
def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
"""
Generate a random token with the specified length.
"""
_symbols = "23456789ABCDEFGHJKMNPQRSTVWXYZ"
return "".join(secrets.choice(symbols or _symbols) for _ in range(length))
def batch_delete(queryset, batch_size=5000):
    """Delete the queryset's rows in fixed-size batches.

    Repeatedly grabs up to ``batch_size`` ids (ordered for stable paging)
    and deletes them, until the queryset is empty.

    Args:
        queryset (QuerySet): The queryset of objects to delete.
        batch_size (int): The number of objects to delete in each batch.

    Returns:
        tuple: ``(total_deleted, deletion_summary)`` where the summary maps
        model labels to per-model deletion counts.
    """
    total_deleted = 0
    deletion_summary = {}
    while True:
        ids = set(
            queryset.values_list("id", flat=True).order_by("id")[:batch_size]
        )
        if not ids:
            break  # nothing left to delete
        count, per_model = queryset.filter(id__in=ids).delete()
        total_deleted += count
        for label, n in per_model.items():
            deletion_summary[label] = deletion_summary.get(label, 0) + n
    return total_deleted, deletion_summary
# Postgres Enums
class PostgresEnumMigration:
    """Helper to create/drop a named Postgres ENUM type from migrations."""

    def __init__(self, enum_name: str, enum_values: tuple):
        # Type name and its allowed labels, as used in CREATE TYPE.
        self.enum_name = enum_name
        self.enum_values = enum_values

    def create_enum_type(self, apps, schema_editor):  # noqa: F841
        """Execute ``CREATE TYPE ... AS ENUM`` with the configured values."""
        # NOTE(review): name/values are interpolated into the SQL; this is
        # only safe because they are hard-coded in migration files.
        string_enum_values = ", ".join([f"'{value}'" for value in self.enum_values])
        with schema_editor.connection.cursor() as cursor:
            cursor.execute(
                f"CREATE TYPE {self.enum_name} AS ENUM ({string_enum_values});"
            )

    def drop_enum_type(self, apps, schema_editor):  # noqa: F841
        """Execute ``DROP TYPE`` for the configured enum name."""
        with schema_editor.connection.cursor() as cursor:
            cursor.execute(f"DROP TYPE {self.enum_name};")
class PostgresEnumField(models.Field):
    """Django model field whose column type is a named Postgres ENUM."""

    def __init__(self, enum_type_name, *args, **kwargs):
        # Name of the Postgres enum type backing this column.
        self.enum_type_name = enum_type_name
        super().__init__(*args, **kwargs)

    def db_type(self, connection):
        # The column's DDL type is simply the enum type name.
        return self.enum_type_name

    def from_db_value(self, value, expression, connection):  # noqa: F841
        # Values arrive from Postgres as plain strings; pass through as-is.
        return value

    def to_python(self, value):
        # Unwrap EnumType wrappers; anything else is used unchanged.
        if isinstance(value, EnumType):
            return value.value
        return value

    def get_prep_value(self, value):
        # Mirror to_python for the write path.
        if isinstance(value, EnumType):
            return value.value
        return value
class EnumType:
    """Minimal wrapper carrying a raw Postgres enum label as a string."""

    def __init__(self, value):
        # The enum label, stored verbatim.
        self.value = value

    def __str__(self):
        return self.value
def enum_adapter(enum_obj):
    """psycopg2 adapter: render an EnumType as a quoted, type-cast SQL literal."""
    return AsIs(f"'{enum_obj.value}'::{enum_obj.__class__.enum_type_name}")
def get_enum_oid(connection, enum_type_name: str):
    """Look up the OID of a Postgres enum type by name.

    Args:
        connection: Open database connection providing ``cursor()``.
        enum_type_name (str): Type name to search for in ``pg_type``.

    Raises:
        ValueError: If no type with that name exists.
    """
    with connection.cursor() as cursor:
        cursor.execute("SELECT oid FROM pg_type WHERE typname = %s;", (enum_type_name,))
        row = cursor.fetchone()
    if row is None:
        raise ValueError(f"Enum type '{enum_type_name}' not found")
    return row[0]
def register_enum(apps, schema_editor, enum_class):  # noqa: F841
    """Register psycopg2 type casting and adaptation for a Postgres enum.

    Resolves the enum's OID over a dedicated connection, registers a reader
    that returns values as plain strings, and registers ``enum_adapter`` so
    ``enum_class`` instances serialize as casted literals on the way in.
    """
    with psycopg_connection(schema_editor.connection.alias) as connection:
        enum_oid = get_enum_oid(connection, enum_class.enum_type_name)
        enum_instance = new_type(
            (enum_oid,),
            enum_class.enum_type_name,
            lambda value, cur: value,  # noqa: F841
        )
        register_type(enum_instance, connection)
        register_adapter(enum_class, enum_adapter)
# Postgres enum definition for member role
class MemberRoleEnum(EnumType):
    # Wraps values of the Postgres "member_role" enum type.
    enum_type_name = "member_role"
class MemberRoleEnumField(PostgresEnumField):
    """Model field stored as the Postgres "member_role" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("member_role", *args, **kwargs)
# Postgres enum definition for Provider.provider
class ProviderEnum(EnumType):
    # Wraps values of the Postgres "provider" enum type.
    enum_type_name = "provider"
class ProviderEnumField(PostgresEnumField):
    """Model field stored as the Postgres "provider" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("provider", *args, **kwargs)
# Postgres enum definition for Scan.type
class ScanTriggerEnum(EnumType):
    # Wraps values of the Postgres "scan_trigger" enum type.
    enum_type_name = "scan_trigger"
class ScanTriggerEnumField(PostgresEnumField):
    """Model field stored as the Postgres "scan_trigger" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("scan_trigger", *args, **kwargs)
# Postgres enum definition for state
class StateEnum(EnumType):
    # Wraps values of the Postgres "state" enum type.
    enum_type_name = "state"
class StateEnumField(PostgresEnumField):
    """Model field stored as the Postgres "state" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("state", *args, **kwargs)
# Postgres enum definition for Finding.Delta
class FindingDeltaEnum(EnumType):
    # Wraps values of the Postgres "finding_delta" enum type.
    enum_type_name = "finding_delta"
class FindingDeltaEnumField(PostgresEnumField):
    """Model field stored as the Postgres "finding_delta" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("finding_delta", *args, **kwargs)
# Postgres enum definition for Severity
class SeverityEnum(EnumType):
    # Wraps values of the Postgres "severity" enum type.
    enum_type_name = "severity"
class SeverityEnumField(PostgresEnumField):
    """Model field stored as the Postgres "severity" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("severity", *args, **kwargs)
# Postgres enum definition for Status
class StatusEnum(EnumType):
    # Wraps values of the Postgres "status" enum type.
    enum_type_name = "status"
class StatusEnumField(PostgresEnumField):
    """Model field stored as the Postgres "status" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("status", *args, **kwargs)
# Postgres enum definition for Provider secrets type
class ProviderSecretTypeEnum(EnumType):
    # Wraps values of the Postgres "provider_secret_type" enum type.
    enum_type_name = "provider_secret_type"
class ProviderSecretTypeEnumField(PostgresEnumField):
    """Model field stored as the Postgres "provider_secret_type" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("provider_secret_type", *args, **kwargs)
# Postgres enum definition for Invitation state
class InvitationStateEnum(EnumType):
    # Wraps values of the Postgres "invitation_state" enum type.
    enum_type_name = "invitation_state"
class InvitationStateEnumField(PostgresEnumField):
    """Model field stored as the Postgres "invitation_state" enum type."""

    def __init__(self, *args, **kwargs):
        super().__init__("invitation_state", *args, **kwargs)

View File

@@ -1,59 +0,0 @@
import uuid
from functools import wraps
from django.db import connection, transaction
from rest_framework_json_api.serializers import ValidationError
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
def set_tenant(func):
    """
    Decorator to set the tenant context for a Celery task based on the provided tenant_id.

    This decorator extracts the `tenant_id` from the task's keyword arguments,
    and uses it to set the tenant context for the current database session.
    The `tenant_id` is then removed from the kwargs before the task function
    is executed. If `tenant_id` is not provided, a KeyError is raised.

    Args:
        func (function): The Celery task function to be decorated.

    Raises:
        KeyError: If `tenant_id` is not found in the task's keyword arguments.
        ValidationError: If `tenant_id` is not a valid UUID.

    Returns:
        function: The wrapped function with tenant context set.

    Example:
        # This decorator MUST be defined the last in the decorator chain
        @shared_task
        @set_tenant
        def some_task(arg1, **kwargs):
            # Task logic here
            pass

        # When calling the task
        some_task.delay(arg1, tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5")
        # The tenant context will be set before the task logic executes.
    """
    @wraps(func)
    @transaction.atomic
    def wrapper(*args, **kwargs):
        # tenant_id is consumed here and deliberately NOT forwarded to func.
        try:
            tenant_id = kwargs.pop("tenant_id")
        except KeyError:
            raise KeyError("This task requires the tenant_id")
        try:
            uuid.UUID(tenant_id)
        except ValueError:
            raise ValidationError("Tenant ID must be a valid UUID")
        # SET_CONFIG_QUERY uses is_local=TRUE; wrapping in transaction.atomic
        # presumably keeps the setting alive for the whole task body — the
        # task then runs inside this same transaction.
        with connection.cursor() as cursor:
            cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
        return func(*args, **kwargs)
    return wrapper

View File

@@ -1,45 +0,0 @@
from django.core.exceptions import ValidationError as django_validation_error
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_json_api.exceptions import exception_handler
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken
class ModelValidationError(ValidationError):
    """ValidationError pre-shaped as a single JSON:API error object.

    Wraps detail/code/pointer into the JSON:API error structure
    (``detail``, ``status``, ``source.pointer``, ``code``) so the renderer
    emits it without further transformation.
    """

    def __init__(
        self,
        detail: str | None = None,
        code: str | None = None,
        pointer: str | None = None,
        status_code: int = 400,
    ):
        super().__init__(
            detail=[
                {
                    "detail": detail,
                    # JSON:API requires the status member to be a string.
                    "status": str(status_code),
                    "source": {"pointer": pointer},
                    "code": code,
                }
            ]
        )
class InvitationTokenExpiredException(APIException):
    """Raised when an invitation token is past its expiry; renders as 410 Gone."""

    status_code = status.HTTP_410_GONE
    default_detail = "The invitation token has expired and is no longer valid."
    default_code = "token_expired"
def custom_exception_handler(exc, context):
    """Normalize Django and simplejwt exceptions, then delegate to the
    rest_framework_json_api exception handler.

    Django ``ValidationError`` instances are converted to DRF ones so they
    render as JSON:API error objects; simplejwt token errors have their
    ``messages`` entries flattened to plain strings.
    """
    if isinstance(exc, django_validation_error):
        # Field-keyed errors carry error_dict; plain errors carry messages.
        exc = (
            ValidationError(exc.message_dict)
            if hasattr(exc, "error_dict")
            else ValidationError(detail=exc.messages[0], code=exc.code)
        )
    elif isinstance(exc, (TokenError, InvalidToken)):
        flattened = [item["message"] for item in exc.detail["messages"]]
        exc.detail["messages"] = flattened
    return exception_handler(exc, context)

View File

@@ -1,604 +0,0 @@
from datetime import date, datetime, timezone
from django.conf import settings
from django.db.models import Q
from django_filters.rest_framework import (
BaseInFilter,
BooleanFilter,
CharFilter,
ChoiceFilter,
DateFilter,
FilterSet,
UUIDFilter,
)
from rest_framework_json_api.django_filters.backends import DjangoFilterBackend
from rest_framework_json_api.serializers import ValidationError
from api.db_utils import (
FindingDeltaEnumField,
InvitationStateEnumField,
ProviderEnumField,
SeverityEnumField,
StatusEnumField,
)
from api.models import (
ComplianceOverview,
Finding,
Invitation,
Membership,
PermissionChoices,
Provider,
ProviderGroup,
ProviderSecret,
Resource,
ResourceTag,
Role,
Scan,
ScanSummary,
SeverityChoices,
StateChoices,
StatusChoices,
Task,
User,
)
from api.rls import Tenant
from api.uuid_utils import (
datetime_to_uuid7,
transform_into_uuid7,
uuid7_end,
uuid7_range,
uuid7_start,
)
from api.v1.serializers import TaskBase
class CustomDjangoFilterBackend(DjangoFilterBackend):
    """JSON:API filter backend that lets views supply their own filterset class."""

    def to_html(self, _request, _queryset, _view):
        """Override this method to use the Browsable API in dev environments.
        This disables the HTML render for the default filter.
        """
        return None

    def get_filterset_class(self, view, queryset=None):
        # Check if the view has 'get_filterset_class' method
        if hasattr(view, "get_filterset_class"):
            return view.get_filterset_class()
        # Fallback to the default implementation
        return super().get_filterset_class(view, queryset)
class UUIDInFilter(BaseInFilter, UUIDFilter):
    """Comma-separated list filter of UUID values."""

    pass
class CharInFilter(BaseInFilter, CharFilter):
    """Comma-separated list filter of string values."""

    pass
class ChoiceInFilter(BaseInFilter, ChoiceFilter):
    """Comma-separated list filter restricted to a choices set."""

    pass
class TenantFilter(FilterSet):
    """Filters for Tenant: name plus date-based timestamp lookups."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = Tenant
        fields = {
            "name": ["exact", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
class MembershipFilter(FilterSet):
    """Filters for Membership: tenant, role and join date."""

    date_joined = DateFilter(field_name="date_joined", lookup_expr="date")
    role = ChoiceFilter(choices=Membership.RoleChoices.choices)

    class Meta:
        model = Membership
        fields = {
            "tenant": ["exact"],
            "role": ["exact"],
            "date_joined": ["date", "gte", "lte"],
        }
class ProviderFilter(FilterSet):
    """Filters for Provider records (type, uid, alias, timestamps)."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    connected = BooleanFilter()
    provider = ChoiceFilter(choices=Provider.ProviderChoices.choices)

    class Meta:
        model = Provider
        fields = {
            "provider": ["exact", "in"],
            "id": ["exact", "in"],
            "uid": ["exact", "icontains", "in"],
            "alias": ["exact", "icontains", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
        # The provider column is a custom Postgres enum field; fall back to
        # plain CharFilter for auto-generated lookups.
        filter_overrides = {
            ProviderEnumField: {
                "filter_class": CharFilter,
            },
        }
class ProviderRelationshipFilterSet(FilterSet):
    """Shared base exposing ``provider__*`` lookups for models related to Provider."""

    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="provider__provider"
    )
    provider_uid = CharFilter(field_name="provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(field_name="provider__alias", lookup_expr="in")
    provider_alias__icontains = CharFilter(
        field_name="provider__alias", lookup_expr="icontains"
    )
class ProviderGroupFilter(FilterSet):
    """Filters for ProviderGroup: id, name and timestamps."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = ProviderGroup
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
class ScanFilter(ProviderRelationshipFilterSet):
    """Filters for Scan: trigger, state, schedule dates plus provider lookups."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    completed_at = DateFilter(field_name="completed_at", lookup_expr="date")
    started_at = DateFilter(field_name="started_at", lookup_expr="date")
    next_scan_at = DateFilter(field_name="next_scan_at", lookup_expr="date")
    trigger = ChoiceFilter(choices=Scan.TriggerChoices.choices)
    state = ChoiceFilter(choices=StateChoices.choices)
    state__in = ChoiceInFilter(
        field_name="state", choices=StateChoices.choices, lookup_expr="in"
    )

    class Meta:
        model = Scan
        fields = {
            "provider": ["exact", "in"],
            "name": ["exact", "icontains"],
            "started_at": ["gte", "lte"],
            "next_scan_at": ["gte", "lte"],
            "trigger": ["exact"],
        }
class TaskFilter(FilterSet):
    """Filters for Task, matching against the underlying Celery task result row."""

    name = CharFilter(field_name="task_runner_task__task_name", lookup_expr="exact")
    name__icontains = CharFilter(
        field_name="task_runner_task__task_name", lookup_expr="icontains"
    )
    state = ChoiceFilter(
        choices=StateChoices.choices, method="filter_state", lookup_expr="exact"
    )
    # Reverse of TaskBase.state_mapping: API state value -> Celery status.
    task_state_inverse_mapping_values = {
        v: k for k, v in TaskBase.state_mapping.items()
    }

    def filter_state(self, queryset, name, value):
        """Translate an API state value into its Celery status and filter on it.

        Raises:
            ValidationError: If ``value`` is not one of StateChoices.
        """
        if value not in StateChoices:
            # Fixed: the message previously said "provider" instead of "state"
            # (copy-paste from another filter).
            raise ValidationError(
                f"Invalid state value: '{value}'. Valid values are: "
                f"{', '.join(StateChoices)}"
            )
        return queryset.filter(
            task_runner_task__status=self.task_state_inverse_mapping_values[value]
        )

    class Meta:
        model = Task
        fields = []
class ResourceTagFilter(FilterSet):
    """Filters for ResourceTag key/value pairs."""

    class Meta:
        model = ResourceTag
        fields = {
            "key": ["exact", "icontains"],
            "value": ["exact", "icontains"],
        }
        search = ["text_search"]
class ResourceFilter(ProviderRelationshipFilterSet):
    """Filters for Resource, including tag-based lookups via related ResourceTag."""

    tag_key = CharFilter(method="filter_tag_key")
    tag_value = CharFilter(method="filter_tag_value")
    tag = CharFilter(method="filter_tag")
    tags = CharFilter(method="filter_tag")
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = Resource
        fields = {
            "provider": ["exact", "in"],
            "uid": ["exact", "icontains"],
            "name": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
            "service": ["exact", "icontains", "in"],
            "type": ["exact", "icontains", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }

    def filter_tag_key(self, queryset, name, value):
        # Matches the tag key exactly OR as a substring.
        return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))

    def filter_tag_value(self, queryset, name, value):
        # Matches the tag value exactly OR as a substring.
        return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))

    def filter_tag(self, queryset, name, value):
        # We won't know what the user wants to filter on just based on the value,
        # and we don't want to build special filtering logic for every possible
        # provider tag spec, so we'll just do a full text search
        return queryset.filter(tags__text_search=value)
class FindingFilter(FilterSet):
    """Filters for Finding.

    Provider lookups traverse ``scan__provider``; resource lookups traverse
    the ``resources`` many-to-many. The ``scan`` and ``inserted_at`` filters
    additionally rewrite the query into UUIDv7 ``id`` range constraints so
    Postgres can prune finding-table partitions.
    """

    # We filter providers from the scan in findings
    provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="scan__provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(
        field_name="scan__provider__alias", lookup_expr="in"
    )
    provider_alias__icontains = CharFilter(
        field_name="scan__provider__alias", lookup_expr="icontains"
    )
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    uid = CharFilter(field_name="uid")
    delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
    status = ChoiceFilter(choices=StatusChoices.choices)
    severity = ChoiceFilter(choices=SeverityChoices)
    impact = ChoiceFilter(choices=SeverityChoices)
    resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
    region = CharFilter(field_name="resources__region")
    region__in = CharInFilter(field_name="resources__region", lookup_expr="in")
    region__icontains = CharFilter(
        field_name="resources__region", lookup_expr="icontains"
    )
    service = CharFilter(field_name="resources__service")
    service__in = CharInFilter(field_name="resources__service", lookup_expr="in")
    service__icontains = CharFilter(
        field_name="resources__service", lookup_expr="icontains"
    )
    resource_uid = CharFilter(field_name="resources__uid")
    resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
    resource_uid__icontains = CharFilter(
        field_name="resources__uid", lookup_expr="icontains"
    )
    resource_name = CharFilter(field_name="resources__name")
    resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
    resource_name__icontains = CharFilter(
        field_name="resources__name", lookup_expr="icontains"
    )
    resource_type = CharFilter(field_name="resources__type")
    resource_type__in = CharInFilter(field_name="resources__type", lookup_expr="in")
    resource_type__icontains = CharFilter(
        field_name="resources__type", lookup_expr="icontains"
    )
    resource_tag_key = CharFilter(field_name="resources__tags__key")
    resource_tag_key__in = CharInFilter(
        field_name="resources__tags__key", lookup_expr="in"
    )
    resource_tag_key__icontains = CharFilter(
        field_name="resources__tags__key", lookup_expr="icontains"
    )
    resource_tag_value = CharFilter(field_name="resources__tags__value")
    resource_tag_value__in = CharInFilter(
        field_name="resources__tags__value", lookup_expr="in"
    )
    resource_tag_value__icontains = CharFilter(
        field_name="resources__tags__value", lookup_expr="icontains"
    )
    resource_tags = CharInFilter(
        method="filter_resource_tag",
        lookup_expr="in",
        help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
        "separated by commas.",
    )
    scan = UUIDFilter(method="filter_scan_id")
    scan__in = UUIDInFilter(method="filter_scan_id_in")
    inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__gte = DateFilter(method="filter_inserted_at_gte")
    inserted_at__lte = DateFilter(method="filter_inserted_at_lte")

    class Meta:
        model = Finding
        fields = {
            "id": ["exact", "in"],
            "uid": ["exact", "in"],
            "scan": ["exact", "in"],
            "delta": ["exact", "in"],
            "status": ["exact", "in"],
            "severity": ["exact", "in"],
            "impact": ["exact", "in"],
            "check_id": ["exact", "in", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
        # Custom Postgres enum columns need an explicit auto-filter class.
        filter_overrides = {
            FindingDeltaEnumField: {
                "filter_class": CharFilter,
            },
            StatusEnumField: {
                "filter_class": CharFilter,
            },
            SeverityEnumField: {
                "filter_class": CharFilter,
            },
        }

    @property
    def qs(self):
        # Force distinct results to prevent duplicates with many-to-many relationships
        parent_qs = super().qs
        return parent_qs.distinct()

    # Convert filter values to UUIDv7 values for use with partitioning
    def filter_scan_id(self, queryset, name, value):
        """Filter by one scan id, adding an id range derived from the scan's UUIDv7."""
        try:
            value_uuid = transform_into_uuid7(value)
            start = uuid7_start(value_uuid)
            end = uuid7_end(value_uuid, settings.FINDINGS_TABLE_PARTITION_MONTHS)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )
        return (
            queryset.filter(id__gte=start)
            .filter(id__lt=end)
            .filter(scan__id=value_uuid)
        )

    def filter_scan_id_in(self, queryset, name, value):
        """Filter by several scan ids, bounding ids by the scans' UUIDv7 range."""
        try:
            uuid_list = [
                transform_into_uuid7(value_uuid)
                for value_uuid in value
                if value_uuid is not None
            ]
            start, end = uuid7_range(uuid_list)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )
        # When the range collapses to a point, only a lower bound applies.
        if start == end:
            return queryset.filter(id__gte=start).filter(scan__id__in=uuid_list)
        else:
            return (
                queryset.filter(id__gte=start)
                .filter(id__lt=end)
                .filter(scan__id__in=uuid_list)
            )

    def filter_inserted_at(self, queryset, name, value):
        """Exact-date filter with a UUIDv7 lower bound for partition pruning."""
        value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(value))
        return queryset.filter(id__gte=start).filter(inserted_at__date=value)

    def filter_inserted_at_gte(self, queryset, name, value):
        value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(value))
        return queryset.filter(id__gte=start).filter(inserted_at__gte=value)

    def filter_inserted_at_lte(self, queryset, name, value):
        value = self.maybe_date_to_datetime(value)
        # NOTE(review): this uses uuid7_start (day-start boundary) for an
        # id__lte upper bound; presumably the inserted_at__lte clause keeps
        # the result correct — confirm against api.uuid_utils.
        end = uuid7_start(datetime_to_uuid7(value))
        return queryset.filter(id__lte=end).filter(inserted_at__lte=value)

    def filter_resource_tag(self, queryset, name, value):
        """Match any of the given `key:value` pairs as substring tag matches."""
        overall_query = Q()
        for key_value_pair in value:
            tag_key, tag_value = key_value_pair.split(":", 1)
            overall_query |= Q(
                resources__tags__key__icontains=tag_key,
                resources__tags__value__icontains=tag_value,
            )
        return queryset.filter(overall_query).distinct()

    @staticmethod
    def maybe_date_to_datetime(value):
        # Promote a bare date to a UTC midnight datetime; pass datetimes through.
        dt = value
        if isinstance(value, date):
            dt = datetime.combine(value, datetime.min.time(), tzinfo=timezone.utc)
        return dt
class ProviderSecretFilter(FilterSet):
    """Filters for ProviderSecret: name, provider and timestamps."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    provider = UUIDFilter(field_name="provider__id", lookup_expr="exact")

    class Meta:
        model = ProviderSecret
        fields = {
            "name": ["exact", "icontains"],
        }
class InvitationFilter(FilterSet):
    """Filters for Invitation: email, state, inviter and date fields."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    expires_at = DateFilter(field_name="expires_at", lookup_expr="date")
    state = ChoiceFilter(choices=Invitation.State.choices)
    state__in = ChoiceInFilter(choices=Invitation.State.choices, lookup_expr="in")

    class Meta:
        model = Invitation
        fields = {
            "email": ["exact", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["date", "gte", "lte"],
            "expires_at": ["date", "gte", "lte"],
            "inviter": ["exact"],
        }
        # The state column is a custom Postgres enum field.
        filter_overrides = {
            InvitationStateEnumField: {
                "filter_class": CharFilter,
            }
        }
class UserFilter(FilterSet):
    """Filters for User: name, email, company, activity and join date."""

    date_joined = DateFilter(field_name="date_joined", lookup_expr="date")

    class Meta:
        model = User
        fields = {
            "name": ["exact", "icontains"],
            "email": ["exact", "icontains"],
            "company_name": ["exact", "icontains"],
            "date_joined": ["date", "gte", "lte"],
            "is_active": ["exact"],
        }
class RoleFilter(FilterSet):
    """Filters for Role, including permission-state via the model helper."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    permission_state = ChoiceFilter(
        choices=PermissionChoices.choices, method="filter_permission_state"
    )

    def filter_permission_state(self, queryset, name, value):
        # Delegates the actual filtering logic to the Role model.
        return Role.filter_by_permission_state(queryset, value)

    class Meta:
        model = Role
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
class ComplianceOverviewFilter(FilterSet):
    """Filters for ComplianceOverview: framework metadata, scan and region."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
    provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
    scan_id = UUIDFilter(field_name="scan__id")

    class Meta:
        model = ComplianceOverview
        fields = {
            "inserted_at": ["date", "gte", "lte"],
            "compliance_id": ["exact", "icontains"],
            "framework": ["exact", "iexact", "icontains"],
            "version": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
        }
class ScanSummaryFilter(FilterSet):
    """Filters for ScanSummary aggregates, keyed by provider and region."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider_type = ChoiceFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    provider_type__in = ChoiceInFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    region = CharFilter(field_name="region")
    muted_findings = BooleanFilter(method="filter_muted_findings")

    def filter_muted_findings(self, queryset, name, value):
        # False excludes rows that contain muted findings; True is a no-op.
        if not value:
            return queryset.exclude(muted__gt=0)
        return queryset

    class Meta:
        model = ScanSummary
        fields = {
            "inserted_at": ["date", "gte", "lte"],
            "region": ["exact", "icontains", "in"],
        }
class ServiceOverviewFilter(ScanSummaryFilter):
    """ScanSummaryFilter variant that drops muted_findings and requires a date filter."""

    # Disable the inherited muted_findings filter for this endpoint.
    muted_findings = None

    def is_valid(self):
        """Require at least one inserted_at constraint before normal validation."""
        # Check if at least one of the inserted_at filters is present
        inserted_at_filters = [
            self.data.get("inserted_at"),
            self.data.get("inserted_at__gte"),
            self.data.get("inserted_at__lte"),
        ]
        if not any(inserted_at_filters):
            raise ValidationError(
                {
                    "inserted_at": [
                        "At least one of filter[inserted_at], filter[inserted_at__gte], or "
                        "filter[inserted_at__lte] is required."
                    ]
                }
            )
        return super().is_valid()

View File

@@ -1,28 +0,0 @@
[
{
"model": "api.user",
"pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"fields": {
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
"last_login": null,
"name": "Devie Prowlerson",
"email": "dev@prowler.com",
"company_name": "Prowler Developers",
"is_active": true,
"date_joined": "2024-09-17T09:04:20.850Z"
}
},
{
"model": "api.user",
"pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
"fields": {
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
"last_login": null,
"name": "Devietoo Prowlerson",
"email": "dev2@prowler.com",
"company_name": "Prowler Developers",
"is_active": true,
"date_joined": "2024-09-18T09:04:20.850Z"
}
}
]

View File

@@ -1,50 +0,0 @@
[
{
"model": "api.tenant",
"pk": "12646005-9067-4d2a-a098-8bb378604362",
"fields": {
"inserted_at": "2024-03-21T23:00:00Z",
"updated_at": "2024-03-21T23:00:00Z",
"name": "Tenant1"
}
},
{
"model": "api.tenant",
"pk": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
"fields": {
"inserted_at": "2024-03-21T23:00:00Z",
"updated_at": "2024-03-21T23:00:00Z",
"name": "Tenant2"
}
},
{
"model": "api.membership",
"pk": "2b0db93a-7e0b-4edf-a851-ea448676b7eb",
"fields": {
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"tenant": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
"role": "owner",
"date_joined": "2024-09-19T11:03:59.712Z"
}
},
{
"model": "api.membership",
"pk": "797d7cee-abc9-4598-98bb-4bf4bfb97f27",
"fields": {
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "owner",
"date_joined": "2024-09-19T11:02:59.712Z"
}
},
{
"model": "api.membership",
"pk": "dea37563-7009-4dcf-9f18-25efb41462a7",
"fields": {
"user": "b6493a3a-c997-489b-8b99-278bf74de9f6",
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "member",
"date_joined": "2024-09-19T11:03:59.712Z"
}
}
]

View File

@@ -1,177 +0,0 @@
[
{
"model": "api.provider",
"pk": "37b065f8-26b0-4218-a665-0b23d07b27d9",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-08-01T17:20:27.050Z",
"updated_at": "2024-08-01T17:20:27.050Z",
"provider": "gcp",
"uid": "a12322-test321",
"alias": "gcp_testing_2",
"connected": null,
"connection_last_checked_at": null,
"metadata": {}
}
},
{
"model": "api.provider",
"pk": "8851db6b-42e5-4533-aa9e-30a32d67e875",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-08-01T17:19:42.453Z",
"updated_at": "2024-08-01T17:19:42.453Z",
"provider": "gcp",
"uid": "a12345-test123",
"alias": "gcp_testing_1",
"connected": null,
"connection_last_checked_at": null,
"metadata": {}
}
},
{
"model": "api.provider",
"pk": "b85601a8-4b45-4194-8135-03fb980ef428",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-08-01T17:19:09.556Z",
"updated_at": "2024-08-01T17:19:09.556Z",
"provider": "aws",
"uid": "123456789020",
"alias": "aws_testing_2",
"connected": null,
"connection_last_checked_at": null,
"metadata": {}
}
},
{
"model": "api.provider",
"pk": "baa7b895-8bac-4f47-b010-4226d132856e",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-08-01T17:20:16.962Z",
"updated_at": "2024-08-01T17:20:16.962Z",
"provider": "gcp",
"uid": "a12322-test123",
"alias": "gcp_testing_3",
"connected": null,
"connection_last_checked_at": null,
"metadata": {}
}
},
{
"model": "api.provider",
"pk": "d7c7ea89-d9af-423b-a364-1290dcad5a01",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-08-01T17:18:58.132Z",
"updated_at": "2024-08-01T17:18:58.132Z",
"provider": "aws",
"uid": "123456789015",
"alias": "aws_testing_1",
"connected": null,
"connection_last_checked_at": null,
"metadata": {}
}
},
{
"model": "api.provider",
"pk": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-08-06T16:03:26.176Z",
"updated_at": "2024-08-06T16:03:26.176Z",
"provider": "azure",
"uid": "8851db6b-42e5-4533-aa9e-30a32d67e875",
"alias": "azure_testing",
"connected": null,
"connection_last_checked_at": null,
"metadata": {},
"scanner_args": {}
}
},
{
"model": "api.provider",
"pk": "26e55a24-cb2c-4cef-ac87-6f91fddb2c97",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-08-06T16:03:07.037Z",
"updated_at": "2024-08-06T16:03:07.037Z",
"provider": "kubernetes",
"uid": "kubernetes-test-12345",
"alias": "k8s_testing",
"connected": null,
"connection_last_checked_at": null,
"metadata": {},
"scanner_args": {}
}
},
{
"model": "api.provider",
"pk": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:45:26.352Z",
"updated_at": "2024-10-18T11:16:23.533Z",
"provider": "aws",
"uid": "106908755759",
"alias": "real testing aws provider",
"connected": true,
"connection_last_checked_at": "2024-10-18T11:16:23.503Z",
"metadata": {},
"scanner_args": {}
}
},
{
"model": "api.providersecret",
"pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-11T08:03:05.026Z",
"updated_at": "2024-10-11T08:04:47.033Z",
"name": "GCP static secrets",
"secret_type": "static",
"_secret": "Z0FBQUFBQm5DTndmZW9KakRZUHM2UHhQN2V3RzN0QmM1cERham8yMHp5cnVTT0lzdGFyS1FuVmJXUlpYSGsyU0cxR3RMMEdQYXlYMUVsaWtqLU1OZWlaVUp6OFREYlotZTVBY3BuTlZYbm9YcUJydzAxV2p5dkpLamI1Y2tUYzA0MmJUNWxsNTBRM0E1SDRCa0pPQWVlb05YU3dfeUhkLTRmOEh3dGczOGh1ZGhQcVdZdVAtYmtoSWlwNXM4VGFoVmF3dno2X1hrbk5GZjZTWjVuWEdEZUFXeHJSQjEzbTlVakhNdzYyWTdiVEpvUEc2MTNpRzUtczhEank1eGI0b3MyMlAyaGN6dlByZmtUWHByaDNUYWFqYS1tYnNBUkRKTzBacFNSRjFuVmd5bUtFUEJhd1ZVS1ZDd2xSUV9PaEtLTnc0XzVkY2lhM01WTjQwaWdJSk9wNUJSXzQ4RUNQLXFPNy1VdzdPYkZyWkVkU3RyQjVLTS1MVHN0R3k4THNKZ2NBNExaZnl3Q1EwN2dwNGRsUXptMjB0LXUzTUpzTDE2Q1hmS0ZSN2g1ZjBPeV8taFoxNUwxc2FEcktXX0dCM1IzeUZTTHNiTmNxVXBvNWViZTJScUVWV2VYTFQ4UHlid21PY1A0UjdNMGtERkZCd0lLMlJENDMzMVZUM09DQ0twd1N3VHlZd09XLUctOWhYcFJIR1p5aUlZeEUzejc2dWRYdGNsd0xOODNqRUFEczhSTWNtWU0tdFZ1ZTExaHNHUVYtd0Zxdld1LTdKVUNINzlZTGdHODhKeVVpQmRZMHRUNTJRRWhwS1F1Y3I2X2Iwc0c1NHlXSVRLZWxreEt0dVRnOTZFMkptU2VMS1dWXzdVOVRzMUNUWXM2aFlxVDJXdGo3d2cxSVZGWlI2ZWhIZzZBcEl4bEJ6UnVHc0RYWVNHcjFZUHI5ZUYyWG9rSlo0QUVSUkFCX3h2UmtJUTFzVXJUZ25vTmk2VzdoTTNta05ucmNfTi0yR1ZxN1E2MnZJOVVKOGxmMXMzdHMxVndmSVhQbUItUHgtMVpVcHJwMU5JVHJLb0Y1aHV5OEEwS0kzQkEtcFJkdkRnWGxmZnprNFhndWg1TmQyd09yTFdTRmZ3d2ZvZFUtWXp4a2VYb3JjckFIcE13MDUzX0RHSnlzM0N2ZE5IRzJzMXFMc0k4MDRyTHdLZFlWOG9SaFF0LU43Ynd6VFlEcVNvdFZ0emJEVk10aEp4dDZFTFNFNzk0UUo2WTlVLWRGYm1fanZHaFZreHBIMmtzVjhyS0xPTk9fWHhiVTJHQXZwVlVuY3JtSjFUYUdHQzhEaHFNZXhwUHBmY0kxaUVrOHo4a0FYOTdpZVJDbFRvdFlQeWo3eFZHX1ZMZ1Myc3prU3o2c3o2eXNja1U4N0Y1T0d1REVjZFRGNTByUkgyemVCSjlQYkY2bmJ4YTZodHB0cUNzd2xZcENycUdsczBIaEZPbG1jVUlqNlM2cEE3aGpVaWswTzBDLVFGUHM5UHhvM09saWNtaDhaNVlsc3FZdktKeWlheDF5OGhTODE2N3JWamdTZG5Fa3JSQ2ZUSEVfRjZOZXdreXRZLTBZRFhleVFFeC1YUzc0cWhYeEhobGxvdnZ3Rm15WFlBWXp0dm1DeTA5eExLeEFRRXVRSXBXdTNEaWdZZ3JDenItdDhoZlFiTzI0SGZ1c01FR1FNaFVweVBKR1YxWGRUMW1Mc2JVdW9raWR6UHk2ZTBnS05pV3oyZVBjREdkY3k4ZHZPUWE5S281MkJRSHF3NnpTclZ5bl90bk1wUEh6Tkp5dXlDcE5paWRqcVhxRFVObWIzRldWOGJ2aC1CRHZpbFZrb0hjNGpCMm5POGRiS2lETUpMLUVfQlhCdTZPLW9USW1LTFlTSF9zRUJYZ1NKeFFEQjNOR215ZXJDbkFndmcxWl9rWlk9",
"provider": "8851db6b-42e5-4533-aa9e-30a32d67e875"
}
},
{
"model": "api.providersecret",
"pk": "40191ad5-d8c2-40a9-826d-241397626b68",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-10T11:11:44.515Z",
"updated_at": "2024-10-11T07:59:56.102Z",
"name": "AWS static secrets",
"secret_type": "static",
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428"
}
},
{
"model": "api.providersecret",
"pk": "ed89d1ea-366a-4d12-a602-f2ab77019742",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-10T11:11:44.515Z",
"updated_at": "2024-10-11T07:59:56.102Z",
"name": "Azure static secrets",
"secret_type": "static",
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2"
}
},
{
"model": "api.providersecret",
"pk": "ae48ecde-75cd-4814-92ab-18f48719e5d9",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:45:26.412Z",
"updated_at": "2024-10-18T10:45:26.412Z",
"name": "Valid AWS Credentials",
"secret_type": "static",
"_secret": "Z0FBQUFBQm5FanhHa3dXS0I3M2NmWm56SktiaGNqdDZUN0xQU1QwUi15QkhLZldFUmRENk1BXzlscG9JSUxVSTF5ekxuMkdEanlJNjhPUS1VSV9wVTBvU2l4ZnNGOVJhYW93RC1LTEhmc2pyOTJvUWwyWnpFY19WN1pRQk5IdDYwYnBDQnF1eU9nUzdwTGU3QU5qMGFyX1E4SXdpSk9paGVLcVpOVUhwb3duaXgxZ0ZxME5Pcm40QzBGWEZKY2lmRVlCMGFuVFVzemxuVjVNalZVQ2JsY2ZqNWt3Z01IYUZ0dk92YkdtSUZ5SlBvQWZoVU5DWlRFWmExNnJGVEY4Q1Bnd2VJUW9TSWdRcG9rSDNfREQwRld3Q1RYVnVYWVJLWWIxZmpsWGpwd0xQM0dtLTlYUjdHOVhhNklLWXFGTHpFQUVyVmNhYW9CU0tocGVyX3VjMkVEcVdjdFBfaVpsLTBzaUxrWTlta3dpelNtTG9xYVhBUHUzNUE4RnI1WXdJdHcxcFVfaG1XRHhDVFBKamxJb1FaQ2lsQ3FzRmxZbEJVemVkT1E2aHZfbDJqWDJPT3ViOWJGYzQ3eTNWNlFQSHBWRDFiV2tneDM4SmVqMU9Bd01TaXhPY2dmWG5RdENURkM2b2s5V3luVUZQcnFKNldnWEdYaWE2MnVNQkEwMHd6cUY5cVJkcGw4bHBtNzhPeHhkREdwSXNEc1JqQkxUR1FYRTV0UFNwbVlVSWF5LWgtbVhJZXlPZ0Q4cG9HX2E0Qld0LTF1TTFEVy1XNGdnQTRpLWpQQmFJUEdaOFJGNDVoUVJnQ25YVU5DTENMaTY4YmxtYWJFRERXTjAydVN2YnBDb3RkUE0zSDRlN1A3TXc4d2h1Wmd0LWUzZEcwMUstNUw2YnFyS2Z0NEVYMXllQW5GLVBpeU55SkNhczFIeFhrWXZpVXdwSFVrTDdiQjQtWHZJdERXVThzSnJsT2FNZzJDaUt6Y2NXYUZhUlo3VkY0R1BrSHNHNHprTmxjYmp1TXVKakRha0VtNmRFZWRmZHJWdnRCOVNjVGFVWjVQM3RwWWl4SkNmOU1pb2xqMFdOblhNY3Y3aERpOHFlWjJRc2dtRDkzZm1Qc29wdk5OQmJPbGk5ZUpGM1I2YzRJN2gxR3FEMllXR1pma1k0emVqSjZyMUliMGZsc3NfSlVDbGt4QzJTc3hHOU9FRHlZb09zVnlvcDR6WC1uclRSenI0Yy13WlFWNzJWRkwydjhmSjFZdnZ5X3NmZVF6UWRNMXo5STVyV3B0d09UUlFtOURITGhXSDVIUl9zYURJc05KWUNxekVyYkxJclNFNV9leEk4R2xsMGJod3lYeFIwaXR2dllwLTZyNWlXdDRpRkxVYkxWZFdvYUhKck5aeElBZUtKejNKS2tYVW1rTnVrRjJBQmdlZmV6ckozNjNwRmxLS1FaZzRVTTBZYzFFYi1idjBpZkQ3bWVvbEdRZXJrWFNleWZmSmFNdG1wQlp0YmxjWDV5T0tEbHRsYnNHbjRPRjl5MkttOUhRWlJtd1pmTnY4Z1lPRlZoTzFGVDdTZ0RDY1ByV0RndTd5LUNhcHNXUnNIeXdLMEw3WS1tektRTWFLQy1zakpMLWFiM3FOakE1UWU4LXlOX2VPbmd4MTZCRk9OY3Z4UGVDSWxhRlg4eHI4X1VUTDZZM0pjV0JDVi1UUjlTUl85cm1LWlZ0T1dzU0lpdWUwbXgtZ0l6eHNSNExRTV9MczJ6UkRkVElnRV9Rc0RoTDFnVHRZSEFPb2paX200TzZiRzVmRE5hOW5CTjh5Qi1WaEtueEpqRzJDY1luVWZtX1pseUpQSE5lQ0RrZ05EbWo5cU9MZ0ZkcXlqUll4UUkyejRfY2p4RXdEeC1PS1JIQVNUcmNIdkRJbzRiUktMWEQxUFM3aGNzeVFWUDdtcm5xNHlOYUU9",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555"
}
}
]

View File

@@ -1,218 +0,0 @@
[
{
"model": "api.scan",
"pk": "0191e280-9d2f-71c8-9b18-487a23ba185e",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "37b065f8-26b0-4218-a665-0b23d07b27d9",
"trigger": "manual",
"name": "test scan 1",
"state": "completed",
"unique_resource_count": 1,
"duration": 5,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-01T17:25:27.050Z",
"started_at": "2024-09-01T17:25:27.050Z",
"updated_at": "2024-09-01T17:25:27.050Z",
"completed_at": "2024-09-01T17:25:32.050Z"
}
},
{
"model": "api.scan",
"pk": "01920573-aa9c-73c9-bcda-f2e35c9b19d2",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
"trigger": "manual",
"name": "test aws scan 2",
"state": "completed",
"unique_resource_count": 1,
"duration": 20,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-02T17:24:27.050Z",
"started_at": "2024-09-02T17:24:27.050Z",
"updated_at": "2024-09-02T17:24:27.050Z",
"completed_at": "2024-09-01T17:24:37.050Z"
}
},
{
"model": "api.scan",
"pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "baa7b895-8bac-4f47-b010-4226d132856e",
"trigger": "manual",
"name": "test gcp scan",
"state": "completed",
"unique_resource_count": 10,
"duration": 10,
"scanner_args": {
"checks_to_execute": [
"cloudsql_instance_automated_backups"
]
},
"inserted_at": "2024-09-02T19:26:27.050Z",
"started_at": "2024-09-02T19:26:27.050Z",
"updated_at": "2024-09-02T19:26:27.050Z",
"completed_at": "2024-09-01T17:26:37.050Z"
}
},
{
"model": "api.scan",
"pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
"trigger": "manual",
"name": "test aws scan",
"state": "completed",
"unique_resource_count": 1,
"duration": 35,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-02T19:27:27.050Z",
"started_at": "2024-09-02T19:27:27.050Z",
"updated_at": "2024-09-02T19:27:27.050Z",
"completed_at": "2024-09-01T17:27:37.050Z"
}
},
{
"model": "api.scan",
"pk": "c281c924-23f3-4fcc-ac63-73a22154b7de",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
"trigger": "scheduled",
"name": "test scheduled aws scan",
"state": "available",
"scanner_args": {
"checks_to_execute": [
"cloudformation_stack_outputs_find_secrets"
]
},
"scheduled_at": "2030-09-02T19:20:27.050Z",
"inserted_at": "2024-09-02T19:24:27.050Z",
"updated_at": "2024-09-02T19:24:27.050Z"
}
},
{
"model": "api.scan",
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
"trigger": "scheduled",
"name": "test scheduled aws scan 2",
"state": "available",
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"cloudformation_stack_outputs_find_secrets"
]
},
"scheduled_at": "2030-08-02T19:31:27.050Z",
"inserted_at": "2024-09-02T19:38:27.050Z",
"updated_at": "2024-09-02T19:38:27.050Z"
}
},
{
"model": "api.scan",
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "baa7b895-8bac-4f47-b010-4226d132856e",
"trigger": "scheduled",
"name": "test scheduled gcp scan",
"state": "available",
"scanner_args": {
"checks_to_execute": [
"cloudsql_instance_automated_backups",
"iam_audit_logs_enabled"
]
},
"scheduled_at": "2030-07-02T19:30:27.050Z",
"inserted_at": "2024-09-02T19:29:27.050Z",
"updated_at": "2024-09-02T19:29:27.050Z"
}
},
{
"model": "api.scan",
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
"trigger": "scheduled",
"name": "test scheduled azure scan",
"state": "available",
"scanner_args": {
"checks_to_execute": [
"aks_cluster_rbac_enabled",
"defender_additional_email_configured_with_a_security_contact"
]
},
"scheduled_at": "2030-08-05T19:32:27.050Z",
"inserted_at": "2024-09-02T19:29:27.050Z",
"updated_at": "2024-09-02T19:29:27.050Z"
}
},
{
"model": "api.scan",
"pk": "01929f3b-ed2e-7623-ad63-7c37cd37828f",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan 1",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 19,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"duration": 7,
"scheduled_at": null,
"inserted_at": "2024-10-18T10:45:57.678Z",
"updated_at": "2024-10-18T10:46:05.127Z",
"started_at": "2024-10-18T10:45:57.909Z",
"completed_at": "2024-10-18T10:46:05.127Z"
}
},
{
"model": "api.scan",
"pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan 2",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 20,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"account_security_contact_information_is_registered"
]
},
"duration": 4,
"scheduled_at": null,
"inserted_at": "2024-10-18T11:16:21.358Z",
"updated_at": "2024-10-18T11:16:26.060Z",
"started_at": "2024-10-18T11:16:21.593Z",
"completed_at": "2024-10-18T11:16:26.060Z"
}
}
]

View File

@@ -1,322 +0,0 @@
[
{
"model": "api.resource",
"pk": "0234477d-0b8e-439f-87d3-ce38dff3a434",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.772Z",
"updated_at": "2024-10-18T11:16:24.466Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root",
"name": "",
"region": "eu-south-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-south':6C 'iam':3A 'other':11 'root':5A 'south':8C"
}
},
{
"model": "api.resource",
"pk": "17ce30a3-6e77-42a5-bb08-29dfcad7396a",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.882Z",
"updated_at": "2024-10-18T11:16:24.533Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root2",
"name": "",
"region": "eu-west-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
}
},
{
"model": "api.resource",
"pk": "1f9de587-ba5b-415a-b9b0-ceed4c6c9f32",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.091Z",
"updated_at": "2024-10-18T11:16:24.637Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root3",
"name": "",
"region": "ap-northeast-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "29b35668-6dad-411d-bfec-492311889892",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.008Z",
"updated_at": "2024-10-18T11:16:24.600Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root4",
"name": "",
"region": "us-west-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
}
},
{
"model": "api.resource",
"pk": "30505514-01d4-42bb-8b0c-471bbab27460",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:26.014Z",
"updated_at": "2024-10-18T11:16:26.023Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root5",
"name": "",
"region": "us-east-1",
"service": "account",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'account':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
}
},
{
"model": "api.resource",
"pk": "372932f0-e4df-4968-9721-bb4f6236fae4",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.848Z",
"updated_at": "2024-10-18T11:16:24.516Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root6",
"name": "",
"region": "eu-west-3",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
}
},
{
"model": "api.resource",
"pk": "3a37d124-7637-43f6-9df7-e9aa7ef98c53",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.979Z",
"updated_at": "2024-10-18T11:16:24.585Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root7",
"name": "",
"region": "sa-east-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'sa':7C 'sa-east':6C"
}
},
{
"model": "api.resource",
"pk": "3c49318e-03c6-4f12-876f-40451ce7de3d",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.072Z",
"updated_at": "2024-10-18T11:16:24.630Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root8",
"name": "",
"region": "ap-southeast-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
}
},
{
"model": "api.resource",
"pk": "430bf313-8733-4bc5-ac70-5402adfce880",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.994Z",
"updated_at": "2024-10-18T11:16:24.593Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root9",
"name": "",
"region": "eu-north-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-north':6C 'iam':3A 'north':8C 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "78bd2a52-82f9-45df-90a9-4ad78254fdc4",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.055Z",
"updated_at": "2024-10-18T11:16:24.622Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root10",
"name": "",
"region": "ap-northeast-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "7973e332-795e-4a74-b4d4-a53a21c98c80",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.896Z",
"updated_at": "2024-10-18T11:16:24.542Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root11",
"name": "",
"region": "us-east-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
}
},
{
"model": "api.resource",
"pk": "8ca0a188-5699-436e-80fd-e566edaeb259",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.938Z",
"updated_at": "2024-10-18T11:16:24.565Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root12",
"name": "",
"region": "ca-central-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'ca':7C 'ca-central':6C 'central':8C 'iam':3A 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "8fe4514f-71d7-46ab-b0dc-70cef23b4d13",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.965Z",
"updated_at": "2024-10-18T11:16:24.578Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root13",
"name": "",
"region": "eu-west-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
}
},
{
"model": "api.resource",
"pk": "9ab35225-dc7c-4ebd-bbc0-d81fb5d9de77",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.909Z",
"updated_at": "2024-10-18T11:16:24.549Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root14",
"name": "",
"region": "ap-south-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-south':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'south':8C"
}
},
{
"model": "api.resource",
"pk": "9be26c1d-adf0-4ba8-9ca9-c740f4a0dc4e",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.863Z",
"updated_at": "2024-10-18T11:16:24.524Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root15",
"name": "",
"region": "eu-central-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "ba108c01-bcad-44f1-b211-c1d8985da89d",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.110Z",
"updated_at": "2024-10-18T11:16:24.644Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root16",
"name": "",
"region": "ap-northeast-3",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "dc6cfb5d-6835-4c7b-9152-c18c734a6eaa",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.038Z",
"updated_at": "2024-10-18T11:16:24.615Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root17",
"name": "",
"region": "eu-central-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "e0664164-cfda-44a4-b743-acee1c69386c",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.924Z",
"updated_at": "2024-10-18T11:16:24.557Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root18",
"name": "",
"region": "us-west-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
}
},
{
"model": "api.resource",
"pk": "e1929daa-a984-4116-8131-492a48321dba",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.023Z",
"updated_at": "2024-10-18T11:16:24.607Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root19",
"name": "",
"region": "ap-southeast-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
}
},
{
"model": "api.resource",
"pk": "e37bb1f1-1669-4bb3-be86-e3378ddfbcba",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.952Z",
"updated_at": "2024-10-18T11:16:24.571Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:access-analyzer:us-east-1:112233445566:analyzer/ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
"name": "",
"region": "us-east-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9A,15C '112233445566':10A 'access':4A 'access-analyzer':3A 'accessanalyzer':16 'analyzer':5A 'analyzer/consoleanalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c':11A 'arn':1A 'aws':2A 'east':8A,14C 'other':17 'us':7A,13C 'us-east':6A,12C"
}
}
]

File diff suppressed because it is too large Load Diff

View File

@@ -1,153 +0,0 @@
[
{
"model": "api.providergroup",
"pk": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"fields": {
"name": "first_group",
"inserted_at": "2024-11-13T11:36:19.503Z",
"updated_at": "2024-11-13T11:36:19.503Z",
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
}
},
{
"model": "api.providergroup",
"pk": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"fields": {
"name": "second_group",
"inserted_at": "2024-11-13T11:36:25.421Z",
"updated_at": "2024-11-13T11:36:25.421Z",
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
}
},
{
"model": "api.providergroup",
"pk": "481769f5-db2b-447b-8b00-1dee18db90ec",
"fields": {
"name": "third_group",
"inserted_at": "2024-11-13T11:36:37.603Z",
"updated_at": "2024-11-13T11:36:37.603Z",
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
}
},
{
"model": "api.providergroupmembership",
"pk": "13625bd3-f428-4021-ac1b-b0bd41b6e02f",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"inserted_at": "2024-11-13T11:55:17.138Z"
}
},
{
"model": "api.providergroupmembership",
"pk": "54784ebe-42d2-4937-aa6a-e21c62879567",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"inserted_at": "2024-11-13T11:55:17.138Z"
}
},
{
"model": "api.providergroupmembership",
"pk": "c8bd52d5-42a5-48fe-8e0a-3eef154b8ebe",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"inserted_at": "2024-11-13T11:55:41.237Z"
}
},
{
"model": "api.role",
"pk": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "admin_test",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": true,
"manage_scans": true,
"unlimited_visibility": true,
"inserted_at": "2024-11-20T15:32:42.402Z",
"updated_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.role",
"pk": "845ff03a-87ef-42ba-9786-6577c70c4df0",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "first_role",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": false,
"manage_scans": false,
"unlimited_visibility": true,
"inserted_at": "2024-11-20T15:31:53.239Z",
"updated_at": "2024-11-20T15:31:53.239Z"
}
},
{
"model": "api.role",
"pk": "902d726c-4bd5-413a-a2a4-f7b4754b6b20",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "third_role",
"manage_users": false,
"manage_account": false,
"manage_billing": false,
"manage_providers": false,
"manage_integrations": false,
"manage_scans": true,
"unlimited_visibility": false,
"inserted_at": "2024-11-20T15:34:05.440Z",
"updated_at": "2024-11-20T15:34:05.440Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "57fd024a-0a7f-49b4-a092-fa0979a07aaf",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "a3cd0099-1c13-4df1-a5e5-ecdfec561b35",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "481769f5-db2b-447b-8b00-1dee18db90ec",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "cfd84182-a058-40c2-af3c-0189b174940f",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.userrolerelationship",
"pk": "92339663-e954-4fd8-98fb-8bfe15949975",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"inserted_at": "2024-11-20T15:36:14.302Z"
}
}
]

File diff suppressed because one or more lines are too long

View File

@@ -1,49 +0,0 @@
import logging
import time
from config.custom_logging import BackendLogger
def extract_auth_info(request) -> dict:
    """Return the tenant and user identifiers carried by the request's auth payload.

    Falls back to "N/A" for either value when the request has no ``auth``
    attribute, when ``auth`` is None, or when the payload lacks the key.
    """
    auth_payload = getattr(request, "auth", None)
    if auth_payload is None:
        return {"tenant_id": "N/A", "user_id": "N/A"}
    return {
        "tenant_id": auth_payload.get("tenant_id", "N/A"),
        "user_id": auth_payload.get("sub", "N/A"),
    }
class APILoggingMiddleware:
    """
    Middleware that emits one structured log record per API request.

    The record carries the authenticated user/tenant (when available), the HTTP
    method and path, the query parameters, the response status code and the
    wall-clock time taken to produce the response.

    Args:
        get_response (Callable): A callable to get the response, typically the next middleware or view.
    """

    def __init__(self, get_response):
        self.get_response = get_response
        self.logger = logging.getLogger(BackendLogger.API)

    def __call__(self, request):
        started_at = time.time()
        response = self.get_response(request)
        elapsed = time.time() - started_at

        auth_info = extract_auth_info(request)
        log_fields = {
            "user_id": auth_info["user_id"],
            "tenant_id": auth_info["tenant_id"],
            "method": request.method,
            "path": request.path,
            "query_params": request.GET.dict(),
            "status_code": response.status_code,
            "duration": elapsed,
        }
        # Empty message: all useful information travels in the `extra` fields.
        self.logger.info("", extra=log_fields)
        return response

File diff suppressed because it is too large Load Diff

View File

@@ -1,23 +0,0 @@
from django.conf import settings
from django.db import migrations
from api.db_utils import DB_PROWLER_USER
DB_NAME = settings.DATABASES["default"]["NAME"]
class Migration(migrations.Migration):
    """Grant the restricted API database user access to JWT token tables.

    The application connects with a limited role (``DB_PROWLER_USER``), so the
    token-blacklist tables and Django's admin log table need explicit
    privileges granted via raw SQL.
    """

    dependencies = [
        ("api", "0001_initial"),
        ("token_blacklist", "0012_alter_outstandingtoken_user"),
    ]

    operations = [
        # Raw SQL: Django has no built-in migration operation for GRANT.
        # NOTE(review): no reverse SQL is provided, so this is irreversible.
        migrations.RunSQL(
            f"""
            GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_blacklistedtoken TO {DB_PROWLER_USER};
            GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_outstandingtoken TO {DB_PROWLER_USER};
            GRANT SELECT, DELETE ON django_admin_log TO {DB_PROWLER_USER};
            """
        ),
    ]

View File

@@ -1,23 +0,0 @@
# Generated by Django 5.1.1 on 2024-12-20 13:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rebuild the provider uniqueness constraint to include ``is_deleted``.

    With ``is_deleted`` in the key, two rows sharing (tenant_id, provider, uid)
    may coexist only if they differ in their ``is_deleted`` value.
    """

    dependencies = [
        ("api", "0002_token_migrations"),
    ]

    operations = [
        # Constraints cannot be altered in place: drop, then re-create under
        # the same name with the extended field list.
        migrations.RemoveConstraint(
            model_name="provider",
            name="unique_provider_uids",
        ),
        migrations.AddConstraint(
            model_name="provider",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "provider", "uid", "is_deleted"),
                name="unique_provider_uids",
            ),
        ),
    ]

View File

@@ -1,248 +0,0 @@
# Generated by Django 5.1.1 on 2024-12-05 12:29
import uuid
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import api.rls
class Migration(migrations.Migration):
dependencies = [
("api", "0003_update_provider_unique_constraint_with_is_deleted"),
]
operations = [
migrations.CreateModel(
name="Role",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("name", models.CharField(max_length=255)),
("manage_users", models.BooleanField(default=False)),
("manage_account", models.BooleanField(default=False)),
("manage_billing", models.BooleanField(default=False)),
("manage_providers", models.BooleanField(default=False)),
("manage_integrations", models.BooleanField(default=False)),
("manage_scans", models.BooleanField(default=False)),
("unlimited_visibility", models.BooleanField(default=False)),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "roles",
},
),
migrations.CreateModel(
name="RoleProviderGroupRelationship",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "role_provider_group_relationship",
},
),
migrations.CreateModel(
name="UserRoleRelationship",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "role_user_relationship",
},
),
migrations.AddField(
model_name="roleprovidergrouprelationship",
name="provider_group",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.providergroup"
),
),
migrations.AddField(
model_name="roleprovidergrouprelationship",
name="role",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.role"
),
),
migrations.AddField(
model_name="role",
name="provider_groups",
field=models.ManyToManyField(
related_name="roles",
through="api.RoleProviderGroupRelationship",
to="api.providergroup",
),
),
migrations.AddField(
model_name="userrolerelationship",
name="role",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.role"
),
),
migrations.AddField(
model_name="userrolerelationship",
name="user",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
migrations.AddField(
model_name="role",
name="users",
field=models.ManyToManyField(
related_name="roles",
through="api.UserRoleRelationship",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AddConstraint(
model_name="roleprovidergrouprelationship",
constraint=models.UniqueConstraint(
fields=("role_id", "provider_group_id"),
name="unique_role_provider_group_relationship",
),
),
migrations.AddConstraint(
model_name="roleprovidergrouprelationship",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_roleprovidergrouprelationship",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddConstraint(
model_name="userrolerelationship",
constraint=models.UniqueConstraint(
fields=("role_id", "user_id"), name="unique_role_user_relationship"
),
),
migrations.AddConstraint(
model_name="userrolerelationship",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_userrolerelationship",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddConstraint(
model_name="role",
constraint=models.UniqueConstraint(
fields=("tenant_id", "name"), name="unique_role_per_tenant"
),
),
migrations.AddConstraint(
model_name="role",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_role",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.CreateModel(
name="InvitationRoleRelationship",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
(
"invitation",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.invitation"
),
),
(
"role",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.role"
),
),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "role_invitation_relationship",
},
),
migrations.AddConstraint(
model_name="invitationrolerelationship",
constraint=models.UniqueConstraint(
fields=("role_id", "invitation_id"),
name="unique_role_invitation_relationship",
),
),
migrations.AddConstraint(
model_name="invitationrolerelationship",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_invitationrolerelationship",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddField(
model_name="role",
name="invitations",
field=models.ManyToManyField(
related_name="roles",
through="api.InvitationRoleRelationship",
to="api.invitation",
),
),
]

View File

@@ -1,44 +0,0 @@
from django.db import migrations
from api.db_router import MainRouter
def create_admin_role(apps, schema_editor):
    """Data migration: ensure every tenant has an ``admin`` role with all
    permission flags enabled, and attach that role to each of the tenant's users.

    Uses the ``MainRouter.admin_db`` alias for every query — presumably a
    privileged connection that is not subject to tenant row-level security;
    TODO confirm against the router configuration.
    """
    Tenant = apps.get_model("api", "Tenant")
    Role = apps.get_model("api", "Role")
    User = apps.get_model("api", "User")
    UserRoleRelationship = apps.get_model("api", "UserRoleRelationship")

    for tenant in Tenant.objects.using(MainRouter.admin_db).all():
        # get_or_create keeps the migration idempotent if it is re-run.
        admin_role, _ = Role.objects.using(MainRouter.admin_db).get_or_create(
            name="admin",
            tenant=tenant,
            defaults={
                "manage_users": True,
                "manage_account": True,
                "manage_billing": True,
                "manage_providers": True,
                "manage_integrations": True,
                "manage_scans": True,
                "unlimited_visibility": True,
            },
        )
        # Users are associated to a tenant through their membership records.
        users = User.objects.using(MainRouter.admin_db).filter(
            membership__tenant=tenant
        )
        for user in users:
            UserRoleRelationship.objects.using(MainRouter.admin_db).get_or_create(
                user=user,
                role=admin_role,
                tenant=tenant,
            )
class Migration(migrations.Migration):
    """Backfill the default ``admin`` role introduced by the RBAC migration."""

    dependencies = [
        ("api", "0004_rbac"),
    ]

    operations = [
        # NOTE(review): no reverse function is supplied, so this data
        # migration is irreversible.
        migrations.RunPython(create_admin_role),
    ]

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +0,0 @@
from rest_framework_json_api.pagination import JsonApiPageNumberPagination
class ComplianceOverviewPagination(JsonApiPageNumberPagination):
    """JSON:API page-number pagination for compliance overview endpoints."""

    # Default number of results returned per page.
    page_size = 50
    # Upper bound a client may request via the page-size query parameter.
    max_page_size = 100

View File

@@ -1,203 +0,0 @@
from datetime import datetime, timezone
from typing import Generator, Optional
from dateutil.relativedelta import relativedelta
from django.conf import settings
from psqlextra.partitioning import (
PostgresPartitioningManager,
PostgresRangePartition,
PostgresRangePartitioningStrategy,
PostgresTimePartitionSize,
PostgresPartitioningError,
)
from psqlextra.partitioning.config import PostgresPartitioningConfig
from uuid6 import UUID
from api.models import Finding, ResourceFindingMapping
from api.rls import RowLevelSecurityConstraint
from api.uuid_utils import datetime_to_uuid7
class PostgresUUIDv7RangePartition(PostgresRangePartition):
    """Range partition whose bounds are UUIDv7 values.

    UUIDv7 embeds a millisecond Unix timestamp in its high bits, so a
    ``[from_values, to_values]`` UUID range corresponds to a time range; the
    partition name is derived from the start timestamp via ``name_format``.
    """

    def __init__(
        self,
        from_values: UUID,
        to_values: UUID,
        size: PostgresTimePartitionSize,
        name_format: Optional[str] = None,
        **kwargs,
    ) -> None:
        self.from_values = from_values
        self.to_values = to_values
        self.size = size
        self.name_format = name_format
        # Optional list of SQL statements ("SELECT", "INSERT", ...) for which
        # row-level-security policies are added to the created partition.
        self.rls_statements = None
        if "rls_statements" in kwargs:
            self.rls_statements = kwargs["rls_statements"]
        # `.time` on the UUID is divided by 1000 below, i.e. it is treated as
        # milliseconds since the epoch.
        start_timestamp_ms = self.from_values.time
        self.start_datetime = datetime.fromtimestamp(
            start_timestamp_ms / 1000, timezone.utc
        )

    def name(self) -> str:
        # Partition names are lowercase strftime renderings of the start datetime.
        if not self.name_format:
            raise PostgresPartitioningError("Unknown size/unit")
        return self.start_datetime.strftime(self.name_format).lower()

    def deconstruct(self) -> dict:
        # Extend the base serialization with the partition size metadata.
        return {
            **super().deconstruct(),
            "size_unit": self.size.unit.value,
            "size_value": self.size.value,
        }

    def create(
        self,
        model,
        schema_editor,
        comment,
    ) -> None:
        super().create(model, schema_editor, comment)
        # if this model has RLS statements, add them to the partition
        if isinstance(self.rls_statements, list):
            schema_editor.add_constraint(
                model,
                constraint=RowLevelSecurityConstraint(
                    "tenant_id",
                    name=f"rls_on_{self.name()}",
                    partition_name=self.name(),
                    statements=self.rls_statements,
                ),
            )
class PostgresUUIDv7PartitioningStrategy(PostgresRangePartitioningStrategy):
    """Partitioning strategy that creates and deletes UUIDv7 range partitions.

    Partitions are aligned to month boundaries: ``start_date`` (when given) is
    truncated to the first day of its month at midnight; otherwise
    ``get_start_datetime()`` supplies the start of the current month in UTC.

    Args:
        size: Time span covered by each partition.
        count: Number of partitions yielded by ``to_create``.
        start_date: Optional explicit starting point; defaults to the start of
            the current month.
        max_age: When set, ``to_delete`` yields partitions older than this.
        name_format: strftime pattern used to name partitions.
        **kwargs: may contain ``rls_statements``, forwarded to each created
            partition so RLS policies are attached to it.
    """

    def __init__(
        self,
        size: PostgresTimePartitionSize,
        count: int,
        start_date: datetime = None,
        max_age: Optional[relativedelta] = None,
        name_format: Optional[str] = None,
        **kwargs,
    ) -> None:
        # BUG FIX: the previous code called start_date.replace(...) without a
        # None check, raising AttributeError for the documented default of
        # None — even though to_create() already falls back to
        # get_start_datetime() when start_date is unset.
        self.start_date = (
            start_date.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
            if start_date is not None
            else None
        )
        self.size = size
        self.count = count
        self.max_age = max_age
        self.name_format = name_format
        self.rls_statements = None
        if "rls_statements" in kwargs:
            self.rls_statements = kwargs["rls_statements"]

    def to_create(self) -> Generator[PostgresUUIDv7RangePartition, None, None]:
        """Yield ``count`` consecutive partitions starting at the configured date."""
        current_datetime = (
            self.start_date if self.start_date else self.get_start_datetime()
        )
        for _ in range(self.count):
            # End one microsecond before the next partition starts so ranges
            # stay disjoint.
            end_datetime = (
                current_datetime + self.size.as_delta() - relativedelta(microseconds=1)
            )
            start_uuid7 = datetime_to_uuid7(current_datetime)
            end_uuid7 = datetime_to_uuid7(end_datetime)
            yield PostgresUUIDv7RangePartition(
                from_values=start_uuid7,
                to_values=end_uuid7,
                size=self.size,
                name_format=self.name_format,
                rls_statements=self.rls_statements,
            )
            current_datetime += self.size.as_delta()

    def to_delete(self) -> Generator[PostgresUUIDv7RangePartition, None, None]:
        """Yield partitions older than ``max_age``, walking backwards in time.

        NOTE(review): this generator is unbounded; the consumer is expected to
        stop iterating once partitions no longer exist — confirm against the
        partitioning manager's deletion loop.
        """
        if not self.max_age:
            return
        current_datetime = self.get_start_datetime() - self.max_age
        while True:
            end_datetime = current_datetime + self.size.as_delta()
            start_uuid7 = datetime_to_uuid7(current_datetime)
            end_uuid7 = datetime_to_uuid7(end_datetime)
            # dropping table will delete indexes and policies
            yield PostgresUUIDv7RangePartition(
                from_values=start_uuid7,
                to_values=end_uuid7,
                size=self.size,
                name_format=self.name_format,
            )
            current_datetime -= self.size.as_delta()

    def get_start_datetime(self) -> datetime:
        """
        Gets the start of the current month in UTC timezone.
        This function returns a `datetime` object set to the first day of the current
        month, at midnight (00:00:00), in UTC.
        Returns:
            datetime: A `datetime` object representing the start of the current month in UTC.
        """
        return datetime.now(timezone.utc).replace(
            day=1, hour=0, minute=0, second=0, microsecond=0
        )
def relative_days_or_none(value):
    """Convert an integer day count into a relativedelta; pass None through unchanged."""
    return None if value is None else relativedelta(days=value)
#
# To manage the partitions, run `python manage.py pgpartition --using admin`
#
# For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra
manager = PostgresPartitioningManager(
    [
        PostgresPartitioningConfig(
            model=Finding,
            strategy=PostgresUUIDv7PartitioningStrategy(
                start_date=datetime.now(timezone.utc),
                size=PostgresTimePartitionSize(
                    months=settings.FINDINGS_TABLE_PARTITION_MONTHS
                ),
                count=settings.FINDINGS_TABLE_PARTITION_COUNT,
                # NOTE(review): the setting name says MONTHS but the value is fed
                # through relative_days_or_none, i.e. interpreted as DAYS — confirm
                # which unit is intended.
                max_age=relative_days_or_none(
                    settings.FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS
                ),
                name_format="%Y_%b",
                rls_statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        # ResourceFindingMapping should always follow the Finding partitioning
        PostgresPartitioningConfig(
            model=ResourceFindingMapping,
            strategy=PostgresUUIDv7PartitioningStrategy(
                start_date=datetime.now(timezone.utc),
                size=PostgresTimePartitionSize(
                    months=settings.FINDINGS_TABLE_PARTITION_MONTHS
                ),
                count=settings.FINDINGS_TABLE_PARTITION_COUNT,
                max_age=relative_days_or_none(
                    settings.FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS
                ),
                name_format="%Y_%b",
                # Mapping partitions only receive SELECT policies (read-only RLS).
                rls_statements=["SELECT"],
            ),
        ),
    ]
)

View File

@@ -1,75 +0,0 @@
from enum import Enum
from typing import Optional
from django.db.models import QuerySet
from rest_framework.permissions import BasePermission
from api.db_router import MainRouter
from api.models import Provider, Role, User
class Permissions(Enum):
    """Permission flags a Role can grant.

    Each value is the name of the corresponding boolean field on the Role
    model, read via ``getattr(role, perm.value)``.
    """

    MANAGE_USERS = "manage_users"
    MANAGE_ACCOUNT = "manage_account"
    MANAGE_BILLING = "manage_billing"
    MANAGE_PROVIDERS = "manage_providers"
    MANAGE_INTEGRATIONS = "manage_integrations"
    MANAGE_SCANS = "manage_scans"
    UNLIMITED_VISIBILITY = "unlimited_visibility"
class HasPermissions(BasePermission):
    """
    Custom permission to check if the user's role has the required permissions.
    The required permissions should be specified in the view as a list in `required_permissions`.
    """

    def has_permission(self, request, view):
        # Views that declare no required_permissions are open to any
        # authenticated user.
        required_permissions = getattr(view, "required_permissions", [])
        if not required_permissions:
            return True
        # Queries through the admin alias — presumably to bypass tenant
        # row-level security; TODO confirm against the router configuration.
        user_roles = (
            User.objects.using(MainRouter.admin_db).get(id=request.user.id).roles.all()
        )
        if not user_roles:
            return False
        # NOTE(review): only the FIRST role is consulted; if a user can hold
        # several roles, permissions granted by the others are ignored —
        # confirm this is intended.
        for perm in required_permissions:
            if not getattr(user_roles[0], perm.value, False):
                return False
        return True
def get_role(user: User) -> Optional[Role]:
    """
    Retrieve the first role assigned to the given user.
    Returns:
        The user's first Role instance if the user has any roles, otherwise None.
    """
    # NOTE(review): "first" depends on the queryset's default ordering —
    # confirm which role wins when a user has more than one.
    return user.roles.first()
def get_providers(role: Role) -> QuerySet[Provider]:
    """
    Return a distinct queryset of Providers accessible by the given role.
    If the role has no associated provider groups, an empty queryset is returned.
    Args:
        role: A Role instance.
    Returns:
        A QuerySet of Provider objects filtered by the role's provider groups.
        If the role has no provider groups, returns an empty queryset.
    """
    tenant = role.tenant
    provider_groups = role.provider_groups.all()
    # A role with no provider groups grants visibility over no providers.
    if not provider_groups.exists():
        return Provider.objects.none()
    # distinct() because a provider may belong to several of the role's groups.
    return Provider.objects.filter(
        tenant=tenant, provider_groups__in=provider_groups
    ).distinct()

View File

@@ -1,23 +0,0 @@
from contextlib import nullcontext
from rest_framework_json_api.renderers import JSONRenderer
from api.db_utils import rls_transaction
class APIJSONRenderer(JSONRenderer):
    """JSONRenderer override to apply tenant RLS when there are included resources in the request."""

    def render(self, data, accepted_media_type=None, renderer_context=None):
        request = renderer_context.get("request")
        # tenant_id is expected to have been attached to the request upstream
        # (presumably by authentication/middleware) — TODO confirm the source.
        tenant_id = getattr(request, "tenant_id", None) if request else None
        # Rendering JSON:API "include"d resources triggers extra DB reads,
        # which must run inside the tenant's RLS context.
        include_param_present = "include" in request.query_params if request else False
        # Use rls_transaction if needed for included resources, otherwise do nothing
        context_manager = (
            rls_transaction(tenant_id)
            if tenant_id and include_param_present
            else nullcontext()
        )
        with context_manager:
            return super().render(data, accepted_media_type, renderer_context)

View File

@@ -1,188 +0,0 @@
from typing import Any
from uuid import uuid4
from django.core.exceptions import ValidationError
from django.db import DEFAULT_DB_ALIAS
from django.db import models
from django.db.backends.ddl_references import Statement, Table
from api.db_utils import DB_USER, POSTGRES_TENANT_VAR
class Tenant(models.Model):
    """
    The Tenant is the basic grouping in the system. It is used to separate data between customers.
    """

    # UUID primary key generated application-side (not by the database).
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    name = models.CharField(max_length=100)

    class Meta:
        db_table = "tenants"

    class JSONAPIMeta:
        # Resource type name used by the JSON:API serializer layer.
        resource_name = "tenants"
class RowLevelSecurityConstraint(models.BaseConstraint):
    """
    Model constraint to enforce row-level security on a tenant based model, in addition to the least privileges.
    The constraint can be applied to a partitioned table by specifying the `partition_name` keyword argument.
    """

    # FORCE makes even the table owner subject to the policies.
    rls_sql_query = """
        ALTER TABLE %(table_name)s ENABLE ROW LEVEL SECURITY;
        ALTER TABLE %(table_name)s FORCE ROW LEVEL SECURITY;
    """
    # One policy per statement; rows are visible only when the session's
    # tenant setting matches the row's tenant column.
    policy_sql_query = """
        CREATE POLICY %(db_user)s_%(table_name)s_{statement}
        ON %(table_name)s
        FOR {statement}
        TO %(db_user)s
        {clause} (
            CASE
                WHEN current_setting('%(tenant_setting)s', True) IS NULL THEN FALSE
                ELSE %(field_column)s = current_setting('%(tenant_setting)s')::uuid
            END
        );
    """
    grant_sql_query = """
        GRANT {statement} ON %(table_name)s TO %(db_user)s;
    """
    # BUG FIX: "%(table_name)" was missing its trailing "s" conversion
    # character (ValueError when the Statement is rendered), and REVOKE
    # takes FROM, not TO.
    drop_sql_query = """
        ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
        ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
        REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
    """
    drop_policy_sql_query = """
        DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
    """

    def __init__(
        self, field: str, name: str, statements: list | None = None, **kwargs
    ) -> None:
        super().__init__(name=name)
        # Column holding the tenant id that policies compare against.
        self.target_field: str = field
        # SQL statements to create policies/grants for; read-only by default.
        self.statements = statements or ["SELECT"]
        self.partition_name = None
        if "partition_name" in kwargs:
            self.partition_name = kwargs["partition_name"]

    def create_sql(self, model: Any, schema_editor: Any) -> Any:
        """Build the combined ALTER TABLE / CREATE POLICY / GRANT statement."""
        field_column = schema_editor.quote_name(self.target_field)
        policy_queries = ""
        grant_queries = ""
        for statement in self.statements:
            # INSERT policies must use WITH CHECK; the rest use USING.
            clause = f"{'WITH CHECK' if statement == 'INSERT' else 'USING'}"
            policy_queries = f"{policy_queries}{self.policy_sql_query.format(statement=statement, clause=clause)}"
            grant_queries = (
                f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
            )
        full_create_sql_query = (
            f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
        )
        table_name = model._meta.db_table
        if self.partition_name:
            # Partitions are named <table>_<partition>.
            table_name = f"{table_name}_{self.partition_name}"
        return Statement(
            full_create_sql_query,
            table_name=table_name,
            field_column=field_column,
            db_user=DB_USER,
            tenant_setting=POSTGRES_TENANT_VAR,
            partition_name=self.partition_name,
        )

    def remove_sql(self, model: Any, schema_editor: Any) -> Any:
        """Build the statement that disables RLS and drops policies/grants."""
        field_column = schema_editor.quote_name(self.target_field)
        # BUG FIX: the template uses the named field {statement}; the previous
        # positional call .format(statement) raised KeyError at removal time.
        full_drop_sql_query = (
            f"{self.drop_sql_query}"
            f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
        )
        table_name = model._meta.db_table
        if self.partition_name:
            table_name = f"{table_name}_{self.partition_name}"
        return Statement(
            full_drop_sql_query,
            table_name=Table(table_name, schema_editor.quote_name),
            field_column=field_column,
            db_user=DB_USER,
            partition_name=self.partition_name,
        )

    def __eq__(self, other: object) -> bool:
        if isinstance(other, RowLevelSecurityConstraint):
            return self.name == other.name and self.target_field == other.target_field
        return super().__eq__(other)

    def deconstruct(self) -> tuple[str, tuple, dict]:
        # NOTE(review): `statements` and `partition_name` are not serialized,
        # so re-created constraints fall back to defaults — confirm intended.
        path, _, kwargs = super().deconstruct()
        return (path, (self.target_field,), kwargs)

    def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS):  # noqa: F841
        # Constraint only applies to tenant-scoped models.
        if not hasattr(instance, "tenant_id"):
            raise ValidationError(f"{model.__name__} does not have a tenant_id field.")
class BaseSecurityConstraint(models.BaseConstraint):
    """Model constraint to grant the least privileges to the API database user."""

    grant_sql_query = """
        GRANT {statement} ON %(table_name)s TO %(db_user)s;
    """
    # BUG FIX: "%(table_name)" was missing its trailing "s" conversion
    # character (ValueError when the Statement is rendered), and REVOKE
    # takes FROM, not TO.
    drop_sql_query = """
        REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
    """

    def __init__(self, name: str, statements: list | None = None) -> None:
        super().__init__(name=name)
        # Statements to GRANT; read-only access by default.
        self.statements = statements or ["SELECT"]

    def create_sql(self, model: Any, schema_editor: Any) -> Any:
        """Build one GRANT per configured statement for the model's table."""
        grant_queries = ""
        for statement in self.statements:
            grant_queries = (
                f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
            )
        return Statement(
            grant_queries,
            table_name=model._meta.db_table,
            db_user=DB_USER,
        )

    def remove_sql(self, model: Any, schema_editor: Any) -> Any:
        """Build the REVOKE that strips all privileges from the API user."""
        return Statement(
            self.drop_sql_query,
            table_name=Table(model._meta.db_table, schema_editor.quote_name),
            db_user=DB_USER,
        )

    def __eq__(self, other: object) -> bool:
        if isinstance(other, BaseSecurityConstraint):
            return self.name == other.name
        return super().__eq__(other)

    def deconstruct(self) -> tuple[str, tuple, dict]:
        path, args, kwargs = super().deconstruct()
        return path, args, kwargs
class RowLevelSecurityProtectedModel(models.Model):
    """Abstract base for models whose rows are scoped to a tenant."""

    tenant = models.ForeignKey("Tenant", on_delete=models.CASCADE)

    class Meta:
        abstract = True

View File

@@ -1,35 +0,0 @@
from celery import states
from celery.signals import before_task_publish
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django_celery_beat.models import PeriodicTask
from django_celery_results.backends.database import DatabaseBackend
from api.models import Provider
from config.celery import celery_app
def create_task_result_on_publish(sender=None, headers=None, **kwargs):  # noqa: F841
    """Celery signal to store TaskResult entries when tasks reach the broker."""
    db_result_backend = DatabaseBackend(celery_app)
    # The backend API expects a task-request object; synthesize a minimal
    # stand-in class from the message headers.
    request = type("request", (object,), headers)
    # Record the task as PENDING so it is visible before a worker picks it up.
    db_result_backend.store_result(
        headers["id"],
        None,
        states.PENDING,
        traceback=None,
        request=request,
    )


# Register on before_task_publish; dispatch_uid prevents duplicate connections.
before_task_publish.connect(
    create_task_result_on_publish, dispatch_uid="create_task_result_on_publish"
)
@receiver(post_delete, sender=Provider)
def delete_provider_scan_task(sender, instance, **kwargs):  # noqa: F841
    """Remove the provider's scheduled-scan periodic task when the provider is deleted."""
    # Delete the associated periodic task when the provider is deleted
    task_name = f"scan-perform-scheduled-{instance.id}"
    PeriodicTask.objects.filter(name=task_name).delete()

File diff suppressed because it is too large Load Diff

View File

@@ -1,179 +0,0 @@
import pytest
from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
from django.urls import reverse
from rest_framework.test import APIClient
@pytest.mark.django_db
def test_basic_authentication():
    """End-to-end flow: unauthenticated 401 -> open signup -> token auth -> 200."""
    client = APIClient()
    test_user = "test_email@prowler.com"
    test_password = "test_password"
    # Check that a 401 is returned when no basic authentication is provided
    no_auth_response = client.get(reverse("provider-list"))
    assert no_auth_response.status_code == 401
    # Check that we can create a new user without any kind of authentication
    user_creation_response = client.post(
        reverse("user-list"),
        data={
            "data": {
                "type": "users",
                "attributes": {
                    "name": "test",
                    "email": test_user,
                    "password": test_password,
                },
            }
        },
        format="vnd.api+json",
    )
    assert user_creation_response.status_code == 201
    # Check that using our new user's credentials we can authenticate and get the providers
    access_token, _ = get_api_tokens(client, test_user, test_password)
    auth_headers = get_authorization_header(access_token)
    auth_response = client.get(
        reverse("provider-list"),
        headers=auth_headers,
    )
    assert auth_response.status_code == 200
@pytest.mark.django_db
def test_refresh_token(create_test_user, tenants_fixture):
client = APIClient()
# Assert that we can obtain a new access token using the refresh one
access_token, refresh_token = get_api_tokens(
client, create_test_user.email, TEST_PASSWORD
)
valid_refresh_response = client.post(
reverse("token-refresh"),
data={
"data": {
"type": "tokens-refresh",
"attributes": {"refresh": refresh_token},
}
},
format="vnd.api+json",
)
assert valid_refresh_response.status_code == 200
assert (
valid_refresh_response.json()["data"]["attributes"]["refresh"] != refresh_token
)
# Assert the former refresh token gets invalidated
invalid_refresh_response = client.post(
reverse("token-refresh"),
data={
"data": {
"type": "tokens-refresh",
"attributes": {"refresh": refresh_token},
}
},
format="vnd.api+json",
)
assert invalid_refresh_response.status_code == 400
# Assert that the new refresh token could be used
new_refresh_response = client.post(
reverse("token-refresh"),
data={
"data": {
"type": "tokens-refresh",
"attributes": {
"refresh": valid_refresh_response.json()["data"]["attributes"][
"refresh"
]
},
}
},
format="vnd.api+json",
)
assert new_refresh_response.status_code == 200
@pytest.mark.django_db
def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fixture):
client = APIClient()
role = roles_fixture[0]
user1_email = "user1@testing.com"
user2_email = "user2@testing.com"
password = "thisisapassword123"
user1_response = client.post(
reverse("user-list"),
data={
"data": {
"type": "users",
"attributes": {
"name": "user1",
"email": user1_email,
"password": password,
},
}
},
format="vnd.api+json",
)
assert user1_response.status_code == 201
user1_access_token, _ = get_api_tokens(client, user1_email, password)
user1_headers = get_authorization_header(user1_access_token)
user2_invitation = client.post(
reverse("invitation-list"),
data={
"data": {
"type": "invitations",
"attributes": {"email": user2_email},
"relationships": {
"roles": {
"data": [
{
"type": "roles",
"id": str(role.id),
}
]
}
},
}
},
format="vnd.api+json",
headers=user1_headers,
)
assert user2_invitation.status_code == 201
invitation_token = user2_invitation.json()["data"]["attributes"]["token"]
user2_response = client.post(
reverse("user-list") + f"?invitation_token={invitation_token}",
data={
"data": {
"type": "users",
"attributes": {
"name": "user2",
"email": user2_email,
"password": password,
},
}
},
format="vnd.api+json",
)
assert user2_response.status_code == 201
user2_access_token, _ = get_api_tokens(client, user2_email, password)
user2_headers = get_authorization_header(user2_access_token)
user1_me = client.get(reverse("user-me"), headers=user1_headers)
assert user1_me.status_code == 200
assert user1_me.json()["data"]["attributes"]["email"] == user1_email
user2_me = client.get(reverse("user-me"), headers=user2_headers)
assert user2_me.status_code == 200
assert user2_me.json()["data"]["attributes"]["email"] == user2_email

View File

@@ -1,85 +0,0 @@
from unittest.mock import Mock, patch
import pytest
from conftest import get_api_tokens, get_authorization_header
from django.urls import reverse
from rest_framework.test import APIClient
from api.models import Provider
@patch("api.v1.views.Task.objects.get")
@patch("api.v1.views.delete_provider_task.delay")
@pytest.mark.django_db
def test_delete_provider_without_executing_task(
mock_delete_task, mock_task_get, create_test_user, tenants_fixture, tasks_fixture
):
client = APIClient()
test_user = "test_email@prowler.com"
test_password = "test_password"
prowler_task = tasks_fixture[0]
task_mock = Mock()
task_mock.id = prowler_task.id
mock_delete_task.return_value = task_mock
mock_task_get.return_value = prowler_task
user_creation_response = client.post(
reverse("user-list"),
data={
"data": {
"type": "users",
"attributes": {
"name": "test",
"email": test_user,
"password": test_password,
},
}
},
format="vnd.api+json",
)
assert user_creation_response.status_code == 201
access_token, _ = get_api_tokens(client, test_user, test_password)
auth_headers = get_authorization_header(access_token)
create_provider_response = client.post(
reverse("provider-list"),
data={
"data": {
"type": "providers",
"attributes": {
"provider": Provider.ProviderChoices.AWS,
"uid": "123456789012",
},
}
},
format="vnd.api+json",
headers=auth_headers,
)
assert create_provider_response.status_code == 201
provider_id = create_provider_response.json()["data"]["id"]
provider_uid = create_provider_response.json()["data"]["attributes"]["uid"]
remove_provider = client.delete(
reverse("provider-detail", kwargs={"pk": provider_id}),
headers=auth_headers,
)
assert remove_provider.status_code == 202
recreate_provider_response = client.post(
reverse("provider-list"),
data={
"data": {
"type": "providers",
"attributes": {
"provider": Provider.ProviderChoices.AWS,
"uid": provider_uid,
},
}
},
format="vnd.api+json",
headers=auth_headers,
)
assert recreate_provider_response.status_code == 201

View File

@@ -1,98 +0,0 @@
from unittest.mock import patch
import pytest
from django.urls import reverse
from conftest import TEST_USER, TEST_PASSWORD, get_api_tokens, get_authorization_header
@patch("api.v1.views.schedule_provider_scan")
@pytest.mark.django_db
def test_check_resources_between_different_tenants(
schedule_mock,
enforce_test_user_db_connection,
authenticated_api_client,
tenants_fixture,
set_user_admin_roles_fixture,
):
client = authenticated_api_client
tenant1 = str(tenants_fixture[0].id)
tenant2 = str(tenants_fixture[1].id)
tenant1_token, _ = get_api_tokens(
client, TEST_USER, TEST_PASSWORD, tenant_id=tenant1
)
tenant2_token, _ = get_api_tokens(
client, TEST_USER, TEST_PASSWORD, tenant_id=tenant2
)
tenant1_headers = get_authorization_header(tenant1_token)
tenant2_headers = get_authorization_header(tenant2_token)
# Create a provider on tenant 1
provider_data = {
"data": {
"type": "providers",
"attributes": {
"alias": "test_provider_tenant_1",
"provider": "aws",
"uid": "123456789012",
},
}
}
provider1_response = client.post(
reverse("provider-list"),
data=provider_data,
format="vnd.api+json",
headers=tenant1_headers,
)
assert provider1_response.status_code == 201
provider1_id = provider1_response.json()["data"]["id"]
# Create a provider on tenant 2
provider_data = {
"data": {
"type": "providers",
"attributes": {
"alias": "test_provider_tenant_2",
"provider": "aws",
"uid": "123456789013",
},
}
}
provider2_response = client.post(
reverse("provider-list"),
data=provider_data,
format="vnd.api+json",
headers=tenant2_headers,
)
assert provider2_response.status_code == 201
provider2_id = provider2_response.json()["data"]["id"]
# Try to get the provider from tenant 1 on tenant 2 and vice versa
tenant1_response = client.get(
reverse("provider-detail", kwargs={"pk": provider1_id}),
headers=tenant2_headers,
)
assert tenant1_response.status_code == 404
tenant2_response = client.get(
reverse("provider-detail", kwargs={"pk": provider1_id}),
headers=tenant1_headers,
)
assert tenant2_response.status_code == 200
assert tenant2_response.json()["data"]["id"] == provider1_id
# Vice versa
tenant2_response = client.get(
reverse("provider-detail", kwargs={"pk": provider2_id}),
headers=tenant1_headers,
)
assert tenant2_response.status_code == 404
tenant1_response = client.get(
reverse("provider-detail", kwargs={"pk": provider2_id}),
headers=tenant2_headers,
)
assert tenant1_response.status_code == 200
assert tenant1_response.json()["data"]["id"] == provider2_id

View File

@@ -1,284 +0,0 @@
from unittest.mock import patch, MagicMock
from api.compliance import (
get_prowler_provider_checks,
get_prowler_provider_compliance,
load_prowler_compliance,
load_prowler_checks,
generate_scan_compliance,
generate_compliance_overview_template,
)
from api.models import Provider
class TestCompliance:
    """Unit tests for the api.compliance helpers: check/compliance loading,
    the global template/check caches, and per-scan compliance aggregation."""

    @patch("api.compliance.CheckMetadata")
    def test_get_prowler_provider_checks(self, mock_check_metadata):
        # get_prowler_provider_checks returns the check ids exposed by
        # CheckMetadata.get_bulk() for the given provider type.
        provider_type = Provider.ProviderChoices.AWS
        mock_check_metadata.get_bulk.return_value = {
            "check1": MagicMock(),
            "check2": MagicMock(),
            "check3": MagicMock(),
        }
        checks = get_prowler_provider_checks(provider_type)
        assert set(checks) == {"check1", "check2", "check3"}
        mock_check_metadata.get_bulk.assert_called_once_with(provider_type)

    @patch("api.compliance.Compliance")
    def test_get_prowler_provider_compliance(self, mock_compliance):
        # The compliance bulk data is passed through unchanged.
        provider_type = Provider.ProviderChoices.AWS
        mock_compliance.get_bulk.return_value = {
            "compliance1": MagicMock(),
            "compliance2": MagicMock(),
        }
        compliance_data = get_prowler_provider_compliance(provider_type)
        assert compliance_data == mock_compliance.get_bulk.return_value
        mock_compliance.get_bulk.assert_called_once_with(provider_type)

    # NOTE: @patch decorators apply bottom-up, so the mock arguments below
    # are in reverse order of the decorator list.
    @patch("api.models.Provider.ProviderChoices")
    @patch("api.compliance.get_prowler_provider_compliance")
    @patch("api.compliance.generate_compliance_overview_template")
    @patch("api.compliance.load_prowler_checks")
    def test_load_prowler_compliance(
        self,
        mock_load_prowler_checks,
        mock_generate_compliance_overview_template,
        mock_get_prowler_provider_compliance,
        mock_provider_choices,
    ):
        # load_prowler_compliance() must gather compliance data for every
        # provider type and populate the module-level caches.
        mock_provider_choices.values = ["aws", "azure"]
        compliance_data_aws = {"compliance_aws": MagicMock()}
        compliance_data_azure = {"compliance_azure": MagicMock()}
        compliance_data_dict = {
            "aws": compliance_data_aws,
            "azure": compliance_data_azure,
        }

        def mock_get_compliance(provider_type):
            return compliance_data_dict[provider_type]

        mock_get_prowler_provider_compliance.side_effect = mock_get_compliance
        mock_generate_compliance_overview_template.return_value = {
            "template_key": "template_value"
        }
        mock_load_prowler_checks.return_value = {"checks_key": "checks_value"}
        load_prowler_compliance()
        # Import after the call so we read the freshly assigned module globals.
        from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS
        assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
            "template_key": "template_value"
        }
        assert PROWLER_CHECKS == {"checks_key": "checks_value"}
        expected_prowler_compliance = compliance_data_dict
        mock_get_prowler_provider_compliance.assert_any_call("aws")
        mock_get_prowler_provider_compliance.assert_any_call("azure")
        mock_generate_compliance_overview_template.assert_called_once_with(
            expected_prowler_compliance
        )
        mock_load_prowler_checks.assert_called_once_with(expected_prowler_compliance)

    @patch("api.compliance.get_prowler_provider_checks")
    @patch("api.models.Provider.ProviderChoices")
    def test_load_prowler_checks(
        self, mock_provider_choices, mock_get_prowler_provider_checks
    ):
        # Maps each check id to the set of compliance frameworks whose
        # requirements reference it; unreferenced checks get an empty set.
        mock_provider_choices.values = ["aws"]
        mock_get_prowler_provider_checks.return_value = ["check1", "check2", "check3"]
        prowler_compliance = {
            "aws": {
                "compliance1": MagicMock(
                    Requirements=[
                        MagicMock(
                            Checks=["check1", "check2"],
                        ),
                    ],
                ),
            },
        }
        expected_checks = {
            "aws": {
                "check1": {"compliance1"},
                "check2": {"compliance1"},
                "check3": set(),
            }
        }
        checks = load_prowler_checks(prowler_compliance)
        assert checks == expected_checks
        mock_get_prowler_provider_checks.assert_called_once_with("aws")

    @patch("api.compliance.PROWLER_CHECKS", new_callable=dict)
    def test_generate_scan_compliance(self, mock_prowler_checks):
        # A FAIL for check2 must propagate to every compliance framework and
        # requirement that references it, updating per-requirement counters
        # and the framework-level requirements_status; check1 stays untouched.
        mock_prowler_checks["aws"] = {
            "check1": {"compliance1"},
            "check2": {"compliance1", "compliance2"},
        }
        compliance_overview = {
            "compliance1": {
                "requirements": {
                    "requirement1": {
                        "checks": {"check1": None, "check2": None},
                        "checks_status": {
                            "pass": 0,
                            "fail": 0,
                            "manual": 0,
                            "total": 2,
                        },
                        "status": "PASS",
                    }
                },
                "requirements_status": {"passed": 1, "failed": 0, "manual": 0},
            },
            "compliance2": {
                "requirements": {
                    "requirement2": {
                        "checks": {"check2": None},
                        "checks_status": {
                            "pass": 0,
                            "fail": 0,
                            "manual": 0,
                            "total": 1,
                        },
                        "status": "PASS",
                    }
                },
                "requirements_status": {"passed": 1, "failed": 0, "manual": 0},
            },
        }
        provider_type = "aws"
        check_id = "check2"
        status = "FAIL"
        generate_scan_compliance(compliance_overview, provider_type, check_id, status)
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks"
            ]["check2"]
            == "FAIL"
        )
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks_status"
            ]["fail"]
            == 1
        )
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"]["status"]
            == "FAIL"
        )
        assert compliance_overview["compliance1"]["requirements_status"]["passed"] == 0
        assert compliance_overview["compliance1"]["requirements_status"]["failed"] == 1
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"][
                "checks"
            ]["check2"]
            == "FAIL"
        )
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"][
                "checks_status"
            ]["fail"]
            == 1
        )
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"]["status"]
            == "FAIL"
        )
        assert compliance_overview["compliance2"]["requirements_status"]["passed"] == 0
        assert compliance_overview["compliance2"]["requirements_status"]["failed"] == 1
        # check1 was not reported, so its slot must remain None.
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks"
            ]["check1"]
            is None
        )

    @patch("api.models.Provider.ProviderChoices")
    def test_generate_compliance_overview_template(self, mock_provider_choices):
        # Template generation: requirements with zero checks are counted as
        # "manual"; everything else starts as PASS with zeroed counters.
        mock_provider_choices.values = ["aws"]
        requirement1 = MagicMock(
            Id="requirement1",
            Name="Requirement 1",
            Description="Description of requirement 1",
            Attributes=[],
            Checks=["check1", "check2"],
        )
        requirement2 = MagicMock(
            Id="requirement2",
            Name="Requirement 2",
            Description="Description of requirement 2",
            Attributes=[],
            Checks=[],
        )
        compliance1 = MagicMock(
            Requirements=[requirement1, requirement2],
            Framework="Framework 1",
            Version="1.0",
            Description="Description of compliance1",
        )
        prowler_compliance = {"aws": {"compliance1": compliance1}}
        template = generate_compliance_overview_template(prowler_compliance)
        expected_template = {
            "aws": {
                "compliance1": {
                    "framework": "Framework 1",
                    "version": "1.0",
                    "provider": "aws",
                    "description": "Description of compliance1",
                    "requirements": {
                        "requirement1": {
                            "name": "Requirement 1",
                            "description": "Description of requirement 1",
                            "attributes": [],
                            "checks": {"check1": None, "check2": None},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "manual": 0,
                                "total": 2,
                            },
                            "status": "PASS",
                        },
                        "requirement2": {
                            "name": "Requirement 2",
                            "description": "Description of requirement 2",
                            "attributes": [],
                            "checks": {},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "manual": 0,
                                "total": 0,
                            },
                            "status": "PASS",
                        },
                    },
                    "requirements_status": {
                        "passed": 1,  # total_requirements - manual
                        "failed": 0,
                        "manual": 1,  # requirement2 has 0 checks
                    },
                    "total_requirements": 2,
                }
            }
        }
        assert template == expected_template

View File

@@ -1,31 +0,0 @@
import pytest
from django.conf import settings
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import ConnectionRouter
from api.db_router import MainRouter
from api.rls import Tenant
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
from unittest.mock import patch
@patch("api.db_router.MainRouter.admin_db", new="admin")
class TestMainDatabaseRouter:
@pytest.fixture(scope="module")
def router(self):
testing_routers = settings.DATABASE_ROUTERS.copy()
settings.DATABASE_ROUTERS = PROD_DATABASE_ROUTERS
yield ConnectionRouter()
settings.DATABASE_ROUTERS = testing_routers
@pytest.mark.parametrize("api_model", [Tenant])
def test_router_api_models(self, api_model, router):
assert router.db_for_read(api_model) == "default"
assert router.db_for_write(api_model) == "default"
assert router.allow_migrate_model(MainRouter.admin_db, api_model)
assert not router.allow_migrate_model("default", api_model)
def test_router_django_models(self, router):
assert router.db_for_read(MigrationRecorder.Migration) == MainRouter.admin_db
assert not router.db_for_read(MigrationRecorder.Migration) == "default"

View File

@@ -1,139 +0,0 @@
from datetime import datetime, timezone
from enum import Enum
from unittest.mock import patch
import pytest
from api.db_utils import (
batch_delete,
enum_to_choices,
generate_random_token,
one_week_from_now,
)
from api.models import Provider
class TestEnumToChoices:
    """Behavioral tests for enum_to_choices, which turns an Enum class into
    Django-style (value, human-readable label) choice tuples."""

    def test_enum_to_choices_simple(self):
        class Color(Enum):
            RED = 1
            GREEN = 2
            BLUE = 3

        # Member names are title-cased into labels.
        assert enum_to_choices(Color) == [
            (1, "Red"),
            (2, "Green"),
            (3, "Blue"),
        ]

    def test_enum_to_choices_with_underscores(self):
        class Status(Enum):
            PENDING_APPROVAL = "pending"
            IN_PROGRESS = "in_progress"
            COMPLETED_SUCCESSFULLY = "completed"

        # Underscores in member names become spaces in the labels.
        assert enum_to_choices(Status) == [
            ("pending", "Pending Approval"),
            ("in_progress", "In Progress"),
            ("completed", "Completed Successfully"),
        ]

    def test_enum_to_choices_empty_enum(self):
        class EmptyEnum(Enum):
            pass

        # An enum with no members yields no choices.
        assert enum_to_choices(EmptyEnum) == []

    def test_enum_to_choices_numeric_values(self):
        class Numbers(Enum):
            ONE = 1
            TWO = 2
            THREE = 3

        assert enum_to_choices(Numbers) == [
            (1, "One"),
            (2, "Two"),
            (3, "Three"),
        ]
class TestOneWeekFromNow:
    """Tests for one_week_from_now; the module's datetime is patched so the
    "now" reference point is deterministic."""

    def test_one_week_from_now(self):
        with patch("api.db_utils.datetime") as mock_datetime:
            mock_datetime.now.return_value = datetime(2023, 1, 1, tzinfo=timezone.utc)
            expected_result = datetime(2023, 1, 8, tzinfo=timezone.utc)
            result = one_week_from_now()
            assert result == expected_result

    def test_one_week_from_now_with_timezone(self):
        # Time-of-day must be preserved when adding the seven days.
        with patch("api.db_utils.datetime") as mock_datetime:
            mock_datetime.now.return_value = datetime(
                2023, 6, 15, 12, 0, tzinfo=timezone.utc
            )
            expected_result = datetime(2023, 6, 22, 12, 0, tzinfo=timezone.utc)
            result = one_week_from_now()
            assert result == expected_result
class TestGenerateRandomToken:
    """Behavioral tests for generate_random_token (length, alphabet, and
    uniqueness properties)."""

    def test_generate_random_token_default_length(self):
        # Default token length is 14 characters.
        assert len(generate_random_token()) == 14

    def test_generate_random_token_custom_length(self):
        assert len(generate_random_token(length=20)) == 20

    def test_generate_random_token_with_symbols(self):
        alphabet = "ABC123"
        token = generate_random_token(length=10, symbols=alphabet)
        assert len(token) == 10
        # Every character must come from the supplied alphabet.
        assert set(token) <= set(alphabet)

    def test_generate_random_token_unique(self):
        # Assuming that generating 1000 tokens should result in unique values
        drawn = {generate_random_token() for _ in range(1000)}
        assert len(drawn) == 1000

    def test_generate_random_token_no_symbols_provided(self):
        # An empty symbols string falls back to the default alphabet.
        assert len(generate_random_token(length=5, symbols="")) == 5
class TestBatchDelete:
    """Tests for batch_delete: deletes a queryset in fixed-size batches and
    returns a per-model deletion summary."""

    @pytest.fixture
    def create_test_providers(self, tenants_fixture):
        # Seed 10 AWS providers with sequential uids; returns the count so
        # the test can size its batches and assertions from it.
        tenant = tenants_fixture[0]
        provider_id = 123456789012
        provider_count = 10
        for i in range(provider_count):
            Provider.objects.create(
                tenant=tenant,
                uid=f"{provider_id + i}",
                provider=Provider.ProviderChoices.AWS,
            )
        return provider_count

    @pytest.mark.django_db
    def test_batch_delete(self, create_test_providers):
        # batch_size of half the rows forces multiple delete batches.
        _, summary = batch_delete(
            Provider.objects.all(), batch_size=create_test_providers // 2
        )
        assert Provider.objects.all().count() == 0
        assert summary == {"api.Provider": create_test_providers}

View File

@@ -1,36 +0,0 @@
import uuid
from unittest.mock import call, patch
import pytest
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
from api.decorators import set_tenant
@pytest.mark.django_db
class TestSetTenantDecorator:
    """Tests for the @set_tenant decorator, which sets the Postgres RLS
    tenant variable before running the wrapped function."""

    @patch("api.decorators.connection.cursor")
    def test_set_tenant(self, mock_cursor):
        # Make the context-manager form (`with connection.cursor() as c`)
        # hand back the same mock so execute() calls can be inspected.
        mock_cursor.return_value.__enter__.return_value = mock_cursor

        @set_tenant
        def random_func(arg):
            return arg

        tenant_id = str(uuid.uuid4())
        result = random_func("test_arg", tenant_id=tenant_id)
        # The decorator must issue SET_CONFIG_QUERY with the tenant id and
        # pass the remaining arguments through untouched.
        assert (
            call(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
            in mock_cursor.execute.mock_calls
        )
        assert result == "test_arg"

    def test_set_tenant_exception(self):
        @set_tenant
        def random_func(arg):
            return arg

        # Calling without the mandatory tenant_id kwarg raises KeyError.
        with pytest.raises(KeyError):
            random_func("test_arg")

View File

@@ -1,54 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from django.http import HttpResponse
from django.test import RequestFactory
from api.middleware import APILoggingMiddleware
@pytest.mark.django_db
@patch("logging.getLogger")
def test_api_logging_middleware_logging(mock_logger):
    """APILoggingMiddleware must log one structured record per request with
    auth info, request metadata, status code, and wall-clock duration."""
    # NOTE(review): the outer @patch("logging.getLogger") argument is
    # immediately shadowed by the inner MagicMock below; it appears to only
    # suppress real logger creation — confirm whether it is still needed.
    factory = RequestFactory()
    request = factory.get("/test-path?param1=value1&param2=value2")
    request.method = "GET"
    response = HttpResponse()
    response.status_code = 200
    get_response = MagicMock(return_value=response)
    with patch("api.middleware.extract_auth_info") as mock_extract_auth_info:
        mock_extract_auth_info.return_value = {
            "user_id": "user123",
            "tenant_id": "tenant456",
        }
        with patch("api.middleware.logging.getLogger") as mock_get_logger:
            mock_logger = MagicMock()
            mock_get_logger.return_value = mock_logger
            # The middleware grabs its logger at construction time, so the
            # patch must be active before instantiation.
            middleware = APILoggingMiddleware(get_response)
            with patch("api.middleware.time.time") as mock_time:
                mock_time.side_effect = [1000.0, 1001.0]  # Start time and end time
                middleware(request)
    get_response.assert_called_once_with(request)
    mock_extract_auth_info.assert_called_once_with(request)
    expected_extra = {
        "user_id": "user123",
        "tenant_id": "tenant456",
        "method": "GET",
        "path": "/test-path",
        "query_params": {"param1": "value1", "param2": "value2"},
        "status_code": 200,
        "duration": 1.0,
    }
    mock_logger.info.assert_called_once_with("", extra=expected_extra)

View File

@@ -1,94 +0,0 @@
import pytest
from api.models import Resource, ResourceTag
@pytest.mark.django_db
class TestResourceModel:
    """Tests for Resource tag management: upsert_or_delete_tags, get_tags,
    and clear_tags, always scoped by tenant."""

    def test_setting_tags(self, providers_fixture):
        provider, *_ = providers_fixture
        tenant_id = provider.tenant_id
        resource = Resource.objects.create(
            tenant_id=tenant_id,
            provider=provider,
            uid="arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0",
            name="My Instance 1",
            region="us-east-1",
            service="ec2",
            type="prowler-test",
        )
        tags = [
            ResourceTag.objects.create(
                tenant_id=tenant_id,
                key="key",
                value="value",
            ),
            ResourceTag.objects.create(
                tenant_id=tenant_id,
                key="key2",
                value="value2",
            ),
        ]
        resource.upsert_or_delete_tags(tags)
        assert len(tags) == len(resource.tags.filter(tenant_id=tenant_id))
        # get_tags returns a plain {key: value} mapping of the stored tags.
        tags_dict = resource.get_tags(tenant_id=tenant_id)
        for tag in tags:
            assert tag.key in tags_dict
            assert tag.value == tags_dict[tag.key]

    def test_adding_tags(self, resources_fixture):
        # Upserting a new tag adds it without disturbing existing ones.
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)
        tags = [
            ResourceTag.objects.create(
                tenant_id=tenant_id,
                key="env",
                value="test",
            ),
        ]
        before_count = len(resource.tags.filter(tenant_id=tenant_id))
        resource.upsert_or_delete_tags(tags)
        assert before_count + 1 == len(resource.tags.filter(tenant_id=tenant_id))
        tags_dict = resource.get_tags(tenant_id=tenant_id)
        assert "env" in tags_dict
        assert tags_dict["env"] == "test"

    def test_adding_duplicate_tags(self, resources_fixture):
        # Re-upserting the resource's own tags must be idempotent.
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)
        tags = resource.tags.filter(tenant_id=tenant_id)
        before_count = len(resource.tags.filter(tenant_id=tenant_id))
        resource.upsert_or_delete_tags(tags)
        # should be the same number of tags
        assert before_count == len(resource.tags.filter(tenant_id=tenant_id))

    def test_add_tags_none(self, resources_fixture):
        # Passing None clears all tags for the tenant.
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)
        resource.upsert_or_delete_tags(None)
        assert len(resource.tags.filter(tenant_id=tenant_id)) == 0
        assert resource.get_tags(tenant_id=tenant_id) == {}

    def test_clear_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)
        resource.clear_tags()
        assert len(resource.tags.filter(tenant_id=tenant_id)) == 0
        assert resource.get_tags(tenant_id=tenant_id) == {}

View File

@@ -1,306 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from unittest.mock import patch, ANY, Mock
@pytest.mark.django_db
class TestUserViewSet:
    """RBAC tests for the /users endpoints: each operation is exercised with
    a fully-privileged client and a no-permissions client."""

    def test_list_users_with_all_permissions(self, authenticated_client_rbac):
        response = authenticated_client_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_200_OK
        assert isinstance(response.json()["data"], list)

    def test_list_users_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_retrieve_user_with_no_roles(
        self, authenticated_client_rbac_noroles, create_test_user_rbac_no_roles
    ):
        # A user without any role assignment is denied even self-retrieval
        # via the detail endpoint.
        response = authenticated_client_rbac_noroles.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac_no_roles.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_create_user_with_all_permissions(self, authenticated_client_rbac):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_create_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        # NOTE(review): user creation appears intentionally open — a client
        # without permissions still gets 201. Confirm this matches the
        # intended signup flow.
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_no_permissions_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_partial_update_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        updated_data = {
            "data": {
                "type": "users",
                "id": str(create_test_user_rbac.id),
                "attributes": {"name": "Updated Name"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id}),
            data=updated_data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["name"] == "Updated Name"

    def test_partial_update_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        updated_data = {
            "data": {
                "type": "users",
                "attributes": {"name": "Updated Name"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user.id}),
            data=updated_data,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_delete_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT

    def test_delete_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_me_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        # /users/me is always available to an authenticated user.
        response = authenticated_client_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_me_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["email"] == "rbac_limited@rbac.com"
@pytest.mark.django_db
class TestProviderViewSet:
    """RBAC tests for the /providers endpoints, including the async delete
    and connection-check task endpoints."""

    def test_list_providers_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        response = authenticated_client_rbac.get(reverse("provider-list"))
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == len(providers_fixture)

    def test_list_providers_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        # Listing succeeds but visibility filtering yields an empty set.
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-list")
        )
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

    def test_retrieve_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == provider.alias

    def test_retrieve_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        # Invisible objects 404 rather than 403 (no existence leak).
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_create_provider_with_all_permissions(self, authenticated_client_rbac):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["alias"] == "new_alias"

    def test_create_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_partial_update_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        payload = {
            "data": {
                "type": "providers",
                "id": provider.id,
                "attributes": {"alias": "updated_alias"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=payload,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == "updated_alias"

    def test_partial_update_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        update_payload = {
            "data": {
                "type": "providers",
                "attributes": {"alias": "updated_alias"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=update_payload,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.delete_provider_task.delay")
    def test_delete_provider_with_all_permissions(
        self,
        mock_delete_task,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        # Deletion is asynchronous: the view queues a Celery task and
        # returns 202 with a Content-Location pointing at the task.
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        mock_delete_task.return_value = task_mock
        mock_task_get.return_value = prowler_task
        provider1, *_ = providers_fixture
        response = authenticated_client_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_delete_task.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_delete_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.check_provider_connection_task.delay")
    def test_connection_with_all_permissions(
        self,
        mock_provider_connection,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        # The connection check is also queued as a task and returns 202.
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        task_mock.status = "PENDING"
        mock_provider_connection.return_value = task_mock
        mock_task_get.return_value = prowler_task
        provider1, *_ = providers_fixture
        # The fixture provider starts with no recorded connection state.
        assert provider1.connected is None
        assert provider1.connection_last_checked_at is None
        response = authenticated_client_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_provider_connection.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_connection_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

View File

@@ -1,318 +0,0 @@
from datetime import datetime, timedelta, timezone
from unittest.mock import patch, MagicMock
import pytest
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from rest_framework.exceptions import ValidationError, NotFound
from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Invitation
from api.models import Provider
from api.utils import (
merge_dicts,
return_prowler_provider,
initialize_prowler_provider,
prowler_provider_connection_test,
get_prowler_provider_kwargs,
)
from api.utils import validate_invitation
class TestMergeDicts:
    """Behavioral tests for merge_dicts: a recursive merge where values from
    the replacement dict win over the defaults."""

    def test_simple_merge(self):
        # Replacement values override defaults; unique keys from both survive.
        assert merge_dicts(
            {"key1": "value1", "key2": "value2"},
            {"key2": "new_value2", "key3": "value3"},
        ) == {"key1": "value1", "key2": "new_value2", "key3": "value3"}

    def test_nested_merge(self):
        # Nested dicts are merged key-by-key rather than replaced wholesale.
        defaults = {
            "key1": "value1",
            "key2": {"nested_key1": "nested_value1", "nested_key2": "nested_value2"},
        }
        overrides = {
            "key2": {
                "nested_key2": "new_nested_value2",
                "nested_key3": "nested_value3",
            },
            "key3": "value3",
        }
        assert merge_dicts(defaults, overrides) == {
            "key1": "value1",
            "key2": {
                "nested_key1": "nested_value1",
                "nested_key2": "new_nested_value2",
                "nested_key3": "nested_value3",
            },
            "key3": "value3",
        }

    def test_no_overlap(self):
        assert merge_dicts({"key1": "value1"}, {"key2": "value2"}) == {
            "key1": "value1",
            "key2": "value2",
        }

    def test_replacement_dict_empty(self):
        # An empty replacement leaves the defaults untouched.
        assert merge_dicts({"key1": "value1", "key2": "value2"}, {}) == {
            "key1": "value1",
            "key2": "value2",
        }

    def test_default_dict_empty(self):
        assert merge_dicts({}, {"key1": "value1", "key2": "value2"}) == {
            "key1": "value1",
            "key2": "value2",
        }

    def test_nested_empty_in_replacement_dict(self):
        # An explicitly empty nested dict in the replacement wins as-is.
        assert merge_dicts(
            {"key1": {"nested_key1": "nested_value1"}}, {"key1": {}}
        ) == {"key1": {}}

    def test_deep_nested_merge(self):
        assert merge_dicts(
            {"key1": {"nested_key1": {"deep_key1": "deep_value1"}}},
            {"key1": {"nested_key1": {"deep_key1": "new_deep_value1"}}},
        ) == {"key1": {"nested_key1": {"deep_key1": "new_deep_value1"}}}
class TestReturnProwlerProvider:
    """Tests that return_prowler_provider maps each provider type to the
    corresponding Prowler SDK provider class."""

    @pytest.mark.parametrize(
        "provider_type, expected_provider",
        [
            (Provider.ProviderChoices.AWS.value, AwsProvider),
            (Provider.ProviderChoices.GCP.value, GcpProvider),
            (Provider.ProviderChoices.AZURE.value, AzureProvider),
            (Provider.ProviderChoices.KUBERNETES.value, KubernetesProvider),
        ],
    )
    def test_return_prowler_provider(self, provider_type, expected_provider):
        fake_provider = MagicMock()
        fake_provider.provider = provider_type
        assert return_prowler_provider(fake_provider) == expected_provider

    def test_return_prowler_provider_unsupported_provider(self):
        # An unknown provider type must raise ValueError.
        fake_provider = MagicMock()
        fake_provider.provider = "UNSUPPORTED_PROVIDER"
        with pytest.raises(ValueError):
            return_prowler_provider(fake_provider)
class TestInitializeProwlerProvider:
    """Tests that initialize_prowler_provider instantiates the resolved
    provider class with the stored secret expanded as keyword arguments."""

    @patch("api.utils.return_prowler_provider")
    def test_initialize_prowler_provider(self, mock_return_prowler_provider):
        provider = MagicMock()
        provider.secret.secret = {"key": "value"}
        mock_return_prowler_provider.return_value = MagicMock()
        initialize_prowler_provider(provider)
        # The provider class is called with the secret dict as kwargs.
        mock_return_prowler_provider.return_value.assert_called_once_with(key="value")
class TestProwlerProviderConnectionTest:
    """Tests that prowler_provider_connection_test delegates to the provider
    class's test_connection with the secret, uid, and no-raise flag."""

    @patch("api.utils.return_prowler_provider")
    def test_prowler_provider_connection_test(self, mock_return_prowler_provider):
        provider = MagicMock()
        provider.uid = "1234567890"
        provider.secret.secret = {"key": "value"}
        mock_return_prowler_provider.return_value = MagicMock()
        prowler_provider_connection_test(provider)
        # raise_on_exception=False: connection failures are reported, not raised.
        mock_return_prowler_provider.return_value.test_connection.assert_called_once_with(
            key="value", provider_id="1234567890", raise_on_exception=False
        )
class TestGetProwlerProviderKwargs:
    """Tests for get_prowler_provider_kwargs: merges the stored secret with
    provider-type-specific extra kwargs derived from the provider uid."""

    @pytest.mark.parametrize(
        "provider_type, expected_extra_kwargs",
        [
            (
                Provider.ProviderChoices.AWS.value,
                {},
            ),
            (
                Provider.ProviderChoices.AZURE.value,
                {"subscription_ids": ["provider_uid"]},
            ),
            (
                Provider.ProviderChoices.GCP.value,
                {"project_ids": ["provider_uid"]},
            ),
            (
                Provider.ProviderChoices.KUBERNETES.value,
                {"context": "provider_uid"},
            ),
        ],
    )
    def test_get_prowler_provider_kwargs(self, provider_type, expected_extra_kwargs):
        provider_uid = "provider_uid"
        secret_dict = {"key": "value"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict
        provider = MagicMock()
        provider.provider = provider_type
        provider.secret = secret_mock
        provider.uid = provider_uid
        result = get_prowler_provider_kwargs(provider)
        expected_result = {**secret_dict, **expected_extra_kwargs}
        assert result == expected_result

    def test_get_prowler_provider_kwargs_unsupported_provider(self):
        # Setup
        # Unknown provider types contribute no extra kwargs; only the
        # secret is returned.
        provider_uid = "provider_uid"
        secret_dict = {"key": "value"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict
        provider = MagicMock()
        provider.provider = "UNSUPPORTED_PROVIDER"
        provider.secret = secret_mock
        provider.uid = provider_uid
        result = get_prowler_provider_kwargs(provider)
        expected_result = secret_dict.copy()
        assert result == expected_result

    def test_get_prowler_provider_kwargs_no_secret(self):
        # Setup
        # An empty secret for AWS (which adds no extras) yields an empty dict.
        provider_uid = "provider_uid"
        secret_mock = MagicMock()
        secret_mock.secret = {}
        provider = MagicMock()
        provider.provider = Provider.ProviderChoices.AWS.value
        provider.secret = secret_mock
        provider.uid = provider_uid
        result = get_prowler_provider_kwargs(provider)
        expected_result = {}
        assert result == expected_result
class TestValidateInvitation:
    """Unit tests for ``validate_invitation`` (ORM access is patched out)."""

    @pytest.fixture
    def invitation(self):
        """A PENDING invitation mock that expires one day from now."""
        invitation = MagicMock(spec=Invitation)
        invitation.token = "VALID_TOKEN"
        invitation.email = "user@example.com"
        invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=1)
        invitation.state = Invitation.State.PENDING
        invitation.tenant = MagicMock()
        return invitation

    def test_valid_invitation(self, invitation):
        """A pending, unexpired invitation is returned as-is."""
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation

            result = validate_invitation("VALID_TOKEN", "user@example.com")

            assert result == invitation
            mock_db.get.assert_called_once_with(
                token="VALID_TOKEN", email="user@example.com"
            )

    def test_invitation_not_found_raises_validation_error(self):
        """A missing invitation raises ValidationError by default."""
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("INVALID_TOKEN", "user@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "Invalid invitation code."
            }
            mock_db.get.assert_called_once_with(
                token="INVALID_TOKEN", email="user@example.com"
            )

    def test_invitation_not_found_raises_not_found(self):
        """With ``raise_not_found=True`` a missing invitation raises NotFound."""
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(NotFound) as exc_info:
                validate_invitation(
                    "INVALID_TOKEN", "user@example.com", raise_not_found=True
                )

            assert exc_info.value.detail == "Invitation is not valid."
            mock_db.get.assert_called_once_with(
                token="INVALID_TOKEN", email="user@example.com"
            )

    def test_invitation_expired(self, invitation):
        """An expired invitation is persisted as EXPIRED and raises."""
        expired_time = datetime.now(timezone.utc) - timedelta(days=1)
        invitation.expires_at = expired_time
        with patch("api.utils.Invitation.objects.using") as mock_using, patch(
            "api.utils.datetime"
        ) as mock_datetime:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation
            mock_datetime.now.return_value = datetime.now(timezone.utc)

            with pytest.raises(InvitationTokenExpiredException):
                validate_invitation("VALID_TOKEN", "user@example.com")

            # Ensure the invitation state was updated to EXPIRED
            assert invitation.state == Invitation.State.EXPIRED
            invitation.save.assert_called_once_with(using=MainRouter.admin_db)

    def test_invitation_not_pending(self, invitation):
        """Invitations not in the PENDING state are rejected."""
        invitation.state = Invitation.State.ACCEPTED
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("VALID_TOKEN", "user@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "This invitation is no longer valid."
            }

    def test_invitation_with_different_email(self):
        """A token/email mismatch behaves like a missing invitation."""
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("VALID_TOKEN", "different@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "Invalid invitation code."
            }
            mock_db.get.assert_called_once_with(
                token="VALID_TOKEN", email="different@example.com"
            )

View File

@@ -1,113 +0,0 @@
from datetime import datetime, timezone
from uuid import uuid4
import pytest
from dateutil.relativedelta import relativedelta
from rest_framework_json_api.serializers import ValidationError
from uuid6 import UUID
from api.uuid_utils import (
transform_into_uuid7,
datetime_to_uuid7,
datetime_from_uuid7,
uuid7_start,
uuid7_end,
uuid7_range,
)
def test_transform_into_uuid7_valid():
    """A genuine UUIDv7 passes validation and comes back with upper-cased hex."""
    uuid_v7 = datetime_to_uuid7(datetime.now(timezone.utc))
    transformed_uuid = transform_into_uuid7(uuid_v7)
    assert transformed_uuid == UUID(hex=uuid_v7.hex.upper())
    assert transformed_uuid.version == 7
def test_transform_into_uuid7_invalid_version():
    """A UUIDv4 is rejected with the expected validation message."""
    uuid_v4 = uuid4()
    with pytest.raises(ValidationError) as exc_info:
        transform_into_uuid7(UUID(str(uuid_v4)))
    assert str(exc_info.value.detail[0]) == "Invalid UUIDv7 value."
@pytest.mark.parametrize(
    "input_datetime",
    [
        datetime(2024, 9, 11, 7, 20, 27, tzinfo=timezone.utc),
        datetime(2023, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
    ],
)
def test_datetime_to_uuid7(input_datetime):
    """datetime_to_uuid7 produces a v7 UUID whose timestamp matches the input (ms)."""
    uuid7 = datetime_to_uuid7(input_datetime)
    assert isinstance(uuid7, UUID)
    assert uuid7.version == 7
    # 48-bit millisecond timestamp, matching the UUIDv7 layout.
    expected_timestamp_ms = int(input_datetime.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert uuid7.time == expected_timestamp_ms
@pytest.mark.parametrize(
    "input_datetime",
    [
        datetime(2024, 9, 11, 7, 20, 27, tzinfo=timezone.utc),
        datetime(2023, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
    ],
)
def test_datetime_from_uuid7(input_datetime):
    """Round trip: datetime -> UUIDv7 -> datetime is lossless at ms precision."""
    uuid7 = datetime_to_uuid7(input_datetime)
    extracted_datetime = datetime_from_uuid7(uuid7)
    assert extracted_datetime == input_datetime
def test_datetime_from_uuid7_invalid():
    """Extracting a timestamp from a non-v7 UUID raises ValueError."""
    uuid_v4 = uuid4()
    with pytest.raises(ValueError):
        datetime_from_uuid7(UUID(str(uuid_v4)))
def test_uuid7_start():
    """uuid7_start truncates the UUID's timestamp to midnight of the same day."""
    dt = datetime.now(timezone.utc)
    uuid = datetime_to_uuid7(dt)
    start_uuid = uuid7_start(uuid)
    expected_dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
    expected_timestamp_ms = int(expected_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert start_uuid.time == expected_timestamp_ms
    assert start_uuid.version == 7
@pytest.mark.parametrize("months_offset", [0, 1, 10, 30, 60])
def test_uuid7_end(months_offset):
    """uuid7_end lands on the last microsecond before month-start + offset."""
    dt = datetime.now(timezone.utc)
    uuid = datetime_to_uuid7(dt)
    end_uuid = uuid7_end(uuid, months_offset)
    expected_dt = dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    expected_dt += relativedelta(months=months_offset, microseconds=-1)
    expected_timestamp_ms = int(expected_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert end_uuid.time == expected_timestamp_ms
    assert end_uuid.version == 7
def test_uuid7_range():
    """uuid7_range spans from start-of-day of the earliest UUID to end-of-month of the latest."""
    dt_now = datetime.now(timezone.utc)
    uuid_list = [
        datetime_to_uuid7(dt_now),
        datetime_to_uuid7(dt_now.replace(year=2023)),
        datetime_to_uuid7(dt_now.replace(year=2024)),
        datetime_to_uuid7(dt_now.replace(year=2025)),
    ]
    start_uuid, end_uuid = uuid7_range(uuid_list)

    # Expected start of range
    start_dt = datetime_from_uuid7(min(uuid_list, key=lambda u: u.time))
    start_dt = start_dt.replace(hour=0, minute=0, second=0, microsecond=0)
    expected_start_timestamp_ms = int(start_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF

    # Expected end of range
    end_dt = datetime_from_uuid7(max(uuid_list, key=lambda u: u.time))
    end_dt = end_dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    end_dt += relativedelta(months=1, microseconds=-1)
    expected_end_timestamp_ms = int(end_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF

    assert start_uuid.time == expected_start_timestamp_ms
    assert end_uuid.time == expected_end_timestamp_ms
    assert start_uuid.version == 7
    assert end_uuid.version == 7

File diff suppressed because it is too large Load Diff

View File

@@ -1,189 +0,0 @@
from datetime import datetime, timezone
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.common.models import Connection
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from rest_framework.exceptions import ValidationError, NotFound
from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Provider, Invitation
def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
    """Recursively merge two dictionaries.

    ``default_dict`` supplies the base key-value pairs; ``replacement_dict``
    overrides them. When both sides hold a dict under the same key, the merge
    recurses — unless the override is an empty dict, in which case the empty
    dict replaces the default subtree entirely.

    Args:
        default_dict (dict): Base dictionary with default values.
        replacement_dict (dict): Overriding values.

    Returns:
        dict: A new dictionary; neither input is mutated at the top level.
    """
    merged = default_dict.copy()
    for key, override in replacement_dict.items():
        both_are_dicts = (
            key in merged
            and isinstance(merged[key], dict)
            and isinstance(override, dict)
        )
        if both_are_dicts and override:
            merged[key] = merge_dicts(merged[key], override)
        else:
            merged[key] = override
    return merged
def return_prowler_provider(
provider: Provider,
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider]:
"""Return the Prowler provider class based on the given provider type.
Args:
provider (Provider): The provider object containing the provider type and associated secrets.
Returns:
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: The corresponding provider class.
Raises:
ValueError: If the provider type specified in `provider.provider` is not supported.
"""
match provider.provider:
case Provider.ProviderChoices.AWS.value:
prowler_provider = AwsProvider
case Provider.ProviderChoices.GCP.value:
prowler_provider = GcpProvider
case Provider.ProviderChoices.AZURE.value:
prowler_provider = AzureProvider
case Provider.ProviderChoices.KUBERNETES.value:
prowler_provider = KubernetesProvider
case _:
raise ValueError(f"Provider type {provider.provider} not supported")
return prowler_provider
def get_prowler_provider_kwargs(provider: Provider) -> dict:
"""Get the Prowler provider kwargs based on the given provider type.
Args:
provider (Provider): The provider object containing the provider type and associated secret.
Returns:
dict: The provider kwargs for the corresponding provider class.
"""
prowler_provider_kwargs = provider.secret.secret
if provider.provider == Provider.ProviderChoices.AZURE.value:
prowler_provider_kwargs = {
**prowler_provider_kwargs,
"subscription_ids": [provider.uid],
}
elif provider.provider == Provider.ProviderChoices.GCP.value:
prowler_provider_kwargs = {
**prowler_provider_kwargs,
"project_ids": [provider.uid],
}
elif provider.provider == Provider.ProviderChoices.KUBERNETES.value:
prowler_provider_kwargs = {**prowler_provider_kwargs, "context": provider.uid}
return prowler_provider_kwargs
def initialize_prowler_provider(
    provider: Provider,
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider:
    """Instantiate the Prowler provider matching the given provider type.

    Resolves the provider class via ``return_prowler_provider`` and constructs
    it with the kwargs built by ``get_prowler_provider_kwargs`` (secret contents
    plus any per-type scoping arguments).

    Args:
        provider (Provider): The provider object containing the provider type
            and associated secrets.

    Returns:
        An initialized instance of the corresponding provider class.
    """
    provider_class = return_prowler_provider(provider)
    return provider_class(**get_prowler_provider_kwargs(provider))
def prowler_provider_connection_test(provider: Provider) -> Connection:
    """Run the provider class's connection test for the given provider.

    The raw secret contents are passed straight through (no per-type scoping
    kwargs here), together with the provider UID; failures are reported in the
    returned ``Connection`` rather than raised.

    Args:
        provider (Provider): The provider object containing the provider type
            and associated secrets.

    Returns:
        Connection: The result of the connection test.
    """
    provider_class = return_prowler_provider(provider)
    secret_kwargs = provider.secret.secret
    return provider_class.test_connection(
        **secret_kwargs, provider_id=provider.uid, raise_on_exception=False
    )
def validate_invitation(
    invitation_token: str, email: str, raise_not_found=False
) -> Invitation:
    """Fetch and validate the invitation matching ``invitation_token`` and ``email``.

    The lookup uses the admin database connector to bypass RLS protection,
    because the invitation belongs to a tenant the user is not a member of yet.
    The invitation must exist, must not be expired, and must still be PENDING.

    Args:
        invitation_token (str): The token associated with the invitation.
        email (str): The email address associated with the invitation.
        raise_not_found (bool, optional): When True, a missing invitation raises
            ``NotFound`` instead of ``ValidationError``. Defaults to False.

    Returns:
        Invitation: The validated Invitation object.

    Raises:
        NotFound: If the invitation does not exist and ``raise_not_found`` is True.
        ValidationError: If the invitation does not exist (and ``raise_not_found``
            is False) or is not in the PENDING state.
        InvitationTokenExpiredException: If the invitation has expired; its state
            is persisted as EXPIRED before raising.

    Examples:
        invitation = validate_invitation("TOKEN123", "user@example.com")
    """
    try:
        # Admin DB connector bypasses RLS: the user is not yet a tenant member.
        invitation = Invitation.objects.using(MainRouter.admin_db).get(
            token=invitation_token, email=email
        )
    except Invitation.DoesNotExist:
        if raise_not_found:
            raise NotFound(detail="Invitation is not valid.")
        raise ValidationError({"invitation_token": "Invalid invitation code."})

    # Expired invitations are marked as such and rejected.
    if invitation.expires_at < datetime.now(timezone.utc):
        invitation.state = Invitation.State.EXPIRED
        invitation.save(using=MainRouter.admin_db)
        raise InvitationTokenExpiredException()

    # Only PENDING invitations may be accepted.
    if invitation.state != Invitation.State.PENDING:
        raise ValidationError(
            {"invitation_token": "This invitation is no longer valid."}
        )

    return invitation

View File

@@ -1,148 +0,0 @@
from datetime import datetime, timezone
from random import getrandbits
from dateutil.relativedelta import relativedelta
from rest_framework_json_api.serializers import ValidationError
from uuid6 import UUID
def transform_into_uuid7(uuid_obj: UUID) -> UUID:
    """Validate that ``uuid_obj`` is a UUIDv7 and return it with upper-cased hex.

    Args:
        uuid_obj (UUID): The UUID object to validate and transform.

    Returns:
        UUID: A new UUIDv7 built from the upper-cased hexadecimal
            representation of the input.

    Raises:
        ValidationError: If the provided UUID is not a version 7 UUID.
    """
    # A UUID's own hex is always a valid UUID string, so only the version
    # check can actually fail here.
    if uuid_obj.version == 7:
        return UUID(hex=uuid_obj.hex.upper())
    raise ValidationError("Invalid UUIDv7 value.")
def datetime_to_uuid7(dt: datetime) -> UUID:
    """Generate a UUIDv7 from a given datetime object.

    The 128-bit value follows the UUIDv7 layout: a 48-bit Unix-millisecond
    timestamp in the top bits, version ``7`` in bits 76-79, the RFC variant
    ``10`` in bits 62-63, and random bits everywhere else.

    Args:
        dt: A datetime representing the desired timestamp for the UUIDv7.

    Returns:
        A UUIDv7 object corresponding to the given datetime.
    """
    unix_ms = int(dt.timestamp() * 1000) & 0xFFFFFFFFFFFF  # 48-bit timestamp
    rand_seq = getrandbits(12)  # randomness for bits 64-75
    rand_node = getrandbits(62)  # randomness for bits 0-61

    value = (
        (unix_ms << 80)  # timestamp -> bits 80-127
        | (0x7 << 76)  # version 7 -> bits 76-79
        | (rand_seq << 64)  # random sequence -> bits 64-75
        | (0x2 << 62)  # variant "10" -> bits 62-63
        | rand_node  # random node -> bits 0-61
    )
    return UUID(int=value)
def datetime_from_uuid7(uuid7: UUID) -> datetime:
"""
Extracts the timestamp from a UUIDv7 and returns it as a datetime object.
Args:
uuid7: A UUIDv7 object.
Returns:
A datetime object representing the timestamp encoded in the UUIDv7.
"""
timestamp_ms = uuid7.time
return datetime.fromtimestamp(timestamp_ms / 1000, tz=timezone.utc)
def uuid7_start(uuid_obj: UUID) -> UUID:
    """
    Returns a UUIDv7 that represents the start of the day for the given UUID.

    The non-timestamp bits of the result are freshly randomized (see
    ``datetime_to_uuid7``), so only the timestamp portion is meaningful.

    Args:
        uuid_obj: A UUIDv7 object.

    Returns:
        A UUIDv7 object representing the start of the day for the given UUID's timestamp.
    """
    # Truncate the embedded timestamp to midnight (UTC) of the same day.
    start_of_day = datetime_from_uuid7(uuid_obj).replace(
        hour=0, minute=0, second=0, microsecond=0
    )
    return datetime_to_uuid7(start_of_day)
def uuid7_end(uuid_obj: UUID, offset_months: int = 1) -> UUID:
    """
    Returns a UUIDv7 that represents the end of the month for the given UUID.

    Args:
        uuid_obj: A UUIDv7 object.
        offset_months: Number of months to offset from the given UUID's date. Defaults to 1 to handle if
            partitions are not being used, if so the value will be the one set at FINDINGS_TABLE_PARTITION_MONTHS.

    Returns:
        A UUIDv7 object representing the end of the month for the given UUID's date plus offset_months.
    """
    # Snap to the first instant of the UUID's month, then step forward
    # ``offset_months`` months minus one microsecond (i.e. last instant of
    # the preceding month).
    end_of_month = datetime_from_uuid7(uuid_obj).replace(
        day=1, hour=0, minute=0, second=0, microsecond=0
    )
    end_of_month += relativedelta(months=offset_months, microseconds=-1)
    return datetime_to_uuid7(end_of_month)
def uuid7_range(uuid_list: list[UUID]) -> list[UUID]:
    """
    For the given list of UUIDv7s, returns the start and end UUIDv7 values that
    bound the list: start-of-day of the earliest UUID and end-of-month of the
    latest one (see ``uuid7_start`` / ``uuid7_end``).

    Args:
        uuid_list: A list of UUIDv7 objects.

    Returns:
        A two-element list: the start and end UUIDv7 of the range.

    Raises:
        ValidationError: If the list is empty or contains invalid UUIDv7 objects.
    """
    if not uuid_list:
        raise ValidationError("UUID list is empty.")
    try:
        # Order by the embedded timestamp; objects without a ``time``
        # attribute are treated as invalid.
        start_uuid = min(uuid_list, key=lambda u: u.time)
        end_uuid = max(uuid_list, key=lambda u: u.time)
    except AttributeError:
        raise ValidationError("Invalid UUIDv7 objects in the list.")
    start_range = uuid7_start(start_uuid)
    end_range = uuid7_end(end_uuid)
    return [start_range, end_range]

File diff suppressed because it is too large Load Diff

View File

@@ -1,114 +0,0 @@
from django.urls import include, path
from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers
from api.v1.views import (
CustomTokenObtainView,
CustomTokenRefreshView,
FindingViewSet,
MembershipViewSet,
ProviderGroupViewSet,
ProviderGroupProvidersRelationshipView,
ProviderSecretViewSet,
InvitationViewSet,
InvitationAcceptViewSet,
RoleViewSet,
RoleProviderGroupRelationshipView,
UserRoleRelationshipView,
OverviewViewSet,
ComplianceOverviewViewSet,
ProviderViewSet,
ResourceViewSet,
ScanViewSet,
ScheduleViewSet,
SchemaView,
TaskViewSet,
TenantMembersViewSet,
TenantViewSet,
UserViewSet,
)
# Top-level router; endpoints are registered without trailing slashes.
router = routers.DefaultRouter(trailing_slash=False)
router.register(r"users", UserViewSet, basename="user")
router.register(r"tenants", TenantViewSet, basename="tenant")
router.register(r"providers", ProviderViewSet, basename="provider")
router.register(r"provider-groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"scans", ScanViewSet, basename="scan")
router.register(r"tasks", TaskViewSet, basename="task")
router.register(r"resources", ResourceViewSet, basename="resource")
router.register(r"findings", FindingViewSet, basename="finding")
router.register(r"roles", RoleViewSet, basename="role")
router.register(
    r"compliance-overviews", ComplianceOverviewViewSet, basename="complianceoverview"
)
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")

# Nested routers: /tenants/<id>/memberships and /users/<id>/memberships.
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(
    r"memberships", TenantMembersViewSet, basename="tenant-membership"
)
users_router = routers.NestedSimpleRouter(router, r"users", lookup="user")
users_router.register(r"memberships", MembershipViewSet, basename="user-membership")

urlpatterns = [
    # Token obtain/refresh endpoints.
    path("tokens", CustomTokenObtainView.as_view(), name="token-obtain"),
    path("tokens/refresh", CustomTokenRefreshView.as_view(), name="token-refresh"),
    # Provider secrets and invitations are wired manually (not via the router)
    # so they can live under the /providers and /tenants prefixes.
    path(
        "providers/secrets",
        ProviderSecretViewSet.as_view({"get": "list", "post": "create"}),
        name="providersecret-list",
    ),
    path(
        "providers/secrets/<uuid:pk>",
        ProviderSecretViewSet.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="providersecret-detail",
    ),
    path(
        "tenants/invitations",
        InvitationViewSet.as_view({"get": "list", "post": "create"}),
        name="invitation-list",
    ),
    path(
        "tenants/invitations/<uuid:pk>",
        InvitationViewSet.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="invitation-detail",
    ),
    path(
        "invitations/accept",
        InvitationAcceptViewSet.as_view({"post": "accept"}),
        name="invitation-accept",
    ),
    # JSON:API relationship endpoints.
    path(
        "roles/<uuid:pk>/relationships/provider_groups",
        RoleProviderGroupRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="role-provider-groups-relationship",
    ),
    path(
        "users/<uuid:pk>/relationships/roles",
        UserRoleRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="user-roles-relationship",
    ),
    path(
        "provider-groups/<uuid:pk>/relationships/providers",
        ProviderGroupProvidersRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="provider_group-providers-relationship",
    ),
    # Router-generated endpoints, then API schema and docs.
    path("", include(router.urls)),
    path("", include(tenants_router.urls)),
    path("", include(users_router.urls)),
    path("schema", SchemaView.as_view(), name="schema"),
    path("docs", SpectacularRedocView.as_view(url_name="schema"), name="docs"),
]

File diff suppressed because it is too large Load Diff

View File

@@ -1,22 +0,0 @@
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _
class MaximumLengthValidator:
    """Password validator that rejects passwords longer than ``max_length``.

    NOTE(review): the default of 72 characters presumably matches a hasher
    input limit — confirm against the configured password hasher.
    """

    def __init__(self, max_length=72):
        self.max_length = max_length

    def validate(self, password, user=None):
        """Raise a ValidationError when ``password`` exceeds ``max_length``."""
        if len(password) <= self.max_length:
            return
        raise ValidationError(
            _(
                "This password is too long. It must contain no more than %(max_length)d characters."
            ),
            code="password_too_long",
            params={"max_length": self.max_length},
        )

    def get_help_text(self):
        """Return the user-facing description of this rule."""
        return _(
            f"Your password must contain no more than {self.max_length} characters."
        )

View File

@@ -1,16 +0,0 @@
"""
ASGI config for backend project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/5.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Fall back to the production settings module when DJANGO_SETTINGS_MODULE is unset.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")

# Module-level ASGI callable picked up by the ASGI server.
application = get_asgi_application()

View File

@@ -1,58 +0,0 @@
from celery import Celery, Task
from config.env import env
# Broker/result-backend visibility timeout in seconds (default: 24 hours).
BROKER_VISIBILITY_TIMEOUT = env.int("DJANGO_BROKER_VISIBILITY_TIMEOUT", default=86400)

celery_app = Celery("tasks")
# Pull CELERY_*-prefixed settings from Django's settings module.
celery_app.config_from_object("django.conf:settings", namespace="CELERY")
# Store extended task metadata and never expire stored results.
celery_app.conf.update(result_extended=True, result_expires=None)
celery_app.conf.broker_transport_options = {
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
celery_app.conf.result_backend_transport_options = {
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
celery_app.conf.visibility_timeout = BROKER_VISIBILITY_TIMEOUT
# Discover task modules in the "api" package.
celery_app.autodiscover_tasks(["api"])
class RLSTask(Task):
    """Celery ``Task`` base class that mirrors each dispatched task into the API DB.

    On dispatch, it creates an ``api.models.Task`` row linked to the
    ``django_celery_results.TaskResult`` record, inside an RLS transaction for
    the tenant given by the ``tenant_id`` keyword argument.
    """

    def apply_async(
        self,
        args=None,
        kwargs=None,
        task_id=None,
        producer=None,
        link=None,
        link_error=None,
        shadow=None,
        **options,
    ):
        # Imported lazily inside the method (NOTE(review): presumably to avoid
        # importing Django models before the app registry is ready — confirm).
        from django_celery_results.models import TaskResult

        from api.models import Task as APITask

        result = super().apply_async(
            args=args,
            kwargs=kwargs,
            task_id=task_id,
            producer=producer,
            link=link,
            link_error=link_error,
            shadow=shadow,
            **options,
        )
        # NOTE(review): assumes the TaskResult row exists as soon as
        # apply_async returns; confirm there is no race with the worker /
        # results backend, otherwise this .get() can raise DoesNotExist.
        task_result_instance = TaskResult.objects.get(task_id=result.task_id)

        from api.db_utils import rls_transaction

        # ``tenant_id`` is read from the task's keyword arguments; callers must
        # pass it, otherwise this is None when entering the RLS transaction.
        tenant_id = kwargs.get("tenant_id")
        with rls_transaction(tenant_id):
            APITask.objects.create(
                id=task_result_instance.task_id,
                tenant_id=tenant_id,
                task_runner_task=task_result_instance,
            )
        return result

View File

@@ -1,230 +0,0 @@
import json
import logging
from enum import StrEnum
from django_guid.log_filters import CorrelationId
from config.env import env
class BackendLogger(StrEnum):
    """Logger names used across the backend; values are the dotted logger names."""

    GUNICORN = "gunicorn"
    GUNICORN_ACCESS = "gunicorn.access"
    GUNICORN_ERROR = "gunicorn.error"
    DJANGO = "django"
    SECURITY = "django.security"
    DB = "django.db"
    API = "api"
    TASKS = "tasks"
# Formatters
class NDJSONFormatter(logging.Formatter):
    """Serialize log records as single-line JSON (NDJSON).

    A fixed set of base fields is always emitted; REST API request metadata
    (user, tenant, method, path, ...) is added only when the record carries it.
    """

    # Optional request-scoped attributes, copied verbatim when present.
    _EXTRA_FIELDS = (
        "user_id",
        "tenant_id",
        "method",
        "path",
        "query_params",
        "duration",
        "status_code",
    )

    def format(self, record):
        payload = {
            "timestamp": self.formatTime(record, self.datefmt),
            "level": record.levelname,
            "message": record.getMessage(),
            "logger": record.name,
            "module": record.module,
            "pathname": record.pathname,
            "lineno": record.lineno,
            "funcName": record.funcName,
            "process": record.process,
            "thread": record.thread,
            # Injected by TransactionIdFilter; None when the filter did not run.
            "transaction_id": getattr(record, "transaction_id", None),
        }
        for field in self._EXTRA_FIELDS:
            if hasattr(record, field):
                payload[field] = getattr(record, field)
        if record.exc_info:
            payload["exc_info"] = self.formatException(record.exc_info)
        return json.dumps(payload)
class HumanReadableFormatter(logging.Formatter):
    """Render log records as a single space-joined human-readable line.

    REST API request metadata (user, tenant, method, path, ...) is appended
    only when the record carries it.
    """

    def format(self, record):
        transaction_id = getattr(record, "transaction_id", None)
        parts = [
            f"{self.formatTime(record, self.datefmt)}",
            f"[{record.name}]",
            f"{record.levelname}:",
            f"({record.module})",
            f"[module={record.module}",
            f"path={record.pathname}",
            f"line={record.lineno}",
            f"function={record.funcName}",
            f"process={record.process}",
            f"thread={record.thread}",
            f"transaction-id={transaction_id}]",
            f"{record.getMessage()}",
        ]
        # Optional REST API fields, in a fixed display order.
        if hasattr(record, "user_id"):
            parts.append(f"({record.user_id})")
        if hasattr(record, "tenant_id"):
            parts.append(f"[{record.tenant_id}]")
        if hasattr(record, "method"):
            parts.append(f'"{record.method} {record.path}"')
        if hasattr(record, "query_params"):
            parts.append(f"with parameters {record.query_params}")
        if hasattr(record, "duration"):
            parts.append(f"done in {record.duration}s:")
        if hasattr(record, "status_code"):
            parts.append(f"{record.status_code}")
        if record.exc_info:
            parts.append(self.formatException(record.exc_info))
        return " ".join(parts)
# Filters
class TransactionIdFilter(CorrelationId):
    """Logging filter exposing django-guid's correlation id as ``transaction_id``.

    Overrides the ``correlation_id_field`` parameter of the parent filter so
    log records carry the attribute under this name (consumed by the
    formatters in this module).
    """

    # Attribute name injected into every log record.
    CORRELATION_ID_FIELD = "transaction_id"

    def __init__(self):
        super().__init__(correlation_id_field=self.CORRELATION_ID_FIELD)
# Logging settings
# Logging level and output format are environment-driven; FORMATTER is either
# "ndjson" (machine-readable) or "human_readable" (both defined above).
LEVEL = env("DJANGO_LOGGING_LEVEL", default="INFO")
FORMATTER = env("DJANGO_LOGGING_FORMATTER", default="ndjson")

LOGGING = {
    "version": 1,
    "disable_existing_loggers": True,
    "filters": {"transaction_id": {"()": TransactionIdFilter}},
    "formatters": {
        "ndjson": {
            "()": NDJSONFormatter,
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
        "human_readable": {
            "()": HumanReadableFormatter,
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    },
    # One console handler per logger, all sharing the same formatter/filter.
    "handlers": {
        "gunicorn_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "django_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "api_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "db_console": {
            # DB logging never goes below INFO unless global level is DEBUG.
            "level": f"{'DEBUG' if LEVEL == 'DEBUG' else 'INFO'}",
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "security_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "tasks_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
    },
    "loggers": {
        BackendLogger.GUNICORN: {
            "handlers": ["gunicorn_console"],
            "level": LEVEL,
            "propagate": False,
        },
        # Access logs are effectively silenced (CRITICAL only).
        BackendLogger.GUNICORN_ACCESS: {
            "handlers": ["gunicorn_console"],
            "level": "CRITICAL",
            "propagate": False,
        },
        BackendLogger.GUNICORN_ERROR: {
            "handlers": ["gunicorn_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.DJANGO: {
            "handlers": ["django_console"],
            "level": "WARNING",
            "propagate": True,
        },
        BackendLogger.DB: {
            "handlers": ["db_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.SECURITY: {
            "handlers": ["security_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.API: {
            "handlers": ["api_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.TASKS: {
            "handlers": ["tasks_console"],
            "level": LEVEL,
            "propagate": False,
        },
    },
    # Gunicorn required configuration
    "root": {
        "level": "ERROR",
        "handlers": ["gunicorn_console"],
    },
}

Some files were not shown because too many files have changed in this diff Show More