Compare commits

..

103 Commits

Author SHA1 Message Date
pedrooot
ded0fd9b36 feat(cloudwatch): revert changes from report creation 2024-12-18 17:11:03 +01:00
pedrooot
a11bef6622 feat(check_cloudwatch_log_metric_filter): add comprobations for Nonetypes 2024-12-18 16:52:51 +01:00
pedrooot
9575d22c72 fix(checks): add getattr to fix NoneType errors 2024-12-18 16:00:17 +01:00
Pepe Fagoaga
fd5e7b809f docs: add note about containers arch (#6236) 2024-12-18 11:09:35 +01:00
dependabot[bot]
1ac681226d chore(deps): bump botocore from 1.35.81 to 1.35.83 (#6232)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-17 18:52:46 -05:00
dependabot[bot]
366940298d chore(deps): bump trufflesecurity/trufflehog from 3.86.1 to 3.87.0 (#6234)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-17 18:51:32 -05:00
Adrián Jesús Peña Rodríguez
fa400ded7d ref(rbac): improve rbac implementation for views (#6226) 2024-12-17 18:11:48 +01:00
dependabot[bot]
ec9455ff75 chore(deps): bump boto3 from 1.35.80 to 1.35.81 (#6218)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-17 11:32:30 -05:00
Daniel Barranquero
2183f31ff5 feat(ec2): add new fixers for internet exposed ports (#6223) 2024-12-17 10:04:00 -05:00
Prowler Bot
67257a4212 chore(regions_update): Changes in regions for AWS services (#6222)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2024-12-17 10:00:52 -05:00
Pedro Martín
001fa60a11 feat(mutelist): add description field (#6221)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-12-17 15:13:55 +01:00
Víctor Fernández Poyatos
0ec3ed8be7 feat(services): Add GET /overviews/services to API (#6029) 2024-12-17 08:47:44 +01:00
dependabot[bot]
3ed0b8a464 chore(deps-dev): bump mkdocs-material from 9.5.48 to 9.5.49 (#6217)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-17 08:42:55 +01:00
Pedro Martín
fd610d44c0 refactor(gcp): use always <client>.region for checks (#6206) 2024-12-16 18:21:42 -05:00
Adrián Jesús Peña Rodríguez
b8cc4b4f0f feat(stepfunctions): add stepfunctions service and check stepfunctions_statemachine_logging_enabled (#5466)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-12-16 11:34:02 -05:00
Pedro Martín
396e51c27d feat(gcp): add service account credentials (#6165) 2024-12-16 10:11:32 -05:00
Daniel Barranquero
36e61cb7a2 feat(ec2): add new fixer ec2_ami_public_fixer (#6177) 2024-12-16 10:09:14 -05:00
Daniel Barranquero
78c6484ddb feat(cloudtrail): add new fixer cloudtrail_logs_s3_bucket_is_not_publicly_accessible_fixer (#6174) 2024-12-16 10:05:34 -05:00
Daniel Barranquero
3f1e90a5b3 feat(s3): add new fixer s3_bucket_policy_public_write_access_fixer (#6173) 2024-12-16 10:01:38 -05:00
dependabot[bot]
e1bfec898f chore(deps): bump botocore from 1.35.80 to 1.35.81 (#6199)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-16 09:57:03 -05:00
dependabot[bot]
b5b816dac9 chore(deps): bump boto3 from 1.35.79 to 1.35.80 (#6198)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-16 07:51:44 -05:00
Pepe Fagoaga
57854f23b7 chore(rls): rename tenant_transaction to rls_transaction (#6202) 2024-12-16 12:27:55 +01:00
Rubén De la Torre Vico
9d7499b74f fix(azure): custom Prowler Role for Azure assignableScopes (#6149) 2024-12-16 08:34:17 +01:00
dependabot[bot]
5b0b85c0f8 chore(deps): bump actions/setup-node from 3 to 4 (#5893)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-13 14:57:27 +01:00
Pedro Martín
f7e8df618b chore(labeler): add provider github (#6194) 2024-12-13 09:43:49 -04:00
Adrián Jesús Peña Rodríguez
d00d254c90 feat(api): RBAC system (#6114) 2024-12-13 14:14:40 +01:00
dependabot[bot]
f9fbde6637 chore(deps): bump botocore from 1.35.79 to 1.35.80 (#6172)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-13 13:20:40 +01:00
Sergio Garcia
7b1a0474db fix(aws): set unique resource IDs (#6152) 2024-12-13 13:00:38 +01:00
Pepe Fagoaga
da4f9b8e5f fix(RLS): enforce config security (#6066) 2024-12-13 12:55:09 +01:00
Pepe Fagoaga
32f69d24b6 fix: dependabot syntax (#6181) 2024-12-13 12:20:43 +01:00
Pepe Fagoaga
d032a61a9e chore(dependabot): Add docker (#6180) 2024-12-13 12:13:53 +01:00
dependabot[bot]
07e0dc2ef5 chore(deps): bump cross-spawn from 7.0.3 to 7.0.6 in /ui (#5881)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-12-13 08:25:57 +01:00
dependabot[bot]
9e175e8504 chore(deps): bump nanoid from 3.3.7 to 3.3.8 in /ui (#6110)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-13 07:59:50 +01:00
dependabot[bot]
6b8a434cda chore(deps): bump boto3 from 1.35.78 to 1.35.79 (#6171)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-13 07:58:58 +01:00
Pepe Fagoaga
554491a642 chore(gha): build and push OSS UI (#6168) 2024-12-12 19:10:44 +01:00
Pedro Martín
dc4e2f3c85 feat(GHA): build containers for API (#6032)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-12-12 19:05:25 +01:00
Daniel Barranquero
7d2c50991b feat(s3): add new fixer s3_bucket_public_access_fixer (#6164)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-12 12:17:41 -04:00
Pedro Martín
83c204e010 fix(rds): add invalid SG to status_extended (#6157) 2024-12-12 11:51:09 -04:00
dependabot[bot]
316eb049dd chore(deps): bump botocore from 1.35.78 to 1.35.79 (#6153)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-12 11:29:23 -04:00
Daniel Barranquero
be347b2428 feat(ec2): add new check ec2_launch_template_imdsv2_required (#6139)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-12 11:27:20 -04:00
Daniel Barranquero
a90c772827 feat(s3): add new fixer s3_bucket_public_list_acl_fixer (#6166) 2024-12-12 11:16:46 -04:00
Daniel Barranquero
26c70976c0 feat(s3): add new fixer s3_bucket_public_write_acl_fixer (#5855) 2024-12-12 11:10:43 -04:00
dependabot[bot]
657310dc25 chore(deps): bump boto3 from 1.35.77 to 1.35.78 (#6154)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-12 10:39:22 -04:00
Daniel Barranquero
6e595eaf92 feat(ec2): add new fixer ec2_instance_port_cifs_exposed_to_internet_fixer (#6159) 2024-12-12 09:22:56 -04:00
Prowler Bot
997831e33d chore(regions_update): Changes in regions for AWS services (#6158)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2024-12-12 09:10:46 -04:00
dependabot[bot]
5920cdc48f chore(deps): bump trufflesecurity/trufflehog from 3.86.0 to 3.86.1 (#6156)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-12 09:10:20 -04:00
dependabot[bot]
971e73f9cb chore(deps): bump google-api-python-client from 2.154.0 to 2.155.0 (#6155)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-12 09:09:51 -04:00
Mads Brouer Lundholm
bd9673c9de fix(aurora): Add default ports to the check of using non default ports (#5821)
Co-authored-by: Mads Rantala Lundholm <mao@bankdata.dk>
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-12-11 13:01:45 -04:00
johannes-engler-mw
eded97d735 feat(azure): check for minimal TLS version for Azure SQL server (#5745)
Co-authored-by: Rubén De la Torre Vico <ruben@prowler.com>
2024-12-11 16:37:53 +01:00
Daniel Barranquero
fdb1956b0b feat(opensearch): add new fixer opensearch_service_domains_not_publicly_accessible_fixer (#5926) 2024-12-11 11:29:48 -04:00
Daniel Barranquero
a915c04e9e fix(autoscaling): autoscaling_group_launch_configuration_requires_imdsv2 fails if Launch Template is used (#6111)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-11 11:18:30 -04:00
Daniel Barranquero
07178ac69a feat(glacier): add new fixer glacier_vaults_policy_public_access_fixer (#5950) 2024-12-11 11:10:12 -04:00
Daniel Barranquero
9b434d4856 feat(ecr): add new fixer ecr_repositories_not_publicly_accessible_fixer (#5923) 2024-12-11 10:42:11 -04:00
dependabot[bot]
0758e97628 chore(deps): bump botocore from 1.35.77 to 1.35.78 (#6132)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-11 10:19:37 -04:00
Sergio Garcia
b486007f95 fix(README): show latest release (#6145) 2024-12-11 10:19:06 -04:00
dependabot[bot]
0c0887afef chore(deps): bump trufflesecurity/trufflehog from 3.85.0 to 3.86.0 (#6130)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-11 09:29:14 -04:00
dependabot[bot]
805ed81031 chore(deps): bump boto3 from 1.35.76 to 1.35.77 (#6131)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-11 09:26:07 -04:00
Prowler Bot
ec3fddf5b1 chore(regions_update): Changes in regions for AWS services (#6136)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2024-12-11 09:25:17 -04:00
Rubén De la Torre Vico
d7b0bc02ba feat(app): add support for TLS 1.3 to Web Apps check (#6004) 2024-12-11 13:14:29 +01:00
Pablo Lara
4d1c8eae8f feat(users): user detail can be edited now properly (#6135) 2024-12-11 10:05:30 +01:00
Sergio Garcia
989ccf4ae3 fix(iam): set unique resource id for each user access key (#6128) 2024-12-11 09:13:49 +01:00
Pedro Martín
9c089756c3 fix(compliance_tables): add correct values for findings (#6122)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-10 15:40:45 -04:00
Hugo Pereira Brito
8d4b0914a8 fix(aws): get firewall manager managed rule groups (#6119) 2024-12-10 15:34:22 -04:00
Hugo Pereira Brito
1ae3f89aab fix(aws): check AWS Owned keys in firehose_stream_encrypted_at_rest (#6108) 2024-12-10 13:42:13 -04:00
Daniel Barranquero
b984f0423a feat(sqs): add new fixer sqs_queues_not_publicly_accessible_fixer (#5911)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-10 12:26:42 -04:00
Sergio Garcia
f2f196cfcd fix(aws): set IAM identity as resource in threat detection (#6048) 2024-12-10 17:03:01 +01:00
dependabot[bot]
6471d936bb chore(deps): bump msgraph-sdk from 1.12.0 to 1.14.0 (#5957)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 11:42:40 -04:00
Adrián Jesús Peña Rodríguez
21bbdccc41 fix(deploy): temporal fix for the alpine-python segmentation fault (#6109) 2024-12-10 16:27:52 +01:00
Sergio Garcia
48946fa4f7 fix(gcp): make sure default project is active (#6097) 2024-12-10 11:06:48 -04:00
dependabot[bot]
9312dda7c2 chore(deps): bump microsoft-kiota-abstractions from 1.6.2 to 1.6.6 (#6038)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 10:37:04 -04:00
dependabot[bot]
e3013329ee chore(deps): bump botocore from 1.35.76 to 1.35.77 (#6098)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 09:26:36 -04:00
Sergio Garcia
38a0d2d740 fix(aws): set same severity for EC2 IMDSv2 checks (#6046) 2024-12-10 08:55:41 +01:00
Mario Rodriguez Lopez
5c2adf1e14 docs(unitesting): Make some fixes to the documentation (#6102) 2024-12-10 08:51:19 +01:00
Daniel Barranquero
7ddd2c04c8 feat(awslambda): add new fixer awslambda_function_not_publicly_accessible_fixer (#5840) 2024-12-09 12:28:42 -04:00
Pepe Fagoaga
9a55632d8e fix(backport): more than one backport tag is allowed (#6090) 2024-12-09 17:19:33 +01:00
dependabot[bot]
f8b4427505 chore(deps-dev): bump vulture from 2.13 to 2.14 (#6068)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 11:10:41 -04:00
Sergio Garcia
f1efc1456d chore(dependabot): change interval of PRs (#6086) 2024-12-09 15:46:28 +01:00
Sergio Garcia
2ea5851b67 docs(api): add commands to run API scheduler (#6085) 2024-12-09 10:34:02 -04:00
dependabot[bot]
a3051bc4e3 chore(deps-dev): bump mkdocs-material from 9.5.47 to 9.5.48 (#6073)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 10:14:08 -04:00
Pepe Fagoaga
d454427b8b fix(backport): remove v from branch prefix (#6081) 2024-12-09 10:13:20 -04:00
Pepe Fagoaga
4b41bd6adf chore(containers): support for v4.6 branch (#6063)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-09 09:23:06 -04:00
Pepe Fagoaga
cdd044d120 chore(dependabot): Update for UI and v4 (#6062) 2024-12-09 09:15:03 -04:00
Pepe Fagoaga
213a793fbc chore(actions): standardize names (#6059) 2024-12-09 09:14:06 -04:00
Pepe Fagoaga
a8a567c588 docs: Prowler SaaS -> Cloud and add missing compliance (#6061) 2024-12-09 09:12:54 -04:00
Pepe Fagoaga
fefe89a1ed fix(backport): Add action to detect labels (#5270) 2024-12-09 09:12:08 -04:00
Sergio Garcia
493fe2d523 docs(env): move warning about env files (#6049) 2024-12-09 11:11:05 +01:00
dependabot[bot]
d8fc830f1d chore(deps): bump boto3 from 1.35.71 to 1.35.76 (#6054)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 10:11:51 +01:00
Pepe Fagoaga
b6c3ba0f0d chore: delete unneeded requirements file (#6056) 2024-12-09 09:07:10 +01:00
dependabot[bot]
32cd39d158 chore(deps-dev): bump coverage from 7.6.8 to 7.6.9 (#6053)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 20:29:06 -04:00
dependabot[bot]
203275817f chore(deps-dev): bump pytest from 8.3.3 to 8.3.4 (#5992)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 12:53:11 -04:00
dependabot[bot]
c05c3396b5 chore(deps-dev): bump mkdocs-material from 9.5.46 to 9.5.47 (#5988)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 11:56:37 -04:00
dependabot[bot]
8f172aec8a chore(deps-dev): bump pylint from 3.3.1 to 3.3.2 (#5993)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 11:04:28 -04:00
dependabot[bot]
263a7e2134 chore(deps): bump botocore from 1.35.71 to 1.35.76 (#6037) 2024-12-06 09:41:57 -04:00
dependabot[bot]
a2ea216604 chore(deps): bump slack-sdk from 3.33.4 to 3.33.5 (#6039)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 08:44:00 -04:00
dependabot[bot]
77c572f990 chore(deps): bump trufflesecurity/trufflehog from 3.84.1 to 3.85.0 (#6040)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 08:38:14 -04:00
Prowler Bot
bb0c346c4d chore(regions_update): Changes in regions for AWS services (#6041)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-12-06 08:38:03 -04:00
Daniel Barranquero
2ce8e1fd21 fix(backup): modify list recovery points call (#5996) 2024-12-06 08:35:29 -04:00
Pepe Fagoaga
ecfd94aeb1 fix(codecov): create components (#6028) 2024-12-05 16:35:56 +01:00
Pedro Martín
eddc672264 chore(version): update prowler version (#6027) 2024-12-05 13:51:13 +01:00
Pedro Martín
8c71a39487 docs(prowler-app): add link to https://api.prowler.com/api/v1/docs (#6016) 2024-12-05 11:01:51 +01:00
Pedro Martín
ff0ac27723 docs(index): update index with images (#6015) 2024-12-05 11:01:42 +01:00
Víctor Fernández Poyatos
ad7134d283 fix(tenant): fix delete tenants behavior (#6013) 2024-12-04 13:57:16 +01:00
Pablo Lara
58723ae52e fix(invitations): remove wrong url (#6005) 2024-12-03 21:08:31 +01:00
261 changed files with 19625 additions and 2184 deletions

View File

@@ -1,3 +1,3 @@
name: "Custom CodeQL Config for API"
name: "API - CodeQL Config"
paths:
- 'api/'
- "api/"

View File

@@ -1,4 +0,0 @@
name: "Custom CodeQL Config"
paths-ignore:
- 'api/'
- 'ui/'

4
.github/codeql/sdk-codeql-config.yml vendored Normal file
View File

@@ -0,0 +1,4 @@
name: "SDK - CodeQL Config"
paths-ignore:
- "api/"
- "ui/"

View File

@@ -1,3 +1,3 @@
name: "Custom CodeQL Config for UI"
name: "UI - CodeQL Config"
paths:
- "ui/"

View File

@@ -5,6 +5,7 @@
version: 2
updates:
# v5
- package-ecosystem: "pip"
directory: "/"
schedule:
@@ -14,6 +15,7 @@ updates:
labels:
- "dependencies"
- "pip"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
@@ -24,20 +26,76 @@ updates:
- "dependencies"
- "github_actions"
- package-ecosystem: "pip"
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "npm"
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "docker"
# v4.6
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "pip"
- "v4"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "github_actions"
- "v4"
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "docker"
- "v4"
# v3
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "pip"
- "v3"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:

5
.github/labeler.yml vendored
View File

@@ -22,6 +22,11 @@ provider/kubernetes:
- any-glob-to-any-file: "prowler/providers/kubernetes/**"
- any-glob-to-any-file: "tests/providers/kubernetes/**"
provider/github:
- changed-files:
- any-glob-to-any-file: "prowler/providers/github/**"
- any-glob-to-any-file: "tests/providers/github/**"
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"

View File

@@ -0,0 +1,81 @@
name: API - Build and Push containers
on:
push:
branches:
- "master"
paths:
- "api/**"
- ".github/workflows/api-build-lint-push-containers.yml"
# Uncomment the code below to test this action on PRs
# pull_request:
# branches:
# - "master"
# paths:
# - "api/**"
# - ".github/workflows/api-build-lint-push-containers.yml"
release:
types: [published]
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
WORKING_DIRECTORY: ./api
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api
jobs:
# Build Prowler OSS container
container-build-push:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ env.WORKING_DIRECTORY }}
steps:
- name: Repository check
working-directory: /tmp
run: |
[[ ${{ github.repository }} != "prowler-cloud/prowler" ]] && echo "This action only runs for prowler-cloud/prowler"; exit 0
- name: Checkout
uses: actions/checkout@v4
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -9,11 +9,11 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "API - CodeQL"
name: API - CodeQL
on:
push:
branches:
branches:
- "master"
- "v3"
- "v4.*"
@@ -21,7 +21,7 @@ on:
paths:
- "api/**"
pull_request:
branches:
branches:
- "master"
- "v3"
- "v4.*"

View File

@@ -1,4 +1,4 @@
name: "API - Pull Request"
name: API - Pull Request
on:
push:
@@ -69,6 +69,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
@@ -80,18 +81,21 @@ jobs:
api/permissions/**
api/README.md
api/mkdocs.yml
- name: Install poetry
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install dependencies
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
@@ -109,48 +113,59 @@ jobs:
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry lock --check
- name: Lint with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff check . --exclude contrib
- name: Check Format with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff format --check . --exclude contrib
- name: Lint with pylint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
- name: Bandit
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check --ignore 70612,66963
- name: Vulture
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
- name: Hadolint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest --cov=./src/backend --cov-report=xml src/backend
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: api

View File

@@ -1,42 +1,47 @@
name: Automatic Backport
name: Prowler - Automatic Backport
on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
env:
# The prefix of the label that triggers the backport must not contain the branch name
# so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_IGNORE: was-backported
jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
runs-on: ubuntu-latest
permissions:
id-token: write
pull-requests: write
contents: write
steps:
# Workaround not to fail the workflow if the PR does not need a backport
# https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
- name: Check for backport labels
id: check_labels
run: |-
labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
echo "$labels"
matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
echo "matched=$matched"
echo "matched=$matched" >> $GITHUB_OUTPUT
- name: Check labels
id: preview_label_check
uses: docker://agilepathway/pull-request-label-checker:v1.6.55
with:
allow_failure: true
prefix_mode: true
any_of: ${{ env.BACKPORT_LABEL_PREFIX }}
none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
repo_token: ${{ secrets.GITHUB_TOKEN }}
- name: Backport Action
if: fromJSON(steps.check_labels.outputs.matched) > 0
if: steps.preview_label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: backport-to-
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
- name: Info log
if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
run: cat ~/.backport/backport.debug.log

View File

@@ -1,4 +1,4 @@
name: Pull Request Documentation Link
name: Prowler - Pull Request Documentation Link
on:
pull_request:

View File

@@ -1,4 +1,4 @@
name: Find secrets
name: Prowler - Find secrets
on: pull_request
@@ -11,9 +11,9 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.84.1
uses: trufflesecurity/trufflehog@v3.87.0
with:
path: ./
base: ${{ github.event.repository.default_branch }}
head: HEAD
extra_args: --only-verified
extra_args: --only-verified

View File

@@ -1,4 +1,4 @@
name: "Pull Request Labeler"
name: Prowler - PR Labeler
on:
pull_request_target:

View File

@@ -1,9 +1,13 @@
name: Build and Push containers
name: SDK - Build and Push containers
on:
push:
branches:
# For `v3-latest`
- "v3"
# For `v4-latest`
- "v4.6"
# For `latest`
- "master"
paths-ignore:
- ".github/**"
@@ -85,8 +89,8 @@ jobs:
echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
;;
4)
4)
echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
;;

View File

@@ -9,11 +9,11 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
name: SDK - CodeQL
on:
push:
branches:
branches:
- "master"
- "v3"
- "v4.*"
@@ -21,7 +21,7 @@ on:
- 'ui/**'
- 'api/**'
pull_request:
branches:
branches:
- "master"
- "v3"
- "v4.*"
@@ -55,7 +55,7 @@ jobs:
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/codeql-config.yml
config-file: ./.github/codeql/sdk-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3

View File

@@ -1,4 +1,4 @@
name: "Pull Request"
name: SDK - Pull Request
on:
push:
@@ -22,6 +22,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
@@ -36,17 +37,20 @@ jobs:
README.md
mkdocs.yml
.backportrc.json
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install dependencies
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
@@ -57,44 +61,56 @@ jobs:
sed -E 's/.*"v([^"]+)".*/\1/' \
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
&& chmod +x /tmp/hadolint
- name: Poetry check
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry lock --check
- name: Lint with flake8
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api
- name: Checking format with black
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run black --exclude api ui --check .
- name: Lint with pylint
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
- name: Bandit
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .
- name: Safety
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check --ignore 70612 -r pyproject.toml
- name: Vulture
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
- name: Hadolint
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler

View File

@@ -1,4 +1,4 @@
name: PyPI release
name: SDK - PyPI release
on:
release:

View File

@@ -1,6 +1,6 @@
# This is a basic workflow to help you get started with Actions
name: Refresh regions of AWS services
name: SDK - Refresh AWS services' regions
on:
schedule:

View File

@@ -0,0 +1,81 @@
name: UI - Build and Push containers
on:
push:
branches:
- "master"
paths:
- "ui/**"
- ".github/workflows/ui-build-lint-push-containers.yml"
# Uncomment the below code to test this action on PRs
# pull_request:
# branches:
# - "master"
# paths:
# - "ui/**"
# - ".github/workflows/ui-build-lint-push-containers.yml"
release:
types: [published]
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
WORKING_DIRECTORY: ./ui
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
jobs:
# Build Prowler OSS container
container-build-push:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ env.WORKING_DIRECTORY }}
steps:
- name: Repository check
working-directory: /tmp
run: |
[[ ${{ github.repository }} != "prowler-cloud/prowler" ]] && echo "This action only runs for prowler-cloud/prowler"; exit 0
- name: Checkout
uses: actions/checkout@v4
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -9,7 +9,7 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "UI - CodeQL"
name: UI - CodeQL
on:
push:

View File

@@ -1,4 +1,4 @@
name: "UI - Pull Request"
name: UI - Pull Request
on:
pull_request:
@@ -20,7 +20,7 @@ jobs:
with:
persist-credentials: false
- name: Setup Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: Install dependencies
@@ -31,4 +31,4 @@ jobs:
run: npm run healthcheck
- name: Build the application
working-directory: ./ui
run: npm run build
run: npm run build

View File

@@ -1,4 +1,4 @@
FROM python:3.12-alpine
FROM python:3.12.8-alpine3.20
LABEL maintainer="https://github.com/prowler-cloud/prowler"

View File

@@ -3,7 +3,7 @@
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment theyre meant to protect. Trusted by the leaders in security.
<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment theyre meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
@@ -29,7 +29,7 @@
<p align="center">
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler?include_prereleases"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>
@@ -43,7 +43,7 @@
# Description
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
## Prowler App
@@ -98,6 +98,7 @@ curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/mast
docker compose up -d
```
> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment.
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
### From GitHub
@@ -139,6 +140,19 @@ cd src/backend
python -m celery -A config.celery worker -l info -E
```
**Commands to run the API Scheduler**
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
set -a
source .env
cd src/backend
python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
**Commands to run the UI**
``` console

View File

@@ -1,4 +1,4 @@
FROM python:3.12-alpine AS build
FROM python:3.12.8-alpine3.20 AS build
LABEL maintainer="https://github.com/prowler-cloud/api"

1956
api/poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -8,7 +8,7 @@ description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
version = "1.0.0"
version = "1.1.0"
[tool.poetry.dependencies]
celery = {extras = ["pytest"], version = "^5.4.0"}
@@ -27,7 +27,7 @@ drf-nested-routers = "^0.94.1"
drf-spectacular = "0.27.2"
drf-spectacular-jsonapi = "0.5.1"
gunicorn = "23.0.0"
prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "master"}
prowler = {git = "https://github.com/prowler-cloud/prowler.git", tag = "5.0.0"}
psycopg2-binary = "2.9.9"
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
# Needed for prowler compatibility

View File

@@ -1,20 +1,23 @@
import uuid
from django.db import transaction, connection
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from rest_framework import permissions
from rest_framework.exceptions import NotAuthenticated
from rest_framework.filters import SearchFilter
from rest_framework_json_api import filters
from rest_framework_json_api.serializers import ValidationError
from rest_framework_json_api.views import ModelViewSet
from rest_framework_simplejwt.authentication import JWTAuthentication
from api.db_router import MainRouter
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
from api.filters import CustomDjangoFilterBackend
from api.models import Role, Tenant
from api.rbac.permissions import HasPermissions
class BaseViewSet(ModelViewSet):
authentication_classes = [JWTAuthentication]
permission_classes = [permissions.IsAuthenticated]
required_permissions = []
permission_classes = [permissions.IsAuthenticated, HasPermissions]
filter_backends = [
filters.QueryParameterValidationFilter,
filters.OrderingFilter,
@@ -28,6 +31,17 @@ class BaseViewSet(ModelViewSet):
ordering_fields = "__all__"
ordering = ["id"]
def initial(self, request, *args, **kwargs):
"""
Sets required_permissions before permissions are checked.
"""
self.set_required_permissions()
super().initial(request, *args, **kwargs)
def set_required_permissions(self):
"""This is an abstract method that must be implemented by subclasses."""
NotImplemented
def get_queryset(self):
raise NotImplementedError
@@ -47,13 +61,7 @@ class BaseRLSViewSet(BaseViewSet):
if tenant_id is None:
raise NotAuthenticated("Tenant ID is not present in token")
try:
uuid.UUID(tenant_id)
except ValueError:
raise ValidationError("Tenant ID must be a valid UUID")
with connection.cursor() as cursor:
cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
with rls_transaction(tenant_id):
self.request.tenant_id = tenant_id
return super().initial(request, *args, **kwargs)
@@ -66,13 +74,60 @@ class BaseRLSViewSet(BaseViewSet):
class BaseTenantViewset(BaseViewSet):
def dispatch(self, request, *args, **kwargs):
with transaction.atomic():
return super().dispatch(request, *args, **kwargs)
tenant = super().dispatch(request, *args, **kwargs)
try:
# If the request is a POST, create the admin role
if request.method == "POST":
isinstance(tenant, dict) and self._create_admin_role(tenant.data["id"])
except Exception as e:
self._handle_creation_error(e, tenant)
raise
return tenant
def _create_admin_role(self, tenant_id):
Role.objects.using(MainRouter.admin_db).create(
name="admin",
tenant_id=tenant_id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
def _handle_creation_error(self, error, tenant):
if tenant.data.get("id"):
try:
Tenant.objects.using(MainRouter.admin_db).filter(
id=tenant.data["id"]
).delete()
except ObjectDoesNotExist:
pass # Tenant might not exist, handle gracefully
def initial(self, request, *args, **kwargs):
user_id = str(request.user.id)
if (
request.resolver_match.url_name != "tenant-detail"
and request.method != "DELETE"
):
user_id = str(request.user.id)
with connection.cursor() as cursor:
cursor.execute(f"SELECT set_config('api.user_id', '{user_id}', TRUE);")
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
return super().initial(request, *args, **kwargs)
# TODO: DRY this when we have time
if request.auth is None:
raise NotAuthenticated
tenant_id = request.auth.get("tenant_id")
if tenant_id is None:
raise NotAuthenticated("Tenant ID is not present in token")
with rls_transaction(tenant_id):
self.request.tenant_id = tenant_id
return super().initial(request, *args, **kwargs)
@@ -92,12 +147,6 @@ class BaseUserViewset(BaseViewSet):
if tenant_id is None:
raise NotAuthenticated("Tenant ID is not present in token")
try:
uuid.UUID(tenant_id)
except ValueError:
raise ValidationError("Tenant ID must be a valid UUID")
with connection.cursor() as cursor:
cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
with rls_transaction(tenant_id):
self.request.tenant_id = tenant_id
return super().initial(request, *args, **kwargs)

View File

@@ -1,4 +1,5 @@
import secrets
import uuid
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
@@ -8,6 +9,7 @@ from django.core.paginator import Paginator
from django.db import connection, models, transaction
from psycopg2 import connect as psycopg2_connect
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
from rest_framework_json_api.serializers import ValidationError
DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
DB_PASSWORD = (
@@ -23,6 +25,8 @@ TASK_RUNNER_DB_TABLE = "django_celery_results_taskresult"
POSTGRES_TENANT_VAR = "api.tenant_id"
POSTGRES_USER_VAR = "api.user_id"
SET_CONFIG_QUERY = "SELECT set_config(%s, %s::text, TRUE);"
@contextmanager
def psycopg_connection(database_alias: str):
@@ -44,10 +48,23 @@ def psycopg_connection(database_alias: str):
@contextmanager
def tenant_transaction(tenant_id: str):
def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
"""
Creates a new database transaction setting the given configuration value for Postgres RLS. It validates the
if the value is a valid UUID.
Args:
value (str): Database configuration parameter value.
parameter (str): Database configuration parameter name, by default is 'api.tenant_id'.
"""
with transaction.atomic():
with connection.cursor() as cursor:
cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
try:
# just in case the value is an UUID object
uuid.UUID(str(value))
except ValueError:
raise ValidationError("Must be a valid UUID")
cursor.execute(SET_CONFIG_QUERY, [parameter, value])
yield cursor

View File

@@ -1,6 +1,10 @@
import uuid
from functools import wraps
from django.db import connection, transaction
from rest_framework_json_api.serializers import ValidationError
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
def set_tenant(func):
@@ -31,7 +35,7 @@ def set_tenant(func):
pass
# When calling the task
some_task.delay(arg1, tenant_id="1234-abcd-5678")
some_task.delay(arg1, tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5")
# The tenant context will be set before the task logic executes.
"""
@@ -43,9 +47,12 @@ def set_tenant(func):
tenant_id = kwargs.pop("tenant_id")
except KeyError:
raise KeyError("This task requires the tenant_id")
try:
uuid.UUID(tenant_id)
except ValueError:
raise ValidationError("Tenant ID must be a valid UUID")
with connection.cursor() as cursor:
cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
return func(*args, **kwargs)

View File

@@ -26,11 +26,13 @@ from api.models import (
Finding,
Invitation,
Membership,
PermissionChoices,
Provider,
ProviderGroup,
ProviderSecret,
Resource,
ResourceTag,
Role,
Scan,
ScanSummary,
SeverityChoices,
@@ -481,6 +483,26 @@ class UserFilter(FilterSet):
}
class RoleFilter(FilterSet):
    """Filter set for Role list endpoints.

    Supports exact/in lookups on id and name, date and date-range lookups on
    the timestamps, and a computed permission_state filter.
    """

    # Match on the date part only of the timestamp columns.
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    # permission_state is not a database column; it is derived from the
    # role's individual boolean permission fields, so filtering is delegated
    # to a custom method.
    permission_state = ChoiceFilter(
        choices=PermissionChoices.choices, method="filter_permission_state"
    )

    def filter_permission_state(self, queryset, name, value):
        # The model owns the mapping from the boolean permission fields to
        # the unlimited/limited/none states.
        return Role.filter_by_permission_state(queryset, value)

    class Meta:
        model = Role
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
class ComplianceOverviewFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
@@ -521,3 +543,25 @@ class ScanSummaryFilter(FilterSet):
"inserted_at": ["date", "gte", "lte"],
"region": ["exact", "icontains", "in"],
}
class ServiceOverviewFilter(ScanSummaryFilter):
    """Filter set for the services overview endpoint.

    Reuses the scan-summary filters but requires the caller to narrow the
    results by at least one inserted_at filter.
    """

    # Disable this inherited filter for the services overview.
    # NOTE(review): assumes ScanSummaryFilter declares muted_findings —
    # confirm against the parent class.
    muted_findings = None

    def is_valid(self):
        # Check if at least one of the inserted_at filters is present
        inserted_at_filters = [
            self.data.get("inserted_at"),
            self.data.get("inserted_at__gte"),
            self.data.get("inserted_at__lte"),
        ]
        if not any(inserted_at_filters):
            raise ValidationError(
                {
                    "inserted_at": [
                        "At least one of filter[inserted_at], filter[inserted_at__gte], or "
                        "filter[inserted_at__lte] is required."
                    ]
                }
            )
        return super().is_valid()

View File

@@ -58,5 +58,96 @@
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"inserted_at": "2024-11-13T11:55:41.237Z"
}
},
{
"model": "api.role",
"pk": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "admin_test",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": true,
"manage_scans": true,
"unlimited_visibility": true,
"inserted_at": "2024-11-20T15:32:42.402Z",
"updated_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.role",
"pk": "845ff03a-87ef-42ba-9786-6577c70c4df0",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "first_role",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": false,
"manage_scans": false,
"unlimited_visibility": true,
"inserted_at": "2024-11-20T15:31:53.239Z",
"updated_at": "2024-11-20T15:31:53.239Z"
}
},
{
"model": "api.role",
"pk": "902d726c-4bd5-413a-a2a4-f7b4754b6b20",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "third_role",
"manage_users": false,
"manage_account": false,
"manage_billing": false,
"manage_providers": false,
"manage_integrations": false,
"manage_scans": true,
"unlimited_visibility": false,
"inserted_at": "2024-11-20T15:34:05.440Z",
"updated_at": "2024-11-20T15:34:05.440Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "57fd024a-0a7f-49b4-a092-fa0979a07aaf",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "a3cd0099-1c13-4df1-a5e5-ecdfec561b35",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "481769f5-db2b-447b-8b00-1dee18db90ec",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "cfd84182-a058-40c2-af3c-0189b174940f",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.userrolerelationship",
"pk": "92339663-e954-4fd8-98fb-8bfe15949975",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"inserted_at": "2024-11-20T15:36:14.302Z"
}
}
]

View File

@@ -0,0 +1,246 @@
# Generated by Django 5.1.1 on 2024-12-05 12:29
import api.rls
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0002_token_migrations"),
]
operations = [
migrations.CreateModel(
name="Role",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("name", models.CharField(max_length=255)),
("manage_users", models.BooleanField(default=False)),
("manage_account", models.BooleanField(default=False)),
("manage_billing", models.BooleanField(default=False)),
("manage_providers", models.BooleanField(default=False)),
("manage_integrations", models.BooleanField(default=False)),
("manage_scans", models.BooleanField(default=False)),
("unlimited_visibility", models.BooleanField(default=False)),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "roles",
},
),
migrations.CreateModel(
name="RoleProviderGroupRelationship",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "role_provider_group_relationship",
},
),
migrations.CreateModel(
name="UserRoleRelationship",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "role_user_relationship",
},
),
migrations.AddField(
model_name="roleprovidergrouprelationship",
name="provider_group",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.providergroup"
),
),
migrations.AddField(
model_name="roleprovidergrouprelationship",
name="role",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.role"
),
),
migrations.AddField(
model_name="role",
name="provider_groups",
field=models.ManyToManyField(
related_name="roles",
through="api.RoleProviderGroupRelationship",
to="api.providergroup",
),
),
migrations.AddField(
model_name="userrolerelationship",
name="role",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.role"
),
),
migrations.AddField(
model_name="userrolerelationship",
name="user",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
migrations.AddField(
model_name="role",
name="users",
field=models.ManyToManyField(
related_name="roles",
through="api.UserRoleRelationship",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AddConstraint(
model_name="roleprovidergrouprelationship",
constraint=models.UniqueConstraint(
fields=("role_id", "provider_group_id"),
name="unique_role_provider_group_relationship",
),
),
migrations.AddConstraint(
model_name="roleprovidergrouprelationship",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_roleprovidergrouprelationship",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddConstraint(
model_name="userrolerelationship",
constraint=models.UniqueConstraint(
fields=("role_id", "user_id"), name="unique_role_user_relationship"
),
),
migrations.AddConstraint(
model_name="userrolerelationship",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_userrolerelationship",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddConstraint(
model_name="role",
constraint=models.UniqueConstraint(
fields=("tenant_id", "name"), name="unique_role_per_tenant"
),
),
migrations.AddConstraint(
model_name="role",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_role",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.CreateModel(
name="InvitationRoleRelationship",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
(
"invitation",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.invitation"
),
),
(
"role",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.role"
),
),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "role_invitation_relationship",
},
),
migrations.AddConstraint(
model_name="invitationrolerelationship",
constraint=models.UniqueConstraint(
fields=("role_id", "invitation_id"),
name="unique_role_invitation_relationship",
),
),
migrations.AddConstraint(
model_name="invitationrolerelationship",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_invitationrolerelationship",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddField(
model_name="role",
name="invitations",
field=models.ManyToManyField(
related_name="roles",
through="api.InvitationRoleRelationship",
to="api.invitation",
),
),
]

View File

@@ -0,0 +1,43 @@
from django.db import migrations
from api.db_router import MainRouter
def create_admin_role(apps, schema_editor):
    """Backfill an "admin" role for every existing tenant.

    For each tenant, get-or-create an "admin" role with every permission flag
    enabled, then link every user that holds a membership in that tenant to
    the role. All queries go through the admin database alias
    (MainRouter.admin_db).
    """
    # Use the historical model classes so the migration is independent of
    # later changes to the live models.
    Tenant = apps.get_model("api", "Tenant")
    Role = apps.get_model("api", "Role")
    User = apps.get_model("api", "User")
    UserRoleRelationship = apps.get_model("api", "UserRoleRelationship")

    for tenant in Tenant.objects.using(MainRouter.admin_db).all():
        # get_or_create keeps the migration idempotent if an "admin" role
        # already exists for the tenant.
        admin_role, _ = Role.objects.using(MainRouter.admin_db).get_or_create(
            name="admin",
            tenant=tenant,
            defaults={
                "manage_users": True,
                "manage_account": True,
                "manage_billing": True,
                "manage_providers": True,
                "manage_integrations": True,
                "manage_scans": True,
                "unlimited_visibility": True,
            },
        )
        users = User.objects.using(MainRouter.admin_db).filter(
            membership__tenant=tenant
        )
        for user in users:
            UserRoleRelationship.objects.using(MainRouter.admin_db).get_or_create(
                user=user,
                role=admin_role,
                tenant=tenant,
            )
class Migration(migrations.Migration):
    """Data migration: seed the per-tenant "admin" role after the RBAC
    tables are created. No reverse operation is provided, so this migration
    cannot be unapplied.
    """

    dependencies = [
        ("api", "0003_rbac"),
    ]

    operations = [
        migrations.RunPython(create_admin_role),
    ]

View File

@@ -69,6 +69,21 @@ class StateChoices(models.TextChoices):
CANCELLED = "cancelled", _("Cancelled")
class PermissionChoices(models.TextChoices):
    """
    Represents the different permission states that a role can have.

    Attributes:
        UNLIMITED: Indicates that the role possesses all permissions.
        LIMITED: Indicates that the role has some permissions but not all.
        NONE: Indicates that the role does not have any permissions.
    """

    # The first element is the stored value, the second the human label.
    UNLIMITED = "unlimited", _("Unlimited permissions")
    LIMITED = "limited", _("Limited permissions")
    NONE = "none", _("No permissions")
class ActiveProviderManager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(self.active_provider_filter())
@@ -294,23 +309,14 @@ class ProviderGroup(RowLevelSecurityProtectedModel):
]
class JSONAPIMeta:
resource_name = "provider-groups"
resource_name = "provider-group"
class ProviderGroupMembership(RowLevelSecurityProtectedModel):
objects = ActiveProviderManager()
all_objects = models.Manager()
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
provider = models.ForeignKey(
Provider,
on_delete=models.CASCADE,
)
provider_group = models.ForeignKey(
ProviderGroup,
on_delete=models.CASCADE,
)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
provider_group = models.ForeignKey(ProviderGroup, on_delete=models.CASCADE)
provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
inserted_at = models.DateTimeField(auto_now_add=True)
class Meta:
db_table = "provider_group_memberships"
@@ -327,7 +333,7 @@ class ProviderGroupMembership(RowLevelSecurityProtectedModel):
]
class JSONAPIMeta:
resource_name = "provider-group-memberships"
resource_name = "provider_groups-provider"
class Task(RowLevelSecurityProtectedModel):
@@ -851,6 +857,150 @@ class Invitation(RowLevelSecurityProtectedModel):
resource_name = "invitations"
class Role(RowLevelSecurityProtectedModel):
    """RBAC role: a tenant-scoped, named bundle of boolean permissions.

    Roles are linked many-to-many to provider groups, users, and invitations
    through explicit relationship models so the join rows can also carry
    tenant-scoped row-level security.
    """

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    # Unique per tenant (see Meta.constraints).
    name = models.CharField(max_length=255)
    manage_users = models.BooleanField(default=False)
    manage_account = models.BooleanField(default=False)
    manage_billing = models.BooleanField(default=False)
    manage_providers = models.BooleanField(default=False)
    manage_integrations = models.BooleanField(default=False)
    manage_scans = models.BooleanField(default=False)
    unlimited_visibility = models.BooleanField(default=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    provider_groups = models.ManyToManyField(
        ProviderGroup, through="RoleProviderGroupRelationship", related_name="roles"
    )
    users = models.ManyToManyField(
        User, through="UserRoleRelationship", related_name="roles"
    )
    invitations = models.ManyToManyField(
        Invitation, through="InvitationRoleRelationship", related_name="roles"
    )

    # Filter permission_state
    # Boolean fields that participate in the aggregate permission state.
    # Note: unlimited_visibility is deliberately not part of this list.
    PERMISSION_FIELDS = [
        "manage_users",
        "manage_account",
        "manage_billing",
        "manage_providers",
        "manage_integrations",
        "manage_scans",
    ]

    @property
    def permission_state(self):
        """Aggregate the manage_* flags into a PermissionChoices value:
        UNLIMITED if all are set, NONE if none are, LIMITED otherwise."""
        values = [getattr(self, field) for field in self.PERMISSION_FIELDS]
        if all(values):
            return PermissionChoices.UNLIMITED
        elif not any(values):
            return PermissionChoices.NONE
        else:
            return PermissionChoices.LIMITED

    @classmethod
    def filter_by_permission_state(cls, queryset, value):
        """Filter *queryset* by the same aggregate state as permission_state,
        expressed as database conditions instead of per-instance Python."""
        q_all_true = Q(**{field: True for field in cls.PERMISSION_FIELDS})
        q_all_false = Q(**{field: False for field in cls.PERMISSION_FIELDS})

        if value == PermissionChoices.UNLIMITED:
            return queryset.filter(q_all_true)
        elif value == PermissionChoices.NONE:
            return queryset.filter(q_all_false)
        else:
            # LIMITED: anything that is neither all-true nor all-false.
            return queryset.exclude(q_all_true | q_all_false)

    class Meta:
        db_table = "roles"

        constraints = [
            # Role names are unique within a tenant.
            models.UniqueConstraint(
                fields=["tenant_id", "name"],
                name="unique_role_per_tenant",
            ),
            # Tenant-scoped row-level security on all CRUD statements.
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "role"
class RoleProviderGroupRelationship(RowLevelSecurityProtectedModel):
    """Join table for the Role <-> ProviderGroup many-to-many relation."""

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    provider_group = models.ForeignKey(ProviderGroup, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "role_provider_group_relationship"

        constraints = [
            # A role may be linked to a given provider group only once.
            models.UniqueConstraint(
                fields=["role_id", "provider_group_id"],
                name="unique_role_provider_group_relationship",
            ),
            # Tenant-scoped row-level security on all CRUD statements.
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "role-provider_groups"
class UserRoleRelationship(RowLevelSecurityProtectedModel):
    """Join table for the Role <-> User many-to-many relation."""

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "role_user_relationship"

        constraints = [
            # A user may hold a given role only once.
            models.UniqueConstraint(
                fields=["role_id", "user_id"],
                name="unique_role_user_relationship",
            ),
            # Tenant-scoped row-level security on all CRUD statements.
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "user-roles"
class InvitationRoleRelationship(RowLevelSecurityProtectedModel):
    """Join table for the Role <-> Invitation many-to-many relation."""

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    invitation = models.ForeignKey(Invitation, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "role_invitation_relationship"

        constraints = [
            # An invitation may carry a given role only once.
            models.UniqueConstraint(
                fields=["role_id", "invitation_id"],
                name="unique_role_invitation_relationship",
            ),
            # Tenant-scoped row-level security on all CRUD statements.
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "invitation-roles"
class ComplianceOverview(RowLevelSecurityProtectedModel):
objects = ActiveProviderManager()
all_objects = models.Manager()

View File

@@ -0,0 +1,70 @@
from enum import Enum
from rest_framework.permissions import BasePermission
from api.models import Provider, Role, User
from api.db_router import MainRouter
from typing import Optional
from django.db.models import QuerySet
class Permissions(Enum):
    """Names of the boolean permission fields on the Role model.

    Each member's value is the exact Role attribute name, so checks can be
    performed with getattr(role, permission.value).
    """

    MANAGE_USERS = "manage_users"
    MANAGE_ACCOUNT = "manage_account"
    MANAGE_BILLING = "manage_billing"
    MANAGE_PROVIDERS = "manage_providers"
    MANAGE_INTEGRATIONS = "manage_integrations"
    MANAGE_SCANS = "manage_scans"
    UNLIMITED_VISIBILITY = "unlimited_visibility"
class HasPermissions(BasePermission):
    """
    Custom permission to check if the user's role has the required permissions.

    The required permissions should be specified in the view as a list of
    Permissions members in `required_permissions`. Views that declare no
    required permissions are open to any authenticated user.
    """

    def has_permission(self, request, view):
        required_permissions = getattr(view, "required_permissions", [])
        if not required_permissions:
            return True

        # Fetch the user's roles through the admin database alias
        # (MainRouter.admin_db).
        user_roles = (
            User.objects.using(MainRouter.admin_db).get(id=request.user.id).roles.all()
        )
        if not user_roles:
            return False

        # NOTE(review): only the user's first role is inspected; a user whose
        # permissions are spread across several roles will be denied — confirm
        # the single-role-per-user assumption is intentional.
        for perm in required_permissions:
            if not getattr(user_roles[0], perm.value, False):
                return False

        return True
def get_role(user: User) -> Optional[Role]:
    """
    Retrieve the first role assigned to the given user.

    Returns:
        The user's first Role instance if the user has any roles, otherwise None.
    """
    # NOTE(review): mirrors the single-role assumption in HasPermissions —
    # any additional roles assigned to the user are ignored.
    return user.roles.first()
def get_providers(role: Role) -> QuerySet[Provider]:
    """
    Return a distinct queryset of Providers accessible by the given role.

    If the role has no associated provider groups, an empty queryset is returned.

    Args:
        role: A Role instance.

    Returns:
        A QuerySet of Provider objects filtered by the role's provider groups.
        If the role has no provider groups, returns an empty queryset.
    """
    groups = role.provider_groups.all()
    if groups.exists():
        # distinct() because a provider may belong to several of the groups.
        return Provider.objects.filter(provider_groups__in=groups).distinct()
    return Provider.objects.none()

View File

@@ -2,7 +2,7 @@ from contextlib import nullcontext
from rest_framework_json_api.renderers import JSONRenderer
from api.db_utils import tenant_transaction
from api.db_utils import rls_transaction
class APIJSONRenderer(JSONRenderer):
@@ -13,9 +13,9 @@ class APIJSONRenderer(JSONRenderer):
tenant_id = getattr(request, "tenant_id", None) if request else None
include_param_present = "include" in request.query_params if request else False
# Use tenant_transaction if needed for included resources, otherwise do nothing
# Use rls_transaction if needed for included resources, otherwise do nothing
context_manager = (
tenant_transaction(tenant_id)
rls_transaction(tenant_id)
if tenant_id and include_param_present
else nullcontext()
)

File diff suppressed because it is too large Load Diff

View File

@@ -1,12 +1,10 @@
import pytest
from django.urls import reverse
from unittest.mock import patch
from rest_framework.test import APIClient
from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
@patch("api.v1.views.MainRouter.admin_db", new="default")
@pytest.mark.django_db
def test_basic_authentication():
client = APIClient()

View File

@@ -13,6 +13,7 @@ def test_check_resources_between_different_tenants(
enforce_test_user_db_connection,
authenticated_api_client,
tenants_fixture,
set_user_admin_roles_fixture,
):
client = authenticated_api_client

View File

@@ -6,8 +6,10 @@ from django.db.utils import ConnectionRouter
from api.db_router import MainRouter
from api.rls import Tenant
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
from unittest.mock import patch
@patch("api.db_router.MainRouter.admin_db", new="admin")
class TestMainDatabaseRouter:
@pytest.fixture(scope="module")
def router(self):

View File

@@ -1,7 +1,9 @@
from unittest.mock import patch, call
import uuid
from unittest.mock import call, patch
import pytest
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
from api.decorators import set_tenant
@@ -15,12 +17,12 @@ class TestSetTenantDecorator:
def random_func(arg):
return arg
tenant_id = "1234-abcd-5678"
tenant_id = str(uuid.uuid4())
result = random_func("test_arg", tenant_id=tenant_id)
assert (
call(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
call(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
in mock_cursor.execute.mock_calls
)
assert result == "test_arg"

View File

@@ -0,0 +1,306 @@
import pytest
from django.urls import reverse
from rest_framework import status
from unittest.mock import patch, ANY, Mock
@pytest.mark.django_db
class TestUserViewSet:
    """RBAC tests for the /users endpoints.

    Each test hits the same view with API clients backed by roles of
    different privilege levels (full permissions, no permissions, no roles)
    and asserts the expected HTTP status code and payload.
    """

    def test_list_users_with_all_permissions(self, authenticated_client_rbac):
        # A fully privileged role can list users.
        response = authenticated_client_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_200_OK
        assert isinstance(response.json()["data"], list)

    def test_list_users_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        # A role granting no permissions is rejected on the list endpoint.
        response = authenticated_client_no_permissions_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_retrieve_user_with_no_roles(
        self, authenticated_client_rbac_noroles, create_test_user_rbac_no_roles
    ):
        # A user with no roles at all cannot retrieve user details.
        response = authenticated_client_rbac_noroles.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac_no_roles.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_create_user_with_all_permissions(self, authenticated_client_rbac):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_create_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_no_permissions_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        # NOTE(review): user creation succeeds even without permissions —
        # presumably sign-up is intentionally open; confirm against the view.
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_partial_update_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        updated_data = {
            "data": {
                "type": "users",
                "id": str(create_test_user_rbac.id),
                "attributes": {"name": "Updated Name"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id}),
            data=updated_data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["name"] == "Updated Name"

    def test_partial_update_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        updated_data = {
            "data": {
                "type": "users",
                "attributes": {"name": "Updated Name"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user.id}),
            data=updated_data,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_delete_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT

    def test_delete_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_me_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        # /users/me always returns the caller's own record.
        response = authenticated_client_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_me_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        # Even an unprivileged user can view their own profile.
        response = authenticated_client_no_permissions_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["email"] == "rbac_limited@rbac.com"
@pytest.mark.django_db
class TestProviderViewSet:
    """RBAC tests for the /providers endpoints.

    Privileged clients see and mutate providers; unprivileged clients get an
    empty list, 404 on detail (no visibility), or 403 on mutations. Async
    actions (delete, connection check) are verified through mocked Celery
    tasks and the Content-Location task header.
    """

    def test_list_providers_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        response = authenticated_client_rbac.get(reverse("provider-list"))
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == len(providers_fixture)

    def test_list_providers_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        # The list endpoint succeeds but is filtered down to nothing.
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-list")
        )
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

    def test_retrieve_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == provider.alias

    def test_retrieve_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        # Invisible objects yield 404 rather than 403 (no information leak).
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_create_provider_with_all_permissions(self, authenticated_client_rbac):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["alias"] == "new_alias"

    def test_create_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_partial_update_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        payload = {
            "data": {
                "type": "providers",
                "id": provider.id,
                "attributes": {"alias": "updated_alias"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=payload,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == "updated_alias"

    def test_partial_update_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        update_payload = {
            "data": {
                "type": "providers",
                "attributes": {"alias": "updated_alias"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=update_payload,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.delete_provider_task.delay")
    def test_delete_provider_with_all_permissions(
        self,
        mock_delete_task,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        # Deletion is asynchronous: the view enqueues a task and returns 202
        # with a Content-Location pointing at the task resource.
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        mock_delete_task.return_value = task_mock
        mock_task_get.return_value = prowler_task
        provider1, *_ = providers_fixture
        response = authenticated_client_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_delete_task.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_delete_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.check_provider_connection_task.delay")
    def test_connection_with_all_permissions(
        self,
        mock_provider_connection,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        # Connection checks are also task-based; the provider starts with an
        # unknown connection state.
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        task_mock.status = "PENDING"
        mock_provider_connection.return_value = task_mock
        mock_task_get.return_value = prowler_task
        provider1, *_ = providers_fixture
        assert provider1.connected is None
        assert provider1.connection_last_checked_at is None
        response = authenticated_client_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_provider_connection.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_connection_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

View File

@@ -15,9 +15,12 @@ from api.models import (
ProviderGroup,
ProviderGroupMembership,
ProviderSecret,
Role,
RoleProviderGroupRelationship,
Scan,
StateChoices,
User,
UserRoleRelationship,
)
from api.rls import Tenant
@@ -50,7 +53,6 @@ class TestUserViewSet:
assert response.status_code == status.HTTP_200_OK
assert response.json()["data"]["attributes"]["email"] == create_test_user.email
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_users_create(self, client):
valid_user_payload = {
"name": "test",
@@ -67,7 +69,6 @@ class TestUserViewSet:
== valid_user_payload["email"].lower()
)
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_users_create_duplicated_email(self, client):
# Create a user
self.test_users_create(client)
@@ -122,7 +123,6 @@ class TestUserViewSet:
"NonExistentEmail@prowler.com",
],
)
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_users_create_used_email(self, authenticated_client, email):
# First user created; no errors should occur
user_payload = {
@@ -418,13 +418,23 @@ class TestTenantViewSet:
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_tenants_delete(self, authenticated_client, tenants_fixture):
@patch("api.v1.views.delete_tenant_task.apply_async")
def test_tenants_delete(
self, delete_tenant_mock, authenticated_client, tenants_fixture
):
def _delete_tenant(kwargs):
Tenant.objects.filter(pk=kwargs.get("tenant_id")).delete()
delete_tenant_mock.side_effect = _delete_tenant
tenant1, *_ = tenants_fixture
response = authenticated_client.delete(
reverse("tenant-detail", kwargs={"pk": tenant1.id})
)
assert response.status_code == status.HTTP_204_NO_CONTENT
assert Tenant.objects.count() == len(tenants_fixture) - 1
assert Membership.objects.filter(tenant_id=tenant1.id).count() == 0
# User is not deleted because it has another membership
assert User.objects.count() == 1
def test_tenants_delete_invalid(self, authenticated_client):
response = authenticated_client.delete(
@@ -533,6 +543,7 @@ class TestTenantViewSet:
# Test user + 2 extra users for tenant 2
assert len(response.json()["data"]) == 3
@patch("api.v1.views.TenantMembersViewSet.required_permissions", [])
def test_tenants_list_memberships_as_member(
self, authenticated_client, tenants_fixture, extra_users
):
@@ -804,7 +815,7 @@ class TestProviderViewSet:
@pytest.mark.parametrize(
"include_values, expected_resources",
[
("provider_groups", ["provider-groups"]),
("provider_groups", ["provider-group"]),
],
)
def test_providers_list_include(
@@ -1189,7 +1200,7 @@ class TestProviderGroupViewSet:
def test_provider_group_create(self, authenticated_client):
data = {
"data": {
"type": "provider-groups",
"type": "provider-group",
"attributes": {
"name": "Test Provider Group",
},
@@ -1208,7 +1219,7 @@ class TestProviderGroupViewSet:
def test_provider_group_create_invalid(self, authenticated_client):
data = {
"data": {
"type": "provider-groups",
"type": "provider-group",
"attributes": {
# Name is missing
},
@@ -1230,7 +1241,7 @@ class TestProviderGroupViewSet:
data = {
"data": {
"id": str(provider_group.id),
"type": "provider-groups",
"type": "provider-group",
"attributes": {
"name": "Updated Provider Group Name",
},
@@ -1252,7 +1263,7 @@ class TestProviderGroupViewSet:
data = {
"data": {
"id": str(provider_group.id),
"type": "provider-groups",
"type": "provider-group",
"attributes": {
"name": "", # Invalid name
},
@@ -1283,100 +1294,6 @@ class TestProviderGroupViewSet:
)
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_provider_group_providers_update(
self, authenticated_client, provider_groups_fixture, providers_fixture
):
provider_group = provider_groups_fixture[0]
provider_ids = [str(provider.id) for provider in providers_fixture]
data = {
"data": {
"type": "provider-group-memberships",
"id": str(provider_group.id),
"attributes": {"provider_ids": provider_ids},
}
}
response = authenticated_client.put(
reverse("providergroup-providers", kwargs={"pk": provider_group.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_200_OK
memberships = ProviderGroupMembership.objects.filter(
provider_group=provider_group
)
assert memberships.count() == len(provider_ids)
for membership in memberships:
assert str(membership.provider_id) in provider_ids
def test_provider_group_providers_update_non_existent_provider(
self, authenticated_client, provider_groups_fixture, providers_fixture
):
provider_group = provider_groups_fixture[0]
provider_ids = [str(provider.id) for provider in providers_fixture]
provider_ids[-1] = "1b59e032-3eb6-4694-93a5-df84cd9b3ce2"
data = {
"data": {
"type": "provider-group-memberships",
"id": str(provider_group.id),
"attributes": {"provider_ids": provider_ids},
}
}
response = authenticated_client.put(
reverse("providergroup-providers", kwargs={"pk": provider_group.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert (
errors[0]["detail"]
== f"The following provider IDs do not exist: {provider_ids[-1]}"
)
def test_provider_group_providers_update_invalid_provider(
self, authenticated_client, provider_groups_fixture
):
provider_group = provider_groups_fixture[1]
invalid_provider_id = "non-existent-id"
data = {
"data": {
"type": "provider-group-memberships",
"id": str(provider_group.id),
"attributes": {"provider_ids": [invalid_provider_id]},
}
}
response = authenticated_client.put(
reverse("providergroup-providers", kwargs={"pk": provider_group.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert errors[0]["detail"] == "Must be a valid UUID."
def test_provider_group_providers_update_invalid_payload(
self, authenticated_client, provider_groups_fixture
):
provider_group = provider_groups_fixture[2]
data = {
# Missing "provider_ids"
}
response = authenticated_client.put(
reverse("providergroup-providers", kwargs={"pk": provider_group.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert errors[0]["detail"] == "Received document does not contain primary data"
def test_provider_group_retrieve_not_found(self, authenticated_client):
response = authenticated_client.get(
reverse("providergroup-detail", kwargs={"pk": "non-existent-id"})
@@ -2641,7 +2558,9 @@ class TestInvitationViewSet:
)
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_invitations_create_valid(self, authenticated_client, create_test_user):
def test_invitations_create_valid(
self, authenticated_client, create_test_user, roles_fixture
):
user = create_test_user
data = {
"data": {
@@ -2650,6 +2569,11 @@ class TestInvitationViewSet:
"email": "any_email@prowler.com",
"expires_at": self.TOMORROW_ISO,
},
"relationships": {
"roles": {
"data": [{"type": "role", "id": str(roles_fixture[0].id)}]
}
},
}
}
response = authenticated_client.post(
@@ -2708,6 +2632,11 @@ class TestInvitationViewSet:
response.json()["errors"][0]["source"]["pointer"]
== "/data/attributes/email"
)
assert response.json()["errors"][1]["code"] == "required"
assert (
response.json()["errors"][1]["source"]["pointer"]
== "/data/relationships/roles"
)
def test_invitations_create_invalid_expires_at(
self, authenticated_client, invitations_fixture
@@ -2734,6 +2663,11 @@ class TestInvitationViewSet:
response.json()["errors"][0]["source"]["pointer"]
== "/data/attributes/expires_at"
)
assert response.json()["errors"][1]["code"] == "required"
assert (
response.json()["errors"][1]["source"]["pointer"]
== "/data/relationships/roles"
)
def test_invitations_partial_update_valid(
self, authenticated_client, invitations_fixture
@@ -2921,7 +2855,6 @@ class TestInvitationViewSet:
== "This invitation cannot be revoked."
)
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_invitations_accept_invitation_new_user(self, client, invitations_fixture):
invitation, *_ = invitations_fixture
@@ -2947,7 +2880,6 @@ class TestInvitationViewSet:
user__email__iexact=invitation.email, tenant=invitation.tenant
).exists()
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_invitations_accept_invitation_existing_user(
self, authenticated_client, create_test_user, tenants_fixture
):
@@ -2972,7 +2904,6 @@ class TestInvitationViewSet:
response = authenticated_client.post(
reverse("invitation-accept"), data=data, format="json"
)
assert response.status_code == status.HTTP_201_CREATED
invitation.refresh_from_db()
assert Membership.objects.filter(
@@ -2980,7 +2911,6 @@ class TestInvitationViewSet:
).exists()
assert invitation.state == Invitation.State.ACCEPTED.value
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_invitations_accept_invitation_invalid_token(self, authenticated_client):
data = {
"invitation_token": "invalid_token",
@@ -2993,7 +2923,6 @@ class TestInvitationViewSet:
assert response.status_code == status.HTTP_404_NOT_FOUND
assert response.json()["errors"][0]["code"] == "not_found"
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_invitations_accept_invitation_invalid_token_expired(
self, authenticated_client, invitations_fixture
):
@@ -3012,7 +2941,6 @@ class TestInvitationViewSet:
assert response.status_code == status.HTTP_410_GONE
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_invitations_accept_invitation_invalid_token_expired_new_user(
self, client, invitations_fixture
):
@@ -3036,7 +2964,6 @@ class TestInvitationViewSet:
assert response.status_code == status.HTTP_410_GONE
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_invitations_accept_invitation_invalid_token_accepted(
self, authenticated_client, invitations_fixture
):
@@ -3060,7 +2987,6 @@ class TestInvitationViewSet:
== "This invitation is no longer valid."
)
@patch("api.db_router.MainRouter.admin_db", new="default")
def test_invitations_accept_invitation_invalid_token_revoked(
self, authenticated_client, invitations_fixture
):
@@ -3155,6 +3081,620 @@ class TestInvitationViewSet:
assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
class TestRoleViewSet:
    """CRUD, filtering, and sorting tests for the /roles endpoints."""

    def test_role_list(self, authenticated_client, roles_fixture):
        response = authenticated_client.get(reverse("role-list"))
        assert response.status_code == status.HTTP_200_OK
        assert (
            len(response.json()["data"]) == len(roles_fixture) + 2
        )  # 2 default admin roles, one for each tenant

    def test_role_retrieve(self, authenticated_client, roles_fixture):
        role = roles_fixture[0]
        response = authenticated_client.get(
            reverse("role-detail", kwargs={"pk": role.id})
        )
        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]
        assert data["id"] == str(role.id)
        assert data["attributes"]["name"] == role.name

    @pytest.mark.parametrize(
        ("permission_state", "index"),
        [("limited", 0), ("unlimited", 2), ("none", 3)],
    )
    def test_role_retrieve_permission_state(
        self, authenticated_client, roles_fixture, permission_state, index
    ):
        # The fixture roles at these indices map to each permission_state value.
        role = roles_fixture[index]
        response = authenticated_client.get(
            reverse("role-detail", kwargs={"pk": role.id}),
            {"filter[permission_state]": permission_state},
        )
        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]
        assert data["id"] == str(role.id)
        assert data["attributes"]["name"] == role.name
        assert data["attributes"]["permission_state"] == permission_state

    def test_role_create(self, authenticated_client):
        # JSON:API booleans are sent as strings here; the serializer coerces them.
        data = {
            "data": {
                "type": "role",
                "attributes": {
                    "name": "Test Role",
                    "manage_users": "false",
                    "manage_account": "false",
                    "manage_billing": "false",
                    "manage_providers": "true",
                    "manage_integrations": "true",
                    "manage_scans": "true",
                    "unlimited_visibility": "true",
                },
                "relationships": {"provider_groups": {"data": []}},
            }
        }
        response = authenticated_client.post(
            reverse("role-list"),
            data=json.dumps(data),
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_201_CREATED
        response_data = response.json()["data"]
        assert response_data["attributes"]["name"] == "Test Role"
        assert Role.objects.filter(name="Test Role").exists()

    def test_role_provider_groups_create(
        self, authenticated_client, provider_groups_fixture
    ):
        # Creating a role can attach provider groups via relationships.
        data = {
            "data": {
                "type": "role",
                "attributes": {
                    "name": "Test Role",
                    "manage_users": "false",
                    "manage_account": "false",
                    "manage_billing": "false",
                    "manage_providers": "true",
                    "manage_integrations": "true",
                    "manage_scans": "true",
                    "unlimited_visibility": "true",
                },
                "relationships": {
                    "provider_groups": {
                        "data": [
                            {"type": "provider-group", "id": str(provider_group.id)}
                            for provider_group in provider_groups_fixture[:2]
                        ]
                    }
                },
            }
        }
        response = authenticated_client.post(
            reverse("role-list"),
            data=json.dumps(data),
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_201_CREATED
        response_data = response.json()["data"]
        assert response_data["attributes"]["name"] == "Test Role"
        assert Role.objects.filter(name="Test Role").exists()
        relationships = (
            Role.objects.filter(name="Test Role").first().provider_groups.all()
        )
        assert relationships.count() == 2
        for relationship in relationships:
            assert relationship.id in [pg.id for pg in provider_groups_fixture[:2]]

    def test_role_create_invalid(self, authenticated_client):
        data = {
            "data": {
                "type": "role",
                "attributes": {
                    # Name is missing
                },
            }
        }
        response = authenticated_client.post(
            reverse("role-list"),
            data=json.dumps(data),
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"]
        assert errors[0]["source"]["pointer"] == "/data/attributes/name"

    def test_role_partial_update(self, authenticated_client, roles_fixture):
        role = roles_fixture[1]
        data = {
            "data": {
                "id": str(role.id),
                "type": "role",
                "attributes": {
                    "name": "Updated Provider Group Name",
                },
            }
        }
        response = authenticated_client.patch(
            reverse("role-detail", kwargs={"pk": role.id}),
            data=json.dumps(data),
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        role.refresh_from_db()
        assert role.name == "Updated Provider Group Name"

    def test_role_partial_update_invalid(self, authenticated_client, roles_fixture):
        role = roles_fixture[2]
        data = {
            "data": {
                "id": str(role.id),
                "type": "role",
                "attributes": {
                    "name": "",  # Invalid name
                },
            }
        }
        response = authenticated_client.patch(
            reverse("role-detail", kwargs={"pk": role.id}),
            data=json.dumps(data),
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"]
        assert errors[0]["source"]["pointer"] == "/data/attributes/name"

    def test_role_destroy(self, authenticated_client, roles_fixture):
        role = roles_fixture[2]
        response = authenticated_client.delete(
            reverse("role-detail", kwargs={"pk": role.id})
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        assert not Role.objects.filter(id=role.id).exists()

    def test_role_destroy_invalid(self, authenticated_client):
        response = authenticated_client.delete(
            reverse("role-detail", kwargs={"pk": "non-existent-id"})
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_role_retrieve_not_found(self, authenticated_client):
        response = authenticated_client.get(
            reverse("role-detail", kwargs={"pk": "non-existent-id"})
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_role_list_filters(self, authenticated_client, roles_fixture):
        role = roles_fixture[0]
        response = authenticated_client.get(
            reverse("role-list"), {"filter[name]": role.name}
        )
        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]
        assert len(data) == 1
        assert data[0]["attributes"]["name"] == role.name

    def test_role_list_sorting(self, authenticated_client, roles_fixture):
        # Excludes the default "admin" roles, then checks case-insensitive order.
        response = authenticated_client.get(reverse("role-list"), {"sort": "name"})
        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]
        names = [
            item["attributes"]["name"]
            for item in data
            if item["attributes"]["name"] != "admin"
        ]
        assert names == sorted(names, key=lambda v: v.lower())

    def test_role_invalid_method(self, authenticated_client):
        response = authenticated_client.put(reverse("role-list"))
        assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
@pytest.mark.django_db
class TestUserRoleRelationshipViewSet:
    """Tests for the user ↔ role JSON:API relationship endpoint.

    POST adds roles, PATCH replaces the full set, DELETE clears it. The test
    user starts with an admin role per tenant, which the counts account for.
    """

    def test_create_relationship(
        self, authenticated_client, roles_fixture, create_test_user
    ):
        data = {
            "data": [{"type": "role", "id": str(role.id)} for role in roles_fixture[:2]]
        }
        response = authenticated_client.post(
            reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = UserRoleRelationship.objects.filter(user=create_test_user.id)
        # 2 pre-existing admin relationships + the 2 just added.
        assert relationships.count() == 4
        for relationship in relationships[2:]:  # Skip admin role
            assert relationship.role.id in [r.id for r in roles_fixture[:2]]

    def test_create_relationship_already_exists(
        self, authenticated_client, roles_fixture, create_test_user
    ):
        # Re-POSTing an already-linked role must be rejected, not duplicated.
        data = {
            "data": [{"type": "role", "id": str(role.id)} for role in roles_fixture[:2]]
        }
        authenticated_client.post(
            reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
            data=data,
            content_type="application/vnd.api+json",
        )
        data = {
            "data": [
                {"type": "role", "id": str(roles_fixture[0].id)},
            ]
        }
        response = authenticated_client.post(
            reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"]["detail"]
        assert "already associated" in errors

    def test_partial_update_relationship(
        self, authenticated_client, roles_fixture, create_test_user
    ):
        # PATCH replaces the entire relationship set with the given roles.
        data = {
            "data": [
                {"type": "role", "id": str(roles_fixture[2].id)},
            ]
        }
        response = authenticated_client.patch(
            reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = UserRoleRelationship.objects.filter(user=create_test_user.id)
        assert relationships.count() == 1
        assert {rel.role.id for rel in relationships} == {roles_fixture[2].id}
        data = {
            "data": [
                {"type": "role", "id": str(roles_fixture[1].id)},
                {"type": "role", "id": str(roles_fixture[2].id)},
            ]
        }
        response = authenticated_client.patch(
            reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = UserRoleRelationship.objects.filter(user=create_test_user.id)
        assert relationships.count() == 2
        assert {rel.role.id for rel in relationships} == {
            roles_fixture[1].id,
            roles_fixture[2].id,
        }

    def test_destroy_relationship(
        self, authenticated_client, roles_fixture, create_test_user
    ):
        # DELETE clears every role relationship for the user.
        response = authenticated_client.delete(
            reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = UserRoleRelationship.objects.filter(role=roles_fixture[0].id)
        assert relationships.count() == 0

    def test_invalid_provider_group_id(self, authenticated_client, create_test_user):
        # A malformed UUID in the payload yields a validation error.
        invalid_id = "non-existent-id"
        data = {"data": [{"type": "provider-group", "id": invalid_id}]}
        response = authenticated_client.post(
            reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"][0]["detail"]
        assert "valid UUID" in errors
@pytest.mark.django_db
class TestRoleProviderGroupRelationshipViewSet:
    """Tests for the role <-> provider-group relationship endpoint
    (``role-provider-groups-relationship``): create, duplicate rejection,
    partial update (full replacement), delete, and invalid-id handling."""

    def test_create_relationship(
        self, authenticated_client, roles_fixture, provider_groups_fixture
    ):
        # Link the first two provider groups to the first role.
        data = {
            "data": [
                {"type": "provider-group", "id": str(provider_group.id)}
                for provider_group in provider_groups_fixture[:2]
            ]
        }
        response = authenticated_client.post(
            reverse(
                "role-provider-groups-relationship", kwargs={"pk": roles_fixture[0].id}
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        # Both relationships must now exist and point at the posted groups.
        relationships = RoleProviderGroupRelationship.objects.filter(
            role=roles_fixture[0].id
        )
        assert relationships.count() == 2
        for relationship in relationships:
            assert relationship.provider_group.id in [
                pg.id for pg in provider_groups_fixture[:2]
            ]

    def test_create_relationship_already_exists(
        self, authenticated_client, roles_fixture, provider_groups_fixture
    ):
        # First POST establishes the relationships.
        data = {
            "data": [
                {"type": "provider-group", "id": str(provider_group.id)}
                for provider_group in provider_groups_fixture[:2]
            ]
        }
        authenticated_client.post(
            reverse(
                "role-provider-groups-relationship", kwargs={"pk": roles_fixture[0].id}
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        # Re-posting one of the already-linked groups must be rejected.
        data = {
            "data": [
                {"type": "provider-group", "id": str(provider_groups_fixture[0].id)},
            ]
        }
        response = authenticated_client.post(
            reverse(
                "role-provider-groups-relationship", kwargs={"pk": roles_fixture[0].id}
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        # NOTE: this endpoint returns errors as a dict ("errors" -> "detail"),
        # unlike the invalid-UUID case below which returns a list of errors.
        errors = response.json()["errors"]["detail"]
        assert "already associated" in errors

    def test_partial_update_relationship(
        self, authenticated_client, roles_fixture, provider_groups_fixture
    ):
        # PATCH replaces the whole relationship set: first shrink it to one group…
        data = {
            "data": [
                {"type": "provider-group", "id": str(provider_groups_fixture[1].id)},
            ]
        }
        response = authenticated_client.patch(
            reverse(
                "role-provider-groups-relationship", kwargs={"pk": roles_fixture[2].id}
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = RoleProviderGroupRelationship.objects.filter(
            role=roles_fixture[2].id
        )
        assert relationships.count() == 1
        assert {rel.provider_group.id for rel in relationships} == {
            provider_groups_fixture[1].id
        }
        # …then grow it back to two groups and verify the full replacement.
        data = {
            "data": [
                {"type": "provider-group", "id": str(provider_groups_fixture[1].id)},
                {"type": "provider-group", "id": str(provider_groups_fixture[2].id)},
            ]
        }
        response = authenticated_client.patch(
            reverse(
                "role-provider-groups-relationship", kwargs={"pk": roles_fixture[2].id}
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = RoleProviderGroupRelationship.objects.filter(
            role=roles_fixture[2].id
        )
        assert relationships.count() == 2
        assert {rel.provider_group.id for rel in relationships} == {
            provider_groups_fixture[1].id,
            provider_groups_fixture[2].id,
        }

    def test_destroy_relationship(
        self, authenticated_client, roles_fixture, provider_groups_fixture
    ):
        # DELETE clears every provider-group relationship of the role.
        response = authenticated_client.delete(
            reverse(
                "role-provider-groups-relationship", kwargs={"pk": roles_fixture[0].id}
            ),
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = RoleProviderGroupRelationship.objects.filter(
            role=roles_fixture[0].id
        )
        assert relationships.count() == 0

    def test_invalid_provider_group_id(self, authenticated_client, roles_fixture):
        # A non-UUID identifier must fail serializer validation with a 400.
        invalid_id = "non-existent-id"
        data = {"data": [{"type": "provider-group", "id": invalid_id}]}
        response = authenticated_client.post(
            reverse(
                "role-provider-groups-relationship", kwargs={"pk": roles_fixture[1].id}
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"][0]["detail"]
        assert "valid UUID" in errors
@pytest.mark.django_db
class TestProviderGroupMembershipViewSet:
    """Tests for the provider-group <-> provider relationship endpoint
    (``provider_group-providers-relationship``)."""

    def test_create_relationship(
        self, authenticated_client, providers_fixture, provider_groups_fixture
    ):
        # Link the first two providers to the first provider group.
        provider_group, *_ = provider_groups_fixture
        data = {
            "data": [
                {"type": "provider", "id": str(provider.id)}
                for provider in providers_fixture[:2]
            ]
        }
        response = authenticated_client.post(
            reverse(
                "provider_group-providers-relationship",
                kwargs={"pk": provider_group.id},
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = ProviderGroupMembership.objects.filter(
            provider_group=provider_group.id
        )
        assert relationships.count() == 2
        for relationship in relationships:
            assert relationship.provider.id in [p.id for p in providers_fixture[:2]]

    def test_create_relationship_already_exists(
        self, authenticated_client, providers_fixture, provider_groups_fixture
    ):
        provider_group, *_ = provider_groups_fixture
        data = {
            "data": [
                {"type": "provider", "id": str(provider.id)}
                for provider in providers_fixture[:2]
            ]
        }
        authenticated_client.post(
            reverse(
                "provider_group-providers-relationship",
                kwargs={"pk": provider_group.id},
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        # Re-posting an already-linked provider must be rejected.
        data = {
            "data": [
                {"type": "provider", "id": str(providers_fixture[0].id)},
            ]
        }
        response = authenticated_client.post(
            reverse(
                "provider_group-providers-relationship",
                kwargs={"pk": provider_group.id},
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"]["detail"]
        assert "already associated" in errors

    def test_partial_update_relationship(
        self, authenticated_client, providers_fixture, provider_groups_fixture
    ):
        # PATCH replaces the whole membership set: first one provider…
        provider_group, *_ = provider_groups_fixture
        data = {
            "data": [
                {"type": "provider", "id": str(providers_fixture[1].id)},
            ]
        }
        response = authenticated_client.patch(
            reverse(
                "provider_group-providers-relationship",
                kwargs={"pk": provider_group.id},
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = ProviderGroupMembership.objects.filter(
            provider_group=provider_group.id
        )
        assert relationships.count() == 1
        assert {rel.provider.id for rel in relationships} == {providers_fixture[1].id}
        # …then two providers, verifying the replacement semantics.
        data = {
            "data": [
                {"type": "provider", "id": str(providers_fixture[1].id)},
                {"type": "provider", "id": str(providers_fixture[2].id)},
            ]
        }
        response = authenticated_client.patch(
            reverse(
                "provider_group-providers-relationship",
                kwargs={"pk": provider_group.id},
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        relationships = ProviderGroupMembership.objects.filter(
            provider_group=provider_group.id
        )
        assert relationships.count() == 2
        assert {rel.provider.id for rel in relationships} == {
            providers_fixture[1].id,
            providers_fixture[2].id,
        }

    def test_destroy_relationship(
        self, authenticated_client, providers_fixture, provider_groups_fixture
    ):
        # Create two memberships first so the DELETE actually removes something.
        provider_group, *_ = provider_groups_fixture
        data = {
            "data": [
                {"type": "provider", "id": str(provider.id)}
                for provider in providers_fixture[:2]
            ]
        }
        response = authenticated_client.post(
            reverse(
                "provider_group-providers-relationship",
                kwargs={"pk": provider_group.id},
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        response = authenticated_client.delete(
            reverse(
                "provider_group-providers-relationship",
                kwargs={"pk": provider_group.id},
            ),
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        # Fix: filter by the provider group under test, not by a provider's id.
        # The original filtered on providers_fixture[0].id, which is a Provider
        # primary key, so the queryset was vacuously empty and the assertion
        # could never fail.
        relationships = ProviderGroupMembership.objects.filter(
            provider_group=provider_group.id
        )
        assert relationships.count() == 0

    def test_invalid_provider_group_id(
        self, authenticated_client, provider_groups_fixture
    ):
        # A non-UUID identifier must fail serializer validation with a 400.
        provider_group, *_ = provider_groups_fixture
        invalid_id = "non-existent-id"
        data = {"data": [{"type": "provider-group", "id": invalid_id}]}
        response = authenticated_client.post(
            reverse(
                "provider_group-providers-relationship",
                kwargs={"pk": provider_group.id},
            ),
            data=data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"][0]["detail"]
        assert "valid UUID" in errors
@pytest.mark.django_db
class TestComplianceOverviewViewSet:
def test_compliance_overview_list_none(self, authenticated_client):
@@ -3368,7 +3908,37 @@ class TestOverviewViewSet:
resources_fixture
)
# TODO Add more tests for the rest of overviews
def test_overview_services_list_no_required_filters(
    self, authenticated_client, scan_summaries_fixture
):
    """The services overview endpoint rejects requests without its required filters."""
    url = reverse("overview-services")
    response = authenticated_client.get(url)
    assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_overview_services_list(self, authenticated_client, scan_summaries_fixture):
    """Filtering by inserted_at returns one aggregated entry per service with
    total/pass/fail/muted counts matching the scan_summaries_fixture data."""
    response = authenticated_client.get(
        reverse("overview-services"), {"filter[inserted_at]": TODAY}
    )
    assert response.status_code == status.HTTP_200_OK
    # Only two different services
    assert len(response.json()["data"]) == 2
    # Fixed data from the fixture, TODO improve this at some point with something more dynamic
    service1_data = response.json()["data"][0]
    service2_data = response.json()["data"][1]
    # The service name is exposed as the JSON:API resource id.
    assert service1_data["id"] == "service1"
    assert service2_data["id"] == "service2"
    # TODO fix numbers when muted_findings filter is fixed
    assert service1_data["attributes"]["total"] == 3
    assert service2_data["attributes"]["total"] == 1
    assert service1_data["attributes"]["pass"] == 1
    assert service2_data["attributes"]["pass"] == 1
    assert service1_data["attributes"]["fail"] == 1
    assert service2_data["attributes"]["fail"] == 0
    assert service1_data["attributes"]["muted"] == 1
    assert service2_data["attributes"]["muted"] == 0
@pytest.mark.django_db

View File

@@ -17,6 +17,7 @@ from api.models import (
ComplianceOverview,
Finding,
Invitation,
InvitationRoleRelationship,
Membership,
Provider,
ProviderGroup,
@@ -24,10 +25,13 @@ from api.models import (
ProviderSecret,
Resource,
ResourceTag,
Role,
RoleProviderGroupRelationship,
Scan,
StateChoices,
Task,
User,
UserRoleRelationship,
)
from api.rls import Tenant
@@ -176,10 +180,26 @@ class UserSerializer(BaseSerializerV1):
"""
memberships = serializers.ResourceRelatedField(many=True, read_only=True)
roles = serializers.ResourceRelatedField(many=True, read_only=True)
class Meta:
model = User
fields = ["id", "name", "email", "company_name", "date_joined", "memberships"]
fields = [
"id",
"name",
"email",
"company_name",
"date_joined",
"memberships",
"roles",
]
extra_kwargs = {
"roles": {"read_only": True},
}
included_serializers = {
"roles": "api.v1.serializers.RoleSerializer",
}
class UserCreateSerializer(BaseWriteSerializer):
@@ -235,6 +255,73 @@ class UserUpdateSerializer(BaseWriteSerializer):
return super().update(instance, validated_data)
class RoleResourceIdentifierSerializer(serializers.Serializer):
    """JSON:API resource-identifier object for roles: ``{"type": ..., "id": ...}``.

    DRF reserves the ``type`` attribute name, so the field is declared as
    ``resource_type`` and mapped to/from ``type`` at the (de)serialization
    boundary.
    """

    resource_type = serializers.CharField(source="type")
    id = serializers.UUIDField()

    class JSONAPIMeta:
        resource_name = "role-identifier"

    def to_representation(self, instance):
        """
        Ensure 'type' is used in the output instead of 'resource_type'.
        """
        representation = super().to_representation(instance)
        representation["type"] = representation.pop("resource_type", None)
        return representation

    def to_internal_value(self, data):
        """
        Map 'type' back to 'resource_type' during input.
        """
        # Fix: operate on a shallow copy so the caller's payload dict is not
        # mutated as a side effect of validation (the original popped "type"
        # from the incoming dict in place).
        data = dict(data)
        data["resource_type"] = data.pop("type", None)
        return super().to_internal_value(data)
class UserRoleRelationshipSerializer(RLSSerializer, BaseWriteSerializer):
    """
    Serializer for modifying user memberships.

    Expects a list of role resource-identifier objects; the target user and
    tenant are supplied through the serializer context by the view.
    """

    roles = serializers.ListField(
        child=RoleResourceIdentifierSerializer(),
        help_text="List of resource identifier objects representing roles.",
    )

    def create(self, validated_data):
        # Attach every referenced role to the context user in one bulk insert.
        # NOTE(review): ids not matching an existing Role are silently dropped
        # by the filter — confirm that is the intended behavior.
        role_ids = [item["id"] for item in validated_data["roles"]]
        roles = Role.objects.filter(id__in=role_ids)
        tenant_id = self.context.get("tenant_id")
        new_relationships = [
            UserRoleRelationship(
                user=self.context.get("user"), role=r, tenant_id=tenant_id
            )
            for r in roles
        ]
        UserRoleRelationship.objects.bulk_create(new_relationships)
        return self.context.get("user")

    def update(self, instance, validated_data):
        # Full replacement: clear the user's current roles, then bulk-create
        # the new set.
        role_ids = [item["id"] for item in validated_data["roles"]]
        roles = Role.objects.filter(id__in=role_ids)
        tenant_id = self.context.get("tenant_id")
        instance.roles.clear()
        new_relationships = [
            UserRoleRelationship(user=instance, role=r, tenant_id=tenant_id)
            for r in roles
        ]
        UserRoleRelationship.objects.bulk_create(new_relationships)
        return instance

    class Meta:
        model = UserRoleRelationship
        fields = ["id", "roles"]
# Tasks
class TaskBase(serializers.ModelSerializer):
state_mapping = {
@@ -361,31 +448,30 @@ class ProviderGroupSerializer(RLSSerializer, BaseWriteSerializer):
providers = serializers.ResourceRelatedField(many=True, read_only=True)
def validate(self, attrs):
tenant = self.context["tenant_id"]
name = attrs.get("name", self.instance.name if self.instance else None)
# Exclude the current instance when checking for uniqueness during updates
queryset = ProviderGroup.objects.filter(tenant=tenant, name=name)
if self.instance:
queryset = queryset.exclude(pk=self.instance.pk)
if queryset.exists():
if ProviderGroup.objects.filter(name=attrs.get("name")).exists():
raise serializers.ValidationError(
{
"name": "A provider group with this name already exists for this tenant."
}
{"name": "A provider group with this name already exists."}
)
return super().validate(attrs)
class Meta:
model = ProviderGroup
fields = ["id", "name", "inserted_at", "updated_at", "providers", "url"]
read_only_fields = ["id", "inserted_at", "updated_at"]
fields = [
"id",
"name",
"inserted_at",
"updated_at",
"providers",
"roles",
"url",
]
extra_kwargs = {
"id": {"read_only": True},
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"roles": {"read_only": True},
"url": {"read_only": True},
}
@@ -406,41 +492,75 @@ class ProviderGroupUpdateSerializer(RLSSerializer, BaseWriteSerializer):
fields = ["id", "name"]
class ProviderGroupMembershipUpdateSerializer(RLSSerializer, BaseWriteSerializer):
class ProviderResourceIdentifierSerializer(serializers.Serializer):
    """JSON:API resource-identifier object for providers: ``{"type": ..., "id": ...}``.

    DRF reserves the ``type`` attribute name, so the field is declared as
    ``resource_type`` and mapped to/from ``type`` at the (de)serialization
    boundary.
    """

    resource_type = serializers.CharField(source="type")
    id = serializers.UUIDField()

    class JSONAPIMeta:
        resource_name = "provider-identifier"

    def to_representation(self, instance):
        """
        Ensure 'type' is used in the output instead of 'resource_type'.
        """
        representation = super().to_representation(instance)
        representation["type"] = representation.pop("resource_type", None)
        return representation

    def to_internal_value(self, data):
        """
        Map 'type' back to 'resource_type' during input.
        """
        # Fix: operate on a shallow copy so the caller's payload dict is not
        # mutated as a side effect of validation (the original popped "type"
        # from the incoming dict in place).
        data = dict(data)
        data["resource_type"] = data.pop("type", None)
        return super().to_internal_value(data)
class ProviderGroupMembershipSerializer(RLSSerializer, BaseWriteSerializer):
"""
Serializer for modifying provider group memberships
Serializer for modifying provider_group memberships
"""
provider_ids = serializers.ListField(
child=serializers.UUIDField(),
help_text="List of provider UUIDs to add to the group",
providers = serializers.ListField(
child=ProviderResourceIdentifierSerializer(),
help_text="List of resource identifier objects representing providers.",
)
def validate(self, attrs):
tenant_id = self.context["tenant_id"]
provider_ids = attrs.get("provider_ids", [])
def create(self, validated_data):
provider_ids = [item["id"] for item in validated_data["providers"]]
providers = Provider.objects.filter(id__in=provider_ids)
tenant_id = self.context.get("tenant_id")
existing_provider_ids = set(
Provider.objects.filter(
id__in=provider_ids, tenant_id=tenant_id
).values_list("id", flat=True)
)
provided_provider_ids = set(provider_ids)
missing_provider_ids = provided_provider_ids - existing_provider_ids
if missing_provider_ids:
raise serializers.ValidationError(
{
"provider_ids": f"The following provider IDs do not exist: {', '.join(str(id) for id in missing_provider_ids)}"
}
new_relationships = [
ProviderGroupMembership(
provider_group=self.context.get("provider_group"),
provider=p,
tenant_id=tenant_id,
)
for p in providers
]
ProviderGroupMembership.objects.bulk_create(new_relationships)
return super().validate(attrs)
return self.context.get("provider_group")
def update(self, instance, validated_data):
provider_ids = [item["id"] for item in validated_data["providers"]]
providers = Provider.objects.filter(id__in=provider_ids)
tenant_id = self.context.get("tenant_id")
instance.providers.clear()
new_relationships = [
ProviderGroupMembership(
provider_group=instance, provider=p, tenant_id=tenant_id
)
for p in providers
]
ProviderGroupMembership.objects.bulk_create(new_relationships)
return instance
class Meta:
model = ProviderGroupMembership
fields = ["id", "provider_ids"]
fields = ["id", "providers"]
# Providers
@@ -1034,6 +1154,8 @@ class InvitationSerializer(RLSSerializer):
Serializer for the Invitation model.
"""
roles = serializers.ResourceRelatedField(many=True, queryset=Role.objects.all())
class Meta:
model = Invitation
fields = [
@@ -1043,6 +1165,7 @@ class InvitationSerializer(RLSSerializer):
"email",
"state",
"token",
"roles",
"expires_at",
"inviter",
"url",
@@ -1050,6 +1173,8 @@ class InvitationSerializer(RLSSerializer):
class InvitationBaseWriteSerializer(BaseWriteSerializer):
roles = serializers.ResourceRelatedField(many=True, queryset=Role.objects.all())
def validate_email(self, value):
user = User.objects.filter(email=value).first()
tenant_id = self.context["tenant_id"]
@@ -1086,31 +1211,54 @@ class InvitationCreateSerializer(InvitationBaseWriteSerializer, RLSSerializer):
class Meta:
model = Invitation
fields = ["email", "expires_at", "state", "token", "inviter"]
fields = ["email", "expires_at", "state", "token", "inviter", "roles"]
extra_kwargs = {
"token": {"read_only": True},
"state": {"read_only": True},
"inviter": {"read_only": True},
"expires_at": {"required": False},
"roles": {"required": False},
}
def create(self, validated_data):
inviter = self.context.get("request").user
tenant_id = self.context.get("tenant_id")
validated_data["inviter"] = inviter
return super().create(validated_data)
roles = validated_data.pop("roles", [])
invitation = super().create(validated_data)
for role in roles:
InvitationRoleRelationship.objects.create(
role=role, invitation=invitation, tenant_id=tenant_id
)
return invitation
class InvitationUpdateSerializer(InvitationBaseWriteSerializer):
class Meta:
model = Invitation
fields = ["id", "email", "expires_at", "state", "token"]
fields = ["id", "email", "expires_at", "state", "token", "roles"]
extra_kwargs = {
"token": {"read_only": True},
"state": {"read_only": True},
"expires_at": {"required": False},
"email": {"required": False},
"roles": {"required": False},
}
def update(self, instance, validated_data):
roles = validated_data.pop("roles", [])
tenant_id = self.context.get("tenant_id")
invitation = super().update(instance, validated_data)
if roles:
instance.roles.clear()
for role in roles:
InvitationRoleRelationship.objects.create(
role=role, invitation=invitation, tenant_id=tenant_id
)
return invitation
class InvitationAcceptSerializer(RLSSerializer):
"""Serializer for accepting an invitation."""
@@ -1122,6 +1270,179 @@ class InvitationAcceptSerializer(RLSSerializer):
fields = ["invitation_token"]
# Roles
class RoleSerializer(RLSSerializer, BaseWriteSerializer):
    """Serializer for the Role model: permission flags, computed permission
    state, and relationships to provider groups, users, and invitations."""

    provider_groups = serializers.ResourceRelatedField(
        many=True, queryset=ProviderGroup.objects.all()
    )
    permission_state = serializers.SerializerMethodField()

    def get_permission_state(self, obj):
        # Exposed as a read-only attribute computed on the model.
        return obj.permission_state

    def validate(self, attrs):
        # Fix: enforce name uniqueness without rejecting an update that keeps
        # the instance's own current name — exclude the instance from the
        # uniqueness queryset (the original check matched the instance itself,
        # so every update carrying the same name failed validation).
        name = attrs.get("name", getattr(self.instance, "name", None))
        queryset = Role.objects.filter(name=name)
        if self.instance is not None:
            queryset = queryset.exclude(pk=self.instance.pk)
        if queryset.exists():
            raise serializers.ValidationError(
                {"name": "A role with this name already exists."}
            )
        if attrs.get("manage_providers"):
            # Managing providers implies visibility over all providers.
            attrs["unlimited_visibility"] = True
        # Prevent updates to the admin role
        if getattr(self.instance, "name", None) == "admin":
            raise serializers.ValidationError(
                {"name": "The admin role cannot be updated."}
            )
        return super().validate(attrs)

    class Meta:
        model = Role
        fields = [
            "id",
            "name",
            "manage_users",
            "manage_account",
            "manage_billing",
            "manage_providers",
            "manage_integrations",
            "manage_scans",
            "permission_state",
            "unlimited_visibility",
            "inserted_at",
            "updated_at",
            "provider_groups",
            "users",
            "invitations",
            "url",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
            "users": {"read_only": True},
            "url": {"read_only": True},
        }
class RoleCreateSerializer(RoleSerializer):
    """Create a role together with its initial provider-group and user
    relationships, all scoped to the context tenant."""

    def create(self, validated_data):
        provider_groups = validated_data.pop("provider_groups", [])
        users = validated_data.pop("users", [])
        tenant_id = self.context.get("tenant_id")
        role = Role.objects.create(tenant_id=tenant_id, **validated_data)

        provider_group_relationships = [
            RoleProviderGroupRelationship(
                role=role,
                provider_group=provider_group,
                tenant_id=tenant_id,
            )
            for provider_group in provider_groups
        ]
        RoleProviderGroupRelationship.objects.bulk_create(
            provider_group_relationships
        )

        # Fix: the relationship's role must be the newly created role — the
        # original passed `role=user`, linking a User where a Role belongs.
        user_relationships = [
            UserRoleRelationship(
                role=role,
                user=user,
                tenant_id=tenant_id,
            )
            for user in users
        ]
        UserRoleRelationship.objects.bulk_create(user_relationships)

        return role
class RoleUpdateSerializer(RLSSerializer, BaseWriteSerializer):
    """Serializer for updating a role's writable permission flags.

    Deliberately omits relationship fields (provider_groups, users,
    invitations), which are managed through the dedicated relationship
    endpoints.
    """

    class Meta:
        model = Role
        fields = [
            "id",
            "name",
            "manage_users",
            "manage_account",
            "manage_billing",
            "manage_providers",
            "manage_integrations",
            "manage_scans",
            "unlimited_visibility",
        ]
class ProviderGroupResourceIdentifierSerializer(serializers.Serializer):
    """JSON:API resource-identifier object for provider groups.

    The ``type`` member is modeled as ``resource_type`` (DRF reserves
    ``type``) and translated at the serialization boundary in both
    directions.
    """

    resource_type = serializers.CharField(source="type")
    id = serializers.UUIDField()

    class JSONAPIMeta:
        resource_name = "provider-group-identifier"

    def to_representation(self, instance):
        """Expose the field as 'type' in the rendered output."""
        rendered = super().to_representation(instance)
        rendered["type"] = rendered.pop("resource_type", None)
        return rendered

    def to_internal_value(self, data):
        """Accept the JSON:API 'type' member by mapping it to 'resource_type'."""
        data["resource_type"] = data.pop("type", None)
        return super().to_internal_value(data)
class RoleProviderGroupRelationshipSerializer(RLSSerializer, BaseWriteSerializer):
    """
    Serializer for modifying role memberships.

    Expects a list of provider-group resource-identifier objects; the target
    role and tenant come from the serializer context supplied by the view.
    """

    provider_groups = serializers.ListField(
        child=ProviderGroupResourceIdentifierSerializer(),
        help_text="List of resource identifier objects representing provider groups.",
    )

    def create(self, validated_data):
        # Attach every referenced provider group to the context role in one
        # bulk insert. NOTE(review): ids not matching an existing
        # ProviderGroup are silently dropped by the filter — confirm intended.
        provider_group_ids = [item["id"] for item in validated_data["provider_groups"]]
        provider_groups = ProviderGroup.objects.filter(id__in=provider_group_ids)
        tenant_id = self.context.get("tenant_id")
        new_relationships = [
            RoleProviderGroupRelationship(
                role=self.context.get("role"), provider_group=pg, tenant_id=tenant_id
            )
            for pg in provider_groups
        ]
        RoleProviderGroupRelationship.objects.bulk_create(new_relationships)
        return self.context.get("role")

    def update(self, instance, validated_data):
        # Full replacement: clear current provider groups, then bulk-create
        # the new set.
        provider_group_ids = [item["id"] for item in validated_data["provider_groups"]]
        provider_groups = ProviderGroup.objects.filter(id__in=provider_group_ids)
        tenant_id = self.context.get("tenant_id")
        instance.provider_groups.clear()
        new_relationships = [
            RoleProviderGroupRelationship(
                role=instance, provider_group=pg, tenant_id=tenant_id
            )
            for pg in provider_groups
        ]
        RoleProviderGroupRelationship.objects.bulk_create(new_relationships)
        return instance

    class Meta:
        model = RoleProviderGroupRelationship
        fields = ["id", "provider_groups"]
# Compliance overview
@@ -1334,6 +1655,24 @@ class OverviewSeveritySerializer(serializers.Serializer):
return {"version": "v1"}
class OverviewServiceSerializer(serializers.Serializer):
    """Aggregated per-service finding counts for the services overview.

    The service name is exposed as the JSON:API resource ``id``. The output
    attribute ``pass`` clashes with the Python keyword, so it is declared as
    ``_pass`` and renamed at construction time.
    """

    id = serializers.CharField(source="service")
    total = serializers.IntegerField()
    _pass = serializers.IntegerField()
    fail = serializers.IntegerField()
    muted = serializers.IntegerField()

    class JSONAPIMeta:
        resource_name = "services-overview"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Rename '_pass' to 'pass' in the serialized output ('pass' cannot be
        # used as a class attribute name).
        self.fields["pass"] = self.fields.pop("_pass")

    def get_root_meta(self, _resource, _many):
        # Top-level JSON:API meta object for every response.
        return {"version": "v1"}
# Schedules

View File

@@ -3,16 +3,20 @@ from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers
from api.v1.views import (
ComplianceOverviewViewSet,
CustomTokenObtainView,
CustomTokenRefreshView,
FindingViewSet,
InvitationAcceptViewSet,
InvitationViewSet,
MembershipViewSet,
OverviewViewSet,
ProviderGroupViewSet,
ProviderGroupProvidersRelationshipView,
ProviderSecretViewSet,
InvitationViewSet,
InvitationAcceptViewSet,
RoleViewSet,
RoleProviderGroupRelationshipView,
UserRoleRelationshipView,
OverviewViewSet,
ComplianceOverviewViewSet,
ProviderViewSet,
ResourceViewSet,
ScanViewSet,
@@ -29,11 +33,12 @@ router = routers.DefaultRouter(trailing_slash=False)
router.register(r"users", UserViewSet, basename="user")
router.register(r"tenants", TenantViewSet, basename="tenant")
router.register(r"providers", ProviderViewSet, basename="provider")
router.register(r"provider_groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"provider-groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"scans", ScanViewSet, basename="scan")
router.register(r"tasks", TaskViewSet, basename="task")
router.register(r"resources", ResourceViewSet, basename="resource")
router.register(r"findings", FindingViewSet, basename="finding")
router.register(r"roles", RoleViewSet, basename="role")
router.register(
r"compliance-overviews", ComplianceOverviewViewSet, basename="complianceoverview"
)
@@ -80,6 +85,27 @@ urlpatterns = [
InvitationAcceptViewSet.as_view({"post": "accept"}),
name="invitation-accept",
),
path(
"roles/<uuid:pk>/relationships/provider_groups",
RoleProviderGroupRelationshipView.as_view(
{"post": "create", "patch": "partial_update", "delete": "destroy"}
),
name="role-provider-groups-relationship",
),
path(
"users/<uuid:pk>/relationships/roles",
UserRoleRelationshipView.as_view(
{"post": "create", "patch": "partial_update", "delete": "destroy"}
),
name="user-roles-relationship",
),
path(
"provider-groups/<uuid:pk>/relationships/providers",
ProviderGroupProvidersRelationshipView.as_view(
{"post": "create", "patch": "partial_update", "delete": "destroy"}
),
name="provider_group-providers-relationship",
),
path("", include(router.urls)),
path("", include(tenants_router.urls)),
path("", include(users_router.urls)),

View File

@@ -16,6 +16,7 @@ from drf_spectacular.utils import (
extend_schema_view,
)
from drf_spectacular.views import SpectacularAPIView
from drf_spectacular_jsonapi.schemas.openapi import JsonApiAutoSchema
from rest_framework import permissions, status
from rest_framework.decorators import action
from rest_framework.exceptions import (
@@ -25,12 +26,14 @@ from rest_framework.exceptions import (
ValidationError,
)
from rest_framework.generics import GenericAPIView, get_object_or_404
from rest_framework_json_api.views import Response
from rest_framework.permissions import SAFE_METHODS
from rest_framework_json_api.views import RelationshipView, Response
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from tasks.beat import schedule_provider_scan
from tasks.tasks import (
check_provider_connection_task,
delete_provider_task,
delete_tenant_task,
perform_scan_summary_task,
perform_scan_task,
)
@@ -46,8 +49,10 @@ from api.filters import (
ProviderGroupFilter,
ProviderSecretFilter,
ResourceFilter,
RoleFilter,
ScanFilter,
ScanSummaryFilter,
ServiceOverviewFilter,
TaskFilter,
TenantFilter,
UserFilter,
@@ -62,6 +67,8 @@ from api.models import (
ProviderGroupMembership,
ProviderSecret,
Resource,
Role,
RoleProviderGroupRelationship,
Scan,
ScanSummary,
SeverityChoices,
@@ -69,8 +76,10 @@ from api.models import (
StatusChoices,
Task,
User,
UserRoleRelationship,
)
from api.pagination import ComplianceOverviewPagination
from api.rbac.permissions import Permissions, get_providers, get_role
from api.rls import Tenant
from api.utils import validate_invitation
from api.uuid_utils import datetime_to_uuid7
@@ -86,9 +95,10 @@ from api.v1.serializers import (
MembershipSerializer,
OverviewFindingSerializer,
OverviewProviderSerializer,
OverviewServiceSerializer,
OverviewSeveritySerializer,
ProviderCreateSerializer,
ProviderGroupMembershipUpdateSerializer,
ProviderGroupMembershipSerializer,
ProviderGroupSerializer,
ProviderGroupUpdateSerializer,
ProviderSecretCreateSerializer,
@@ -97,6 +107,10 @@ from api.v1.serializers import (
ProviderSerializer,
ProviderUpdateSerializer,
ResourceSerializer,
RoleCreateSerializer,
RoleProviderGroupRelationshipSerializer,
RoleSerializer,
RoleUpdateSerializer,
ScanCreateSerializer,
ScanSerializer,
ScanUpdateSerializer,
@@ -106,6 +120,7 @@ from api.v1.serializers import (
TokenRefreshSerializer,
TokenSerializer,
UserCreateSerializer,
UserRoleRelationshipSerializer,
UserSerializer,
UserUpdateSerializer,
)
@@ -116,6 +131,11 @@ CACHE_DECORATOR = cache_control(
)
class RelationshipViewSchema(JsonApiAutoSchema):
    """OpenAPI schema for relationship views.

    Overrides path-parameter resolution to return nothing, so the generated
    schema for these endpoints carries no auto-resolved path parameters.
    """

    def _resolve_path_parameters(self, _path_variables):
        # No path parameters are documented for relationship endpoints.
        return []
@extend_schema(
tags=["Token"],
summary="Obtain a token",
@@ -171,7 +191,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.0.0"
spectacular_settings.VERSION = "1.1.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -270,6 +290,19 @@ class UserViewSet(BaseUserViewset):
filterset_class = UserFilter
ordering = ["-date_joined"]
ordering_fields = ["name", "email", "company_name", "date_joined", "is_active"]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_USERS]
def set_required_permissions(self):
    """
    Set the required RBAC permissions based on the current action:
    'me' is open to any authenticated user, everything else requires
    MANAGE_USERS.
    """
    self.required_permissions = (
        [] if self.action == "me" else [Permissions.MANAGE_USERS]
    )
def get_queryset(self):
# If called during schema generation, return an empty queryset
@@ -346,11 +379,125 @@ class UserViewSet(BaseUserViewset):
user=user, tenant=tenant, role=role
)
if invitation:
user_role = []
for role in invitation.roles.all():
user_role.append(
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user, role=role, tenant=invitation.tenant
)
)
invitation.state = Invitation.State.ACCEPTED
invitation.save(using=MainRouter.admin_db)
else:
role = Role.objects.using(MainRouter.admin_db).create(
name="admin",
tenant_id=tenant.id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user,
role=role,
tenant_id=tenant.id,
)
return Response(data=UserSerializer(user).data, status=status.HTTP_201_CREATED)
@extend_schema_view(
    create=extend_schema(
        tags=["User"],
        summary="Create a new user-roles relationship",
        description="Add a new user-roles relationship to the system by providing the required user-roles details.",
        responses={
            204: OpenApiResponse(description="Relationship created successfully"),
            400: OpenApiResponse(
                description="Bad request (e.g., relationship already exists)"
            ),
        },
    ),
    partial_update=extend_schema(
        tags=["User"],
        summary="Partially update a user-roles relationship",
        description="Update the user-roles relationship information without affecting other fields.",
        responses={
            204: OpenApiResponse(
                response=None, description="Relationship updated successfully"
            )
        },
    ),
    destroy=extend_schema(
        tags=["User"],
        summary="Delete a user-roles relationship",
        description="Remove the user-roles relationship from the system by their ID.",
        responses={
            204: OpenApiResponse(
                response=None, description="Relationship deleted successfully"
            )
        },
    ),
)
class UserRoleRelationshipView(RelationshipView, BaseRLSViewSet):
    """JSON:API relationship view managing the roles attached to a user.

    POST adds roles (rejecting duplicates), PATCH replaces the full set,
    DELETE clears all of the user's roles.
    """

    queryset = User.objects.all()
    serializer_class = UserRoleRelationshipSerializer
    resource_name = "roles"
    http_method_names = ["post", "patch", "delete"]
    schema = RelationshipViewSchema()
    # RBAC required permissions
    required_permissions = [Permissions.MANAGE_USERS]

    def get_queryset(self):
        return User.objects.all()

    def create(self, request, *args, **kwargs):
        # request.data is the JSON:API "data" array of resource identifiers.
        user = self.get_object()
        role_ids = [item["id"] for item in request.data]
        # Reject the whole request if any posted role is already linked.
        existing_relationships = UserRoleRelationship.objects.filter(
            user=user, role_id__in=role_ids
        )
        if existing_relationships.exists():
            return Response(
                {"detail": "One or more roles are already associated with the user."},
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer = self.get_serializer(
            data={"roles": request.data},
            context={
                "user": user,
                "tenant_id": self.request.tenant_id,
                "request": request,
            },
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def partial_update(self, request, *args, **kwargs):
        # Full replacement of the user's role set (serializer.update clears
        # existing roles before re-creating).
        user = self.get_object()
        serializer = self.get_serializer(
            instance=user,
            data={"roles": request.data},
            context={"tenant_id": self.request.tenant_id, "request": request},
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def destroy(self, request, *args, **kwargs):
        # Remove every role from the user.
        user = self.get_object()
        user.roles.clear()
        return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
list=extend_schema(
tags=["Tenant"],
@@ -388,6 +535,8 @@ class TenantViewSet(BaseTenantViewset):
search_fields = ["name"]
ordering = ["-inserted_at"]
ordering_fields = ["name", "inserted_at", "updated_at"]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_ACCOUNT]
def get_queryset(self):
return Tenant.objects.all()
@@ -401,6 +550,25 @@ class TenantViewSet(BaseTenantViewset):
)
return Response(data=serializer.data, status=status.HTTP_201_CREATED)
def destroy(self, request, *args, **kwargs):
    """Delete a tenant and its dependent records.

    Memberships belonging to the tenant are removed, users left without
    any membership are purged, and the tenant's own data is deleted
    asynchronously by a background task. Returns 204 on success.
    """
    # This will perform validation and raise a 404 if the tenant does not exist
    tenant_id = kwargs.get("pk")
    get_object_or_404(Tenant, id=tenant_id)
    with transaction.atomic():
        # Delete memberships
        Membership.objects.using(MainRouter.admin_db).filter(
            tenant_id=tenant_id
        ).delete()
        # Delete users without memberships
        User.objects.using(MainRouter.admin_db).filter(
            membership__isnull=True
        ).delete()
        # Delete tenant in batches
        # NOTE(review): the task appears to be enqueued inside the atomic
        # block — confirm the worker cannot start before the transaction
        # commits (otherwise move the enqueue after the `with` block).
        delete_tenant_task.apply_async(kwargs={"tenant_id": tenant_id})
    return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
list=extend_schema(
@@ -426,6 +594,8 @@ class MembershipViewSet(BaseTenantViewset):
"role",
"date_joined",
]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_ACCOUNT]
def get_queryset(self):
user = self.request.user
@@ -459,6 +629,8 @@ class TenantMembersViewSet(BaseTenantViewset):
http_method_names = ["get", "delete"]
serializer_class = MembershipSerializer
queryset = Membership.objects.none()
# RBAC required permissions
required_permissions = [Permissions.MANAGE_ACCOUNT]
def get_queryset(self):
tenant = self.get_tenant()
@@ -542,66 +714,126 @@ class ProviderGroupViewSet(BaseRLSViewSet):
queryset = ProviderGroup.objects.all()
serializer_class = ProviderGroupSerializer
filterset_class = ProviderGroupFilter
http_method_names = ["get", "post", "patch", "put", "delete"]
http_method_names = ["get", "post", "patch", "delete"]
ordering = ["inserted_at"]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_PROVIDERS]
def set_required_permissions(self):
"""
Returns the required permissions based on the request method.
"""
if self.request.method in SAFE_METHODS:
# No permissions required for GET requests
self.required_permissions = []
else:
# Require permission for non-GET requests
self.required_permissions = [Permissions.MANAGE_PROVIDERS]
def get_queryset(self):
return ProviderGroup.objects.prefetch_related("providers")
user_roles = get_role(self.request.user)
# Check if any of the user's roles have UNLIMITED_VISIBILITY
if user_roles.unlimited_visibility:
# User has unlimited visibility, return all provider groups
return ProviderGroup.objects.prefetch_related("providers")
# Collect provider groups associated with the user's roles
return user_roles.provider_groups.all()
def get_serializer_class(self):
if self.action == "partial_update":
return ProviderGroupUpdateSerializer
elif self.action == "providers":
if hasattr(self, "response_serializer_class"):
return self.response_serializer_class
return ProviderGroupMembershipUpdateSerializer
return super().get_serializer_class()
@extend_schema(
tags=["Provider Group"],
summary="Add providers to a provider group",
description="Add one or more providers to an existing provider group.",
request=ProviderGroupMembershipUpdateSerializer,
responses={200: OpenApiResponse(response=ProviderGroupSerializer)},
)
@action(detail=True, methods=["put"], url_name="providers")
def providers(self, request, pk=None):
@extend_schema(tags=["Provider Group"])
@extend_schema_view(
create=extend_schema(
summary="Create a new provider_group-providers relationship",
description="Add a new provider_group-providers relationship to the system by providing the required provider_group-providers details.",
responses={
204: OpenApiResponse(description="Relationship created successfully"),
400: OpenApiResponse(
description="Bad request (e.g., relationship already exists)"
),
},
),
partial_update=extend_schema(
summary="Partially update a provider_group-providers relationship",
description="Update the provider_group-providers relationship information without affecting other fields.",
responses={
204: OpenApiResponse(
response=None, description="Relationship updated successfully"
)
},
),
destroy=extend_schema(
summary="Delete a provider_group-providers relationship",
description="Remove the provider_group-providers relationship from the system by their ID.",
responses={
204: OpenApiResponse(
response=None, description="Relationship deleted successfully"
)
},
),
)
class ProviderGroupProvidersRelationshipView(RelationshipView, BaseRLSViewSet):
queryset = ProviderGroup.objects.all()
serializer_class = ProviderGroupMembershipSerializer
resource_name = "providers"
http_method_names = ["post", "patch", "delete"]
schema = RelationshipViewSchema()
# RBAC required permissions
required_permissions = [Permissions.MANAGE_PROVIDERS]
def get_queryset(self):
return ProviderGroup.objects.all()
def create(self, request, *args, **kwargs):
provider_group = self.get_object()
# Validate input data
serializer = self.get_serializer_class()(
data=request.data,
context=self.get_serializer_context(),
provider_ids = [item["id"] for item in request.data]
existing_relationships = ProviderGroupMembership.objects.filter(
provider_group=provider_group, provider_id__in=provider_ids
)
if existing_relationships.exists():
return Response(
{
"detail": "One or more providers are already associated with the provider_group."
},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = self.get_serializer(
data={"providers": request.data},
context={
"provider_group": provider_group,
"tenant_id": self.request.tenant_id,
"request": request,
},
)
serializer.is_valid(raise_exception=True)
serializer.save()
provider_ids = serializer.validated_data["provider_ids"]
return Response(status=status.HTTP_204_NO_CONTENT)
# Update memberships
ProviderGroupMembership.objects.filter(
provider_group=provider_group, tenant_id=request.tenant_id
).delete()
provider_group_memberships = [
ProviderGroupMembership(
tenant_id=self.request.tenant_id,
provider_group=provider_group,
provider_id=provider_id,
)
for provider_id in provider_ids
]
ProviderGroupMembership.objects.bulk_create(
provider_group_memberships, ignore_conflicts=True
def partial_update(self, request, *args, **kwargs):
provider_group = self.get_object()
serializer = self.get_serializer(
instance=provider_group,
data={"providers": request.data},
context={"tenant_id": self.request.tenant_id, "request": request},
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(status=status.HTTP_204_NO_CONTENT)
# Return the updated provider group with providers
provider_group.refresh_from_db()
self.response_serializer_class = ProviderGroupSerializer
response_serializer = ProviderGroupSerializer(
provider_group, context=self.get_serializer_context()
)
return Response(data=response_serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, *args, **kwargs):
provider_group = self.get_object()
provider_group.providers.clear()
return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
@@ -651,9 +883,28 @@ class ProviderViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_PROVIDERS]
def set_required_permissions(self):
"""
Returns the required permissions based on the request method.
"""
if self.request.method in SAFE_METHODS:
# No permissions required for GET requests
self.required_permissions = []
else:
# Require permission for non-GET requests
self.required_permissions = [Permissions.MANAGE_PROVIDERS]
def get_queryset(self):
return Provider.objects.all()
user_roles = get_role(self.request.user)
if user_roles.unlimited_visibility:
# User has unlimited visibility, return all providers
return Provider.objects.all()
# User lacks permission, filter providers based on provider groups associated with the role
return get_providers(user_roles)
def get_serializer_class(self):
if self.action == "create":
@@ -773,9 +1024,28 @@ class ScanViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_SCANS]
def set_required_permissions(self):
"""
Returns the required permissions based on the request method.
"""
if self.request.method in SAFE_METHODS:
# No permissions required for GET requests
self.required_permissions = [Permissions.MANAGE_PROVIDERS]
else:
# Require permission for non-GET requests
self.required_permissions = [Permissions.MANAGE_SCANS]
def get_queryset(self):
return Scan.objects.all()
user_roles = get_role(self.request.user)
if user_roles.unlimited_visibility:
# User has unlimited visibility, return all scans
return Scan.objects.all()
# User lacks permission, filter providers based on provider groups associated with the role
return Scan.objects.filter(provider__in=get_providers(user_roles))
def get_serializer_class(self):
if self.action == "create":
@@ -865,10 +1135,13 @@ class TaskViewSet(BaseRLSViewSet):
search_fields = ["name"]
ordering = ["-inserted_at"]
ordering_fields = ["inserted_at", "completed_at", "name", "state"]
# RBAC required permissions
required_permissions = []
def get_queryset(self):
return Task.objects.annotate(
name=F("task_runner_task__task_name"), state=F("task_runner_task__status")
name=F("task_runner_task__task_name"),
state=F("task_runner_task__status"),
)
def destroy(self, request, *args, pk=None, **kwargs):
@@ -930,11 +1203,19 @@ class ResourceViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
# RBAC required permissions (implicit -> MANAGE_PROVIDERS enable unlimited visibility or check the visibility of the provider through the provider group)
required_permissions = []
def get_queryset(self):
queryset = Resource.objects.all()
search_value = self.request.query_params.get("filter[search]", None)
user_roles = get_role(self.request.user)
if user_roles.unlimited_visibility:
# User has unlimited visibility, return all scans
queryset = Resource.objects.all()
else:
# User lacks permission, filter providers based on provider groups associated with the role
queryset = Resource.objects.filter(provider__in=get_providers(user_roles))
search_value = self.request.query_params.get("filter[search]", None)
if search_value:
# Django's ORM will build a LEFT JOIN and OUTER JOIN on the "through" table, resulting in duplicates
# The duplicates then require a `distinct` query
@@ -1005,11 +1286,8 @@ class FindingViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
def inserted_at_to_uuidv7(self, inserted_at):
if inserted_at is None:
return None
return datetime_to_uuid7(inserted_at)
# RBAC required permissions (implicit -> MANAGE_PROVIDERS enable unlimited visibility or check the visibility of the provider through the provider group)
required_permissions = []
def get_serializer_class(self):
if self.action == "findings_services_regions":
@@ -1018,9 +1296,17 @@ class FindingViewSet(BaseRLSViewSet):
return super().get_serializer_class()
def get_queryset(self):
queryset = Finding.objects.all()
search_value = self.request.query_params.get("filter[search]", None)
user_roles = get_role(self.request.user)
if user_roles.unlimited_visibility:
# User has unlimited visibility, return all scans
queryset = Finding.objects.all()
else:
# User lacks permission, filter providers based on provider groups associated with the role
queryset = Finding.objects.filter(
scan__provider__in=get_providers(user_roles)
)
search_value = self.request.query_params.get("filter[search]", None)
if search_value:
# Django's ORM will build a LEFT JOIN and OUTER JOIN on any "through" tables, resulting in duplicates
# The duplicates then require a `distinct` query
@@ -1048,6 +1334,11 @@ class FindingViewSet(BaseRLSViewSet):
return queryset
def inserted_at_to_uuidv7(self, inserted_at):
if inserted_at is None:
return None
return datetime_to_uuid7(inserted_at)
@action(detail=False, methods=["get"], url_name="findings_services_regions")
def findings_services_regions(self, request):
queryset = self.get_queryset()
@@ -1111,6 +1402,8 @@ class ProviderSecretViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_PROVIDERS]
def get_queryset(self):
return ProviderSecret.objects.all()
@@ -1168,6 +1461,8 @@ class InvitationViewSet(BaseRLSViewSet):
"state",
"inviter",
]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_ACCOUNT]
def get_queryset(self):
return Invitation.objects.all()
@@ -1255,6 +1550,13 @@ class InvitationAcceptViewSet(BaseRLSViewSet):
user=user,
tenant=invitation.tenant,
)
user_role = []
for role in invitation.roles.all():
user_role.append(
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user, role=role, tenant=invitation.tenant
)
)
invitation.state = Invitation.State.ACCEPTED
invitation.save(using=MainRouter.admin_db)
@@ -1263,6 +1565,154 @@ class InvitationAcceptViewSet(BaseRLSViewSet):
return Response(data=membership_serializer.data, status=status.HTTP_201_CREATED)
@extend_schema(tags=["Role"])
@extend_schema_view(
list=extend_schema(
tags=["Role"],
summary="List all roles",
description="Retrieve a list of all roles with options for filtering by various criteria.",
),
retrieve=extend_schema(
tags=["Role"],
summary="Retrieve data from a role",
description="Fetch detailed information about a specific role by their ID.",
),
create=extend_schema(
tags=["Role"],
summary="Create a new role",
description="Add a new role to the system by providing the required role details.",
),
partial_update=extend_schema(
tags=["Role"],
summary="Partially update a role",
description="Update certain fields of an existing role's information without affecting other fields.",
responses={200: RoleSerializer},
),
destroy=extend_schema(
tags=["Role"],
summary="Delete a role",
description="Remove a role from the system by their ID.",
),
)
class RoleViewSet(BaseRLSViewSet):
    """CRUD endpoints for RBAC roles within the current tenant."""

    queryset = Role.objects.all()
    serializer_class = RoleSerializer
    filterset_class = RoleFilter
    http_method_names = ["get", "post", "patch", "delete"]
    ordering = ["inserted_at"]
    # RBAC required permissions
    required_permissions = [Permissions.MANAGE_ACCOUNT]

    def get_queryset(self):
        return Role.objects.all()

    def get_serializer_class(self):
        # Dispatch on the current action; fall back to the default serializer.
        per_action = {
            "create": RoleCreateSerializer,
            "partial_update": RoleUpdateSerializer,
        }
        selected = per_action.get(self.action)
        if selected is not None:
            return selected
        return super().get_serializer_class()

    def partial_update(self, request, *args, **kwargs):
        own_role = get_role(request.user)
        # A user editing their own role must not change manage_account:
        # overwrite whatever was submitted with the current value.
        if own_role and kwargs["pk"] == str(own_role.id):
            request.data["manage_account"] = str(own_role.manage_account).lower()
        return super().partial_update(request, *args, **kwargs)
@extend_schema_view(
create=extend_schema(
tags=["Role"],
summary="Create a new role-provider_groups relationship",
description="Add a new role-provider_groups relationship to the system by providing the required role-provider_groups details.",
responses={
204: OpenApiResponse(description="Relationship created successfully"),
400: OpenApiResponse(
description="Bad request (e.g., relationship already exists)"
),
},
),
partial_update=extend_schema(
tags=["Role"],
summary="Partially update a role-provider_groups relationship",
description="Update the role-provider_groups relationship information without affecting other fields.",
responses={
204: OpenApiResponse(
response=None, description="Relationship updated successfully"
)
},
),
destroy=extend_schema(
tags=["Role"],
summary="Delete a role-provider_groups relationship",
description="Remove the role-provider_groups relationship from the system by their ID.",
responses={
204: OpenApiResponse(
response=None, description="Relationship deleted successfully"
)
},
),
)
class RoleProviderGroupRelationshipView(RelationshipView, BaseRLSViewSet):
    """Endpoints for managing the role <-> provider_groups relationship.

    POST attaches provider groups to a role, PATCH replaces the full set,
    and DELETE detaches every provider group from the role.
    """

    queryset = Role.objects.all()
    serializer_class = RoleProviderGroupRelationshipSerializer
    resource_name = "provider_groups"
    http_method_names = ["post", "patch", "delete"]
    schema = RelationshipViewSchema()
    # RBAC required permissions
    required_permissions = [Permissions.MANAGE_ACCOUNT]

    def get_queryset(self):
        return Role.objects.all()

    def create(self, request, *args, **kwargs):
        # Attach provider groups; duplicate associations yield a 400.
        target_role = self.get_object()
        requested_ids = [entry["id"] for entry in request.data]
        if RoleProviderGroupRelationship.objects.filter(
            role=target_role, provider_group_id__in=requested_ids
        ).exists():
            return Response(
                {
                    "detail": "One or more provider groups are already associated with the role."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer = self.get_serializer(
            data={"provider_groups": request.data},
            context={
                "role": target_role,
                "tenant_id": self.request.tenant_id,
                "request": request,
            },
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def partial_update(self, request, *args, **kwargs):
        # Replace the role's provider groups with the submitted set.
        target_role = self.get_object()
        serializer = self.get_serializer(
            instance=target_role,
            data={"provider_groups": request.data},
            context={"tenant_id": self.request.tenant_id, "request": request},
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def destroy(self, request, *args, **kwargs):
        # Detach every provider group from the role.
        self.get_object().provider_groups.clear()
        return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
list=extend_schema(
tags=["Compliance Overview"],
@@ -1297,12 +1747,32 @@ class ComplianceOverviewViewSet(BaseRLSViewSet):
search_fields = ["compliance_id"]
ordering = ["compliance_id"]
ordering_fields = ["inserted_at", "compliance_id", "framework", "region"]
# RBAC required permissions (implicit -> MANAGE_PROVIDERS enable unlimited visibility or check the visibility of the provider through the provider group)
required_permissions = []
def get_queryset(self):
if self.action == "retrieve":
return ComplianceOverview.objects.all()
role = get_role(self.request.user)
unlimited_visibility = getattr(
role, Permissions.UNLIMITED_VISIBILITY.value, False
)
base_queryset = self.filter_queryset(ComplianceOverview.objects.all())
if self.action == "retrieve":
if unlimited_visibility:
# User has unlimited visibility, return all compliance compliances
return ComplianceOverview.objects.all()
providers = get_providers(role)
return ComplianceOverview.objects.filter(scan__provider__in=providers)
if unlimited_visibility:
base_queryset = self.filter_queryset(ComplianceOverview.objects.all())
else:
providers = Provider.objects.filter(
provider_groups__in=role.provider_groups.all()
).distinct()
base_queryset = self.filter_queryset(
ComplianceOverview.objects.filter(scan__provider__in=providers)
)
max_failed_ids = (
base_queryset.filter(compliance_id=OuterRef("compliance_id"))
@@ -1310,12 +1780,10 @@ class ComplianceOverviewViewSet(BaseRLSViewSet):
.values("id")[:1]
)
queryset = base_queryset.filter(id__in=Subquery(max_failed_ids)).order_by(
return base_queryset.filter(id__in=Subquery(max_failed_ids)).order_by(
"compliance_id"
)
return queryset
def get_serializer_class(self):
if self.action == "retrieve":
return ComplianceOverviewFullSerializer
@@ -1367,20 +1835,37 @@ class ComplianceOverviewViewSet(BaseRLSViewSet):
),
filters=True,
),
services=extend_schema(
summary="Get findings data by service",
description=(
"Retrieve an aggregated summary of findings grouped by service. The response includes the total count "
"of findings for each service, as long as there are at least one finding for that service. At least "
"one of the `inserted_at` filters must be provided."
),
filters=True,
),
)
@method_decorator(CACHE_DECORATOR, name="list")
class OverviewViewSet(BaseRLSViewSet):
queryset = ComplianceOverview.objects.all()
http_method_names = ["get"]
ordering = ["-id"]
# RBAC required permissions (implicit -> MANAGE_PROVIDERS enable unlimited visibility or check the visibility of the provider through the provider group)
required_permissions = []
def get_queryset(self):
role = get_role(self.request.user)
providers = get_providers(role)
def _get_filtered_queryset(model):
if role.unlimited_visibility:
return model.objects.all()
return model.objects.filter(scan__provider__in=providers)
if self.action == "providers":
return Finding.objects.all()
elif self.action == "findings":
return ScanSummary.objects.all()
elif self.action == "findings_severity":
return ScanSummary.objects.all()
return _get_filtered_queryset(Finding)
elif self.action in ("findings", "findings_severity", "services"):
return _get_filtered_queryset(ScanSummary)
else:
return super().get_queryset()
@@ -1391,6 +1876,8 @@ class OverviewViewSet(BaseRLSViewSet):
return OverviewFindingSerializer
elif self.action == "findings_severity":
return OverviewSeveritySerializer
elif self.action == "services":
return OverviewServiceSerializer
return super().get_serializer_class()
def get_filterset_class(self):
@@ -1398,6 +1885,8 @@ class OverviewViewSet(BaseRLSViewSet):
return None
elif self.action in ["findings", "findings_severity"]:
return ScanSummaryFilter
elif self.action == "services":
return ServiceOverviewFilter
return None
@extend_schema(exclude=True)
@@ -1543,6 +2032,38 @@ class OverviewViewSet(BaseRLSViewSet):
serializer = OverviewSeveritySerializer(severity_data)
return Response(serializer.data, status=status.HTTP_200_OK)
@action(detail=False, methods=["get"], url_name="services")
def services(self, request):
queryset = self.get_queryset()
filtered_queryset = self.filter_queryset(queryset)
latest_scan_subquery = (
Scan.objects.filter(
state=StateChoices.COMPLETED, provider_id=OuterRef("scan__provider_id")
)
.order_by("-id")
.values("id")[:1]
)
annotated_queryset = filtered_queryset.annotate(
latest_scan_id=Subquery(latest_scan_subquery)
)
filtered_queryset = annotated_queryset.filter(scan_id=F("latest_scan_id"))
services_data = (
filtered_queryset.values("service")
.annotate(_pass=Sum("_pass"))
.annotate(fail=Sum("fail"))
.annotate(muted=Sum("muted"))
.annotate(total=Sum("total"))
.order_by("service")
)
serializer = OverviewServiceSerializer(services_data, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@extend_schema(tags=["Schedule"])
@extend_schema_view(
@@ -1558,6 +2079,8 @@ class ScheduleViewSet(BaseRLSViewSet):
# TODO: change to Schedule when implemented
queryset = Task.objects.none()
http_method_names = ["post"]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_SCANS]
def get_queryset(self):
return super().get_queryset()

View File

@@ -35,10 +35,10 @@ class RLSTask(Task):
**options,
)
task_result_instance = TaskResult.objects.get(task_id=result.task_id)
from api.db_utils import tenant_transaction
from api.db_utils import rls_transaction
tenant_id = kwargs.get("tenant_id")
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
APITask.objects.create(
id=task_result_instance.task_id,
tenant_id=tenant_id,

View File

@@ -10,8 +10,8 @@ DATABASES = {
"default": {
"ENGINE": "psqlextra.backend",
"NAME": "prowler_db_test",
"USER": env("POSTGRES_USER", default="prowler"),
"PASSWORD": env("POSTGRES_PASSWORD", default="S3cret"),
"USER": env("POSTGRES_USER", default="prowler_admin"),
"PASSWORD": env("POSTGRES_PASSWORD", default="postgres"),
"HOST": env("POSTGRES_HOST", default="localhost"),
"PORT": env("POSTGRES_PORT", default="5432"),
},

View File

@@ -1,35 +1,39 @@
import logging
from datetime import datetime, timedelta, timezone
from unittest.mock import patch
import pytest
from django.conf import settings
from datetime import datetime, timezone, timedelta
from django.db import connections as django_connections, connection as django_connection
from django.db import connection as django_connection
from django.db import connections as django_connections
from django.urls import reverse
from django_celery_results.models import TaskResult
from prowler.lib.check.models import Severity
from prowler.lib.outputs.finding import Status
from rest_framework import status
from rest_framework.test import APIClient
from api.db_utils import rls_transaction
from api.models import (
ComplianceOverview,
Finding,
)
from api.models import (
User,
Invitation,
Membership,
Provider,
ProviderGroup,
ProviderSecret,
Resource,
ResourceTag,
Role,
Scan,
ScanSummary,
StateChoices,
Task,
Membership,
ProviderSecret,
Invitation,
ComplianceOverview,
User,
UserRoleRelationship,
)
from api.rls import Tenant
from api.v1.serializers import TokenSerializer
from prowler.lib.check.models import Severity
from prowler.lib.outputs.finding import Status
API_JSON_CONTENT_TYPE = "application/vnd.api+json"
NO_TENANT_HTTP_STATUS = status.HTTP_401_UNAUTHORIZED
@@ -83,8 +87,148 @@ def create_test_user(django_db_setup, django_db_blocker):
return user
@pytest.fixture(scope="function")
def create_test_user_rbac(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
user = User.objects.create_user(
name="testing",
email="rbac@rbac.com",
password=TEST_PASSWORD,
)
tenant = Tenant.objects.create(
name="Tenant Test",
)
Membership.objects.create(
user=user,
tenant=tenant,
role=Membership.RoleChoices.OWNER,
)
Role.objects.create(
name="admin",
tenant_id=tenant.id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.create(
user=user,
role=Role.objects.get(name="admin"),
tenant_id=tenant.id,
)
return user
@pytest.fixture(scope="function")
def create_test_user_rbac_no_roles(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
user = User.objects.create_user(
name="testing",
email="rbac_noroles@rbac.com",
password=TEST_PASSWORD,
)
tenant = Tenant.objects.create(
name="Tenant Test",
)
Membership.objects.create(
user=user,
tenant=tenant,
role=Membership.RoleChoices.OWNER,
)
return user
@pytest.fixture(scope="function")
def create_test_user_rbac_limited(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
user = User.objects.create_user(
name="testing_limited",
email="rbac_limited@rbac.com",
password=TEST_PASSWORD,
)
tenant = Tenant.objects.create(
name="Tenant Test",
)
Membership.objects.create(
user=user,
tenant=tenant,
role=Membership.RoleChoices.OWNER,
)
Role.objects.create(
name="limited",
tenant_id=tenant.id,
manage_users=False,
manage_account=False,
manage_billing=False,
manage_providers=False,
manage_integrations=False,
manage_scans=False,
unlimited_visibility=False,
)
UserRoleRelationship.objects.create(
user=user,
role=Role.objects.get(name="limited"),
tenant_id=tenant.id,
)
return user
@pytest.fixture
def authenticated_client(create_test_user, tenants_fixture, client):
def authenticated_client_rbac(create_test_user_rbac, tenants_fixture, client):
client.user = create_test_user_rbac
serializer = TokenSerializer(
data={"type": "tokens", "email": "rbac@rbac.com", "password": TEST_PASSWORD}
)
serializer.is_valid()
access_token = serializer.validated_data["access"]
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
return client
@pytest.fixture
def authenticated_client_rbac_noroles(
    create_test_user_rbac_no_roles, tenants_fixture, client
):
    """API client authenticated as the role-less RBAC test user."""
    client.user = create_test_user_rbac_no_roles
    credentials = {
        "type": "tokens",
        "email": "rbac_noroles@rbac.com",
        "password": TEST_PASSWORD,
    }
    token_serializer = TokenSerializer(data=credentials)
    token_serializer.is_valid()
    token = token_serializer.validated_data["access"]
    client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {token}"
    return client
@pytest.fixture
def authenticated_client_no_permissions_rbac(
    create_test_user_rbac_limited, tenants_fixture, client
):
    """API client authenticated as the zero-permission RBAC test user."""
    client.user = create_test_user_rbac_limited
    credentials = {
        "type": "tokens",
        "email": "rbac_limited@rbac.com",
        "password": TEST_PASSWORD,
    }
    token_serializer = TokenSerializer(data=credentials)
    token_serializer.is_valid()
    token = token_serializer.validated_data["access"]
    client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {token}"
    return client
@pytest.fixture
def authenticated_client(
create_test_user, tenants_fixture, set_user_admin_roles_fixture, client
):
client.user = create_test_user
serializer = TokenSerializer(
data={"type": "tokens", "email": TEST_USER, "password": TEST_PASSWORD}
@@ -104,6 +248,7 @@ def authenticated_api_client(create_test_user, tenants_fixture):
serializer.is_valid()
access_token = serializer.validated_data["access"]
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
return client
@@ -128,9 +273,33 @@ def tenants_fixture(create_test_user):
tenant3 = Tenant.objects.create(
name="Tenant Three",
)
return tenant1, tenant2, tenant3
@pytest.fixture
def set_user_admin_roles_fixture(create_test_user, tenants_fixture):
    """Grant the shared test user a full-permission admin role in the first
    two tenants of ``tenants_fixture``."""
    user = create_test_user
    admin_flags = dict(
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )
    for tenant in tenants_fixture[:2]:
        # Each role/relationship pair is created inside that tenant's RLS
        # transaction so the rows carry the right tenant context.
        with rls_transaction(str(tenant.id)):
            admin_role = Role.objects.create(
                name="admin",
                tenant_id=tenant.id,
                **admin_flags,
            )
            UserRoleRelationship.objects.create(
                user=user,
                role=admin_role,
                tenant_id=tenant.id,
            )
@pytest.fixture
def invitations_fixture(create_test_user, tenants_fixture):
user = create_test_user
@@ -210,6 +379,57 @@ def provider_groups_fixture(tenants_fixture):
return pgroup1, pgroup2, pgroup3
@pytest.fixture
def roles_fixture(tenants_fixture):
    """Create four roles with differing permission combinations in the
    first tenant and return them as a 4-tuple."""
    tenant, *_ = tenants_fixture

    def make_role(name, **permission_flags):
        # Small helper so each role reads as "name + its flag set".
        return Role.objects.create(name=name, tenant_id=tenant.id, **permission_flags)

    role1 = make_role(
        "Role One",
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=False,
        manage_scans=True,
        unlimited_visibility=False,
    )
    role2 = make_role(
        "Role Two",
        manage_users=False,
        manage_account=False,
        manage_billing=False,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )
    role3 = make_role(
        "Role Three",
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )
    role4 = make_role(
        "Role Four",
        manage_users=False,
        manage_account=False,
        manage_billing=False,
        manage_providers=False,
        manage_integrations=False,
        manage_scans=False,
        unlimited_visibility=False,
    )
    return role1, role2, role3, role4
@pytest.fixture
def provider_secret_fixture(providers_fixture):
return tuple(
@@ -537,10 +757,107 @@ def get_api_tokens(
data=json_body,
format="vnd.api+json",
)
return response.json()["data"]["attributes"]["access"], response.json()["data"][
"attributes"
]["refresh"]
return (
response.json()["data"]["attributes"]["access"],
response.json()["data"]["attributes"]["refresh"],
)
@pytest.fixture
def scan_summaries_fixture(tenants_fixture, providers_fixture):
    """Create one completed scan plus three ScanSummary rows spanning two
    services, two regions and two severities for overview tests."""
    tenant = tenants_fixture[0]
    provider = providers_fixture[0]
    scan = Scan.objects.create(
        name="overview scan",
        provider=provider,
        trigger=Scan.TriggerChoices.MANUAL,
        state=StateChoices.COMPLETED,
        tenant=tenant,
    )
    # Counters that are zero in every summary row below.
    zero_counters = dict(
        changed=0,
        unchanged=0,
        fail_changed=0,
        pass_changed=0,
        muted_changed=0,
    )
    summary_rows = [
        dict(
            check_id="check1",
            service="service1",
            severity="high",
            region="region1",
            _pass=1,
            fail=0,
            muted=0,
            total=1,
            new=1,
            fail_new=0,
            pass_new=1,
            muted_new=0,
        ),
        dict(
            check_id="check1",
            service="service1",
            severity="high",
            region="region2",
            _pass=0,
            fail=1,
            muted=1,
            total=2,
            new=2,
            fail_new=1,
            pass_new=0,
            muted_new=1,
        ),
        dict(
            check_id="check2",
            service="service2",
            severity="critical",
            region="region1",
            _pass=1,
            fail=0,
            muted=0,
            total=1,
            new=1,
            fail_new=0,
            pass_new=1,
            muted_new=0,
        ),
    ]
    for row in summary_rows:
        ScanSummary.objects.create(tenant=tenant, scan=scan, **zero_counters, **row)
def get_authorization_header(access_token: str) -> dict:
    """Build the HTTP Authorization header dict for a bearer token."""
    return {"Authorization": "Bearer " + access_token}
def pytest_collection_modifyitems(items):
    """Ensure test_rbac.py is executed first."""

    def rbac_first(item):
        # Items from test_rbac.py sort before everything else.
        return 0 if "test_rbac.py" in item.nodeid else 1

    items.sort(key=rbac_first)
def pytest_configure(config):
    # Apply the mock before the test session starts. This is necessary to
    # avoid an admin error when running the 0004_rbac_missing_admin_roles
    # migration.
    admin_db_patch = patch("api.db_router.MainRouter.admin_db", new="default")
    admin_db_patch.start()
def pytest_unconfigure(config):
    """Stop all active patches once the test session ends.

    This tears down the patches started in ``pytest_configure``; it is
    necessary to avoid admin errors when running the
    0004_rbac_missing_admin_roles migration.
    """
    patch.stopall()

View File

@@ -1,8 +1,9 @@
from celery.utils.log import get_task_logger
from django.db import transaction
from api.db_utils import batch_delete
from api.models import Finding, Provider, Resource, Scan, ScanSummary
from api.db_router import MainRouter
from api.db_utils import batch_delete, rls_transaction
from api.models import Finding, Provider, Resource, Scan, ScanSummary, Tenant
logger = get_task_logger(__name__)
@@ -49,3 +50,26 @@ def delete_provider(pk: str):
deletion_summary.update(provider_summary)
return deletion_summary
def delete_tenant(pk: str):
    """
    Gracefully deletes an instance of a tenant along with its related data.

    Every provider owned by the tenant is deleted first (each inside an RLS
    transaction scoped to the tenant), then the tenant row itself is removed
    through the admin DB connection.

    Args:
        pk (str): The primary key of the Tenant instance to delete.

    Returns:
        dict: A dictionary with the count of deleted objects per model,
            including related models. Counts are summed across all of the
            tenant's providers.
    """
    deletion_summary = {}
    for provider in Provider.objects.using(MainRouter.admin_db).filter(tenant_id=pk):
        with rls_transaction(pk):
            provider_summary = delete_provider(provider.id)
        # Sum the per-model counts across providers. A plain dict.update()
        # would let the last provider overwrite the counts of earlier ones
        # whenever they share a model label.
        for model_label, count in provider_summary.items():
            deletion_summary[model_label] = deletion_summary.get(model_label, 0) + count
    Tenant.objects.using(MainRouter.admin_db).filter(id=pk).delete()
    return deletion_summary

View File

@@ -11,7 +11,7 @@ from api.compliance import (
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
generate_scan_compliance,
)
from api.db_utils import tenant_transaction
from api.db_utils import rls_transaction
from api.models import (
ComplianceOverview,
Finding,
@@ -69,7 +69,7 @@ def _store_resources(
- tuple[str, str]: A tuple containing the resource UID and region.
"""
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
resource_instance, created = Resource.objects.get_or_create(
tenant_id=tenant_id,
provider=provider_instance,
@@ -86,7 +86,7 @@ def _store_resources(
resource_instance.service = finding.service_name
resource_instance.type = finding.resource_type
resource_instance.save()
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
tags = [
ResourceTag.objects.get_or_create(
tenant_id=tenant_id, key=key, value=value
@@ -122,7 +122,7 @@ def perform_prowler_scan(
unique_resources = set()
start_time = time.time()
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
provider_instance = Provider.objects.get(pk=provider_id)
scan_instance = Scan.objects.get(pk=scan_id)
scan_instance.state = StateChoices.EXECUTING
@@ -130,7 +130,7 @@ def perform_prowler_scan(
scan_instance.save()
try:
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
try:
prowler_provider = initialize_prowler_provider(provider_instance)
provider_instance.connected = True
@@ -156,7 +156,7 @@ def perform_prowler_scan(
for finding in findings:
for attempt in range(CELERY_DEADLOCK_ATTEMPTS):
try:
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
# Process resource
resource_uid = finding.resource_uid
if resource_uid not in resource_cache:
@@ -188,7 +188,7 @@ def perform_prowler_scan(
resource_instance.type = finding.resource_type
updated_fields.append("type")
if updated_fields:
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
resource_instance.save(update_fields=updated_fields)
except (OperationalError, IntegrityError) as db_err:
if attempt < CELERY_DEADLOCK_ATTEMPTS - 1:
@@ -203,7 +203,7 @@ def perform_prowler_scan(
# Update tags
tags = []
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
for key, value in finding.resource_tags.items():
tag_key = (key, value)
if tag_key not in tag_cache:
@@ -219,7 +219,7 @@ def perform_prowler_scan(
unique_resources.add((resource_instance.uid, resource_instance.region))
# Process finding
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
finding_uid = finding.uid
if finding_uid not in last_status_cache:
most_recent_finding = (
@@ -267,7 +267,7 @@ def perform_prowler_scan(
region_dict[finding.check_id] = finding.status.value
# Update scan progress
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
scan_instance.progress = progress
scan_instance.save()
@@ -279,7 +279,7 @@ def perform_prowler_scan(
scan_instance.state = StateChoices.FAILED
finally:
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
scan_instance.duration = time.time() - start_time
scan_instance.completed_at = datetime.now(tz=timezone.utc)
scan_instance.unique_resource_count = len(unique_resources)
@@ -330,7 +330,7 @@ def perform_prowler_scan(
total_requirements=compliance["total_requirements"],
)
)
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
ComplianceOverview.objects.bulk_create(compliance_overview_objects)
if exception is not None:
@@ -368,7 +368,7 @@ def aggregate_findings(tenant_id: str, scan_id: str):
- muted_new: Muted findings with a delta of 'new'.
- muted_changed: Muted findings with a delta of 'changed'.
"""
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
findings = Finding.objects.filter(scan_id=scan_id)
aggregation = findings.values(
@@ -464,7 +464,7 @@ def aggregate_findings(tenant_id: str, scan_id: str):
),
)
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
scan_aggregations = {
ScanSummary(
tenant_id=tenant_id,

View File

@@ -4,10 +4,10 @@ from celery import shared_task
from config.celery import RLSTask
from django_celery_beat.models import PeriodicTask
from tasks.jobs.connection import check_provider_connection
from tasks.jobs.deletion import delete_provider
from tasks.jobs.deletion import delete_provider, delete_tenant
from tasks.jobs.scan import aggregate_findings, perform_prowler_scan
from api.db_utils import tenant_transaction
from api.db_utils import rls_transaction
from api.decorators import set_tenant
from api.models import Provider, Scan
@@ -99,7 +99,7 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
"""
task_id = self.request.id
with tenant_transaction(tenant_id):
with rls_transaction(tenant_id):
provider_instance = Provider.objects.get(pk=provider_id)
periodic_task_instance = PeriodicTask.objects.get(
name=f"scan-perform-scheduled-{provider_id}"
@@ -134,3 +134,8 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
@shared_task(name="scan-summary")
def perform_scan_summary_task(tenant_id: str, scan_id: str):
return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)
@shared_task(name="tenant-deletion")
def delete_tenant_task(tenant_id: str):
return delete_tenant(pk=tenant_id)

View File

@@ -1,13 +1,13 @@
import pytest
from django.core.exceptions import ObjectDoesNotExist
from tasks.jobs.deletion import delete_provider
from tasks.jobs.deletion import delete_provider, delete_tenant
from api.models import Provider
from api.models import Provider, Tenant
@pytest.mark.django_db
class TestDeleteInstance:
def test_delete_instance_success(self, providers_fixture):
class TestDeleteProvider:
def test_delete_provider_success(self, providers_fixture):
instance = providers_fixture[0]
result = delete_provider(instance.id)
@@ -15,8 +15,46 @@ class TestDeleteInstance:
with pytest.raises(ObjectDoesNotExist):
Provider.objects.get(pk=instance.id)
def test_delete_instance_does_not_exist(self):
def test_delete_provider_does_not_exist(self):
non_existent_pk = "babf6796-cfcc-4fd3-9dcf-88d012247645"
with pytest.raises(ObjectDoesNotExist):
delete_provider(non_existent_pk)
@pytest.mark.django_db
class TestDeleteTenant:
def test_delete_tenant_success(self, tenants_fixture, providers_fixture):
"""
Test successful deletion of a tenant and its related data.
"""
tenant = tenants_fixture[0]
providers = Provider.objects.filter(tenant_id=tenant.id)
# Ensure the tenant and related providers exist before deletion
assert Tenant.objects.filter(id=tenant.id).exists()
assert providers.exists()
# Call the function and validate the result
deletion_summary = delete_tenant(tenant.id)
assert deletion_summary is not None
assert not Tenant.objects.filter(id=tenant.id).exists()
assert not Provider.objects.filter(tenant_id=tenant.id).exists()
def test_delete_tenant_with_no_providers(self, tenants_fixture):
"""
Test deletion of a tenant with no related providers.
"""
tenant = tenants_fixture[1] # Assume this tenant has no providers
providers = Provider.objects.filter(tenant_id=tenant.id)
# Ensure the tenant exists but has no related providers
assert Tenant.objects.filter(id=tenant.id).exists()
assert not providers.exists()
# Call the function and validate the result
deletion_summary = delete_tenant(tenant.id)
assert deletion_summary == {} # No providers, so empty summary
assert not Tenant.objects.filter(id=tenant.id).exists()

View File

@@ -1,3 +1,4 @@
import uuid
from unittest.mock import MagicMock, patch
import pytest
@@ -26,7 +27,7 @@ class TestPerformScan:
providers_fixture,
):
with (
patch("api.db_utils.tenant_transaction"),
patch("api.db_utils.rls_transaction"),
patch(
"tasks.jobs.scan.initialize_prowler_provider"
) as mock_initialize_prowler_provider,
@@ -165,10 +166,10 @@ class TestPerformScan:
"tasks.jobs.scan.initialize_prowler_provider",
side_effect=Exception("Connection error"),
)
@patch("api.db_utils.tenant_transaction")
@patch("api.db_utils.rls_transaction")
def test_perform_prowler_scan_no_connection(
self,
mock_tenant_transaction,
mock_rls_transaction,
mock_initialize_prowler_provider,
mock_prowler_scan_class,
tenants_fixture,
@@ -205,14 +206,14 @@ class TestPerformScan:
@patch("api.models.ResourceTag.objects.get_or_create")
@patch("api.models.Resource.objects.get_or_create")
@patch("api.db_utils.tenant_transaction")
@patch("api.db_utils.rls_transaction")
def test_store_resources_new_resource(
self,
mock_tenant_transaction,
mock_rls_transaction,
mock_get_or_create_resource,
mock_get_or_create_tag,
):
tenant_id = "tenant123"
tenant_id = uuid.uuid4()
provider_instance = MagicMock()
provider_instance.id = "provider456"
@@ -253,14 +254,14 @@ class TestPerformScan:
@patch("api.models.ResourceTag.objects.get_or_create")
@patch("api.models.Resource.objects.get_or_create")
@patch("api.db_utils.tenant_transaction")
@patch("api.db_utils.rls_transaction")
def test_store_resources_existing_resource(
self,
mock_tenant_transaction,
mock_rls_transaction,
mock_get_or_create_resource,
mock_get_or_create_tag,
):
tenant_id = "tenant123"
tenant_id = uuid.uuid4()
provider_instance = MagicMock()
provider_instance.id = "provider456"
@@ -310,14 +311,14 @@ class TestPerformScan:
@patch("api.models.ResourceTag.objects.get_or_create")
@patch("api.models.Resource.objects.get_or_create")
@patch("api.db_utils.tenant_transaction")
@patch("api.db_utils.rls_transaction")
def test_store_resources_with_tags(
self,
mock_tenant_transaction,
mock_rls_transaction,
mock_get_or_create_resource,
mock_get_or_create_tag,
):
tenant_id = "tenant123"
tenant_id = uuid.uuid4()
provider_instance = MagicMock()
provider_instance.id = "provider456"

11
codecov.yml Normal file
View File

@@ -0,0 +1,11 @@
component_management:
individual_components:
- component_id: "prowler"
paths:
- "prowler/**"
- component_id: "api"
paths:
- "api/**"
comment:
layout: "header, diff, flags, components"

View File

@@ -37,7 +37,7 @@ services:
- 3000:3000
postgres:
image: postgres:16.3-alpine
image: postgres:16.3-alpine3.20
hostname: "postgres-db"
volumes:
- ./_data/postgres:/var/lib/postgresql/data

View File

@@ -25,7 +25,7 @@ services:
- ${UI_PORT:-3000}:${UI_PORT:-3000}
postgres:
image: postgres:16.3-alpine
image: postgres:16.3-alpine3.20
hostname: "postgres-db"
volumes:
- ./_data/postgres:/var/lib/postgresql/data

View File

@@ -51,14 +51,14 @@ For the AWS provider we have ways to test a Prowler check based on the following
We use and contribute to the [Moto](https://github.com/getmoto/moto) library which allows us to easily mock out tests based on AWS infrastructure. **It's awesome!**
- AWS API calls covered by [Moto](https://github.com/getmoto/moto):
- Service tests with `@mock_<service>`
- Checks tests with `@mock_<service>`
- Service tests with `@mock_aws`
- Checks tests with `@mock_aws`
- AWS API calls not covered by Moto:
- Service test with `mock_make_api_call`
- Checks tests with [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock)
- AWS API calls partially covered by Moto:
- Service test with `@mock_<service>` and `mock_make_api_call`
- Checks tests with `@mock_<service>` and `mock_make_api_call`
- Service test with `@mock_aws` and `mock_make_api_call`
- Checks tests with `@mock_aws` and `mock_make_api_call`
In the following section we are going to explain all of the above scenarios with examples. The main difference between those scenarios comes from if the [Moto](https://github.com/getmoto/moto) library covers the AWS API calls made by the service. You can check the covered API calls [here](https://github.com/getmoto/moto/blob/master/IMPLEMENTATION_COVERAGE.md).
@@ -70,7 +70,7 @@ This section is going to be divided based on the API coverage of the [Moto](http
#### API calls covered
If the [Moto](https://github.com/getmoto/moto) library covers the API calls we want to test, we can use the `@mock_<service>` decorator. This will mocked out all the API calls made to AWS keeping the state within the code decorated, in this case the test function.
If the [Moto](https://github.com/getmoto/moto) library covers the API calls we want to test, we can use the `@mock_aws` decorator. This will mock out all the API calls made to AWS, keeping the state within the decorated code — in this case, the test function.
```python
# We need to import the unittest.mock to allow us to patch some objects
@@ -80,8 +80,8 @@ from unittest import mock
# Boto3 client and session to call the AWS APIs
from boto3 import client, session
# Moto decorator for the IAM service we want to mock
from moto import mock_iam
# Moto decorator
from moto import mock_aws
# Constants used
AWS_ACCOUNT_NUMBER = "123456789012"
@@ -91,10 +91,8 @@ AWS_REGION = "us-east-1"
# We always name the test classes like Test_<check_name>
class Test_iam_password_policy_uppercase:
# We include the Moto decorator for the service we want to use
# You can include more than one if two or more services are
# involved in test
@mock_iam
# We include the Moto decorator
@mock_aws
# We name the tests with test_<service>_<check_name>_<test_action>
def test_iam_password_policy_no_uppercase_flag(self):
# First, we have to create an IAM client
@@ -238,7 +236,7 @@ To do so, you need to mock the `botocore.client.BaseClient._make_api_call` funct
import boto3
import botocore
from unittest.mock import patch
from moto import mock_iam
from moto import mock_aws
# Original botocore _make_api_call function
orig = botocore.client.BaseClient._make_api_call

View File

@@ -73,6 +73,8 @@ To use each one you need to pass the proper flag to the execution. Prowler for A
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool. It is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
- `Reader`
- `ProwlerRole` (custom role defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json))
???+ note
Please, notice that the field `assignableScopes` in the JSON custom role file must be changed to be the subscription or management group where the role is going to be assigned. The valid formats for the field are `/subscriptions/<subscription-id>` or `/providers/Microsoft.Management/managementGroups/<management-group-id>`.
To assign the permissions, follow the instructions in the [Microsoft Entra ID permissions](../tutorials/azure/create-prowler-service-principal.md#assigning-the-proper-permissions) section and the [Azure subscriptions permissions](../tutorials/azure/subscriptions.md#assigning-proper-permissions) section, respectively.

BIN
docs/img/compliance.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 274 KiB

View File

@@ -1,4 +1,4 @@
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
## Prowler App
@@ -29,7 +29,7 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
Prowler App can be installed in different ways, depending on your environment:
> See how to use Prowler App in the [Prowler App](tutorials/prowler-app.md) section.
> See how to use Prowler App in the [Prowler App Tutorial](tutorials/prowler-app.md) section.
=== "Docker Compose"
@@ -45,6 +45,8 @@ Prowler App can be installed in different ways, depending on your environment:
docker compose up -d
```
> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment.
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
???+ note
@@ -65,6 +67,9 @@ Prowler App can be installed in different ways, depending on your environment:
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
???+ warning
Make sure to have `api/.env` and `ui/.env.local` files with the required environment variables. You can find the required environment variables in the [`api/.env.template`](https://github.com/prowler-cloud/prowler/blob/master/api/.env.example) and [`ui/.env.template`](https://github.com/prowler-cloud/prowler/blob/master/ui/.env.template) files.
_Commands to run the API_:
``` bash
@@ -95,6 +100,19 @@ Prowler App can be installed in different ways, depending on your environment:
python -m celery -A config.celery worker -l info -E
```
_Commands to run the API Scheduler_:
``` bash
git clone https://github.com/prowler-cloud/prowler \
cd prowler/api \
poetry install \
poetry shell \
set -a \
source .env \
cd src/backend \
python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
_Commands to run the UI_:
``` bash
@@ -107,9 +125,6 @@ Prowler App can be installed in different ways, depending on your environment:
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
???+ warning
Make sure to have `api/.env` and `ui/.env.local` files with the required environment variables. You can find the required environment variables in the [`api/.env.template`](https://github.com/prowler-cloud/prowler/blob/master/api/.env.example) and [`ui/.env.template`](https://github.com/prowler-cloud/prowler/blob/master/ui/.env.template) files.
???+ warning
Google and GitHub authentication is only available in [Prowler Cloud](https://prowler.com).
@@ -373,8 +388,8 @@ After successfully adding and testing your credentials, Prowler will start scann
#### **View Results**
While the scan is running, start exploring the findings in these sections:
- **Overview**: High-level summary of the scans. <img src="../../img/overview.png" alt="Overview" width="700"/>
- **Compliance**: Insights into compliance status. <img src="../../img/compliance.png" alt="Compliance" width="700"/>
- **Overview**: High-level summary of the scans. <img src="img/overview.png" alt="Overview" width="700"/>
- **Compliance**: Insights into compliance status. <img src="img/compliance.png" alt="Compliance" width="700"/>
> See more details about the Prowler App usage in the [Prowler App](tutorials/prowler-app.md) section.

View File

@@ -13,7 +13,7 @@ As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (F
## Reporting Vulnerabilities
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or Prowler SaaS service, please submit the information by contacting to us via [**support.prowler.com**](http://support.prowler.com).
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or the Prowler Cloud service, please submit the information by contacting us via [**support.prowler.com**](http://support.prowler.com).
The information you share with the Prowler team as part of this process is kept confidential within Prowler. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.

View File

@@ -22,32 +22,31 @@ In order to see which compliance frameworks are cover by Prowler, you can use op
```sh
prowler <provider> --list-compliance
```
Currently, the available frameworks are:
### AWS
- `aws_account_security_onboarding_aws`
- `aws_audit_manager_control_tower_guardrails_aws`
- `aws_foundational_security_best_practices_aws`
- `aws_foundational_technical_review_aws`
- `aws_well_architected_framework_reliability_pillar_aws`
- `aws_well_architected_framework_security_pillar_aws`
- `cis_1.4_aws`
- `cis_1.5_aws`
- `cis_2.0_aws`
- `cis_2.0_gcp`
- `cis_2.0_azure`
- `cis_2.1_azure`
- `cis_3.0_aws`
- `cis_1.8_kubernetes`
- `cisa_aws`
- `ens_rd2022_aws`
- `fedramp_low_revision_4_aws`
- `fedramp_moderate_revision_4_aws`
- `ffiec_aws`
- `aws_foundational_technical_review_aws`
- `gdpr_aws`
- `gxp_21_cfr_part_11_aws`
- `gxp_eu_annex_11_aws`
- `hipaa_aws`
- `iso27001_2013_aws`
- `kisa_isms_p_2023_aws`
- `kisa_isms_p_2023_korean_aws`
- `mitre_attack_aws`
- `nist_800_171_revision_2_aws`
- `nist_800_53_revision_4_aws`
@@ -57,6 +56,23 @@ Currently, the available frameworks are:
- `rbi_cyber_security_framework_aws`
- `soc2_aws`
### Azure
- `cis_2.0_azure`
- `cis_2.1_azure`
- `ens_rd2022_azure`
- `mitre_attack_azure`
### GCP
- `cis_2.0_gcp`
- `ens_rd2022_gcp`
- `mitre_attack_gcp`
### Kubernetes
- `cis_1.8_kubernetes`
## List Requirements of Compliance Frameworks
For each compliance framework, you can use option `--list-compliance-requirements` to list its requirements:
```sh

View File

@@ -75,6 +75,7 @@ The following list includes all the Azure checks with configurable variables tha
| `app_ensure_php_version_is_latest` | `php_latest_version` | String |
| `app_ensure_python_version_is_latest` | `python_latest_version` | String |
| `app_ensure_java_version_is_latest` | `java_latest_version` | String |
| `sqlserver_recommended_minimal_tls_version` | `recommended_minimal_tls_versions` | List of Strings |
## GCP
@@ -447,6 +448,14 @@ azure:
# azure.app_ensure_java_version_is_latest
java_latest_version: "17"
# Azure SQL Server
# azure.sqlserver_minimal_tls_version
recommended_minimal_tls_versions:
[
"1.2",
"1.3"
]
# GCP Configuration
gcp:
# GCP Compute Configuration

View File

@@ -42,6 +42,7 @@ Mutelist:
Resources:
- "user-1" # Will mute user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will mute user-2 in check iam_user_hardware_mfa_enabled
Description: "Findings related with the check iam_user_hardware_mfa_enabled will be muted for us-east-1 region and user-1, user-2 resources"
"ec2_*":
Regions:
- "*"
@@ -140,6 +141,9 @@ Mutelist:
| `resource` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `tag` | The tag value. | `ORed` |
### Description
This field can be used to add information or some hints for the Mutelist rule.
## How to Use the Mutelist
@@ -171,6 +175,7 @@ If you want to mute failed findings only in specific regions, create a file with
- "ap-southeast-2"
Resources:
- "*"
Description: "Description related with the muted findings for the check"
### Default Mutelist
For the AWS Provider, Prowler is executed with a default AWS Mutelist with the AWS Resources that should be muted such as all resources created by AWS Control Tower when setting up a landing zone that can be found in [AWS Documentation](https://docs.aws.amazon.com/controltower/latest/userguide/shared-account-resources.html).

View File

@@ -5,6 +5,9 @@ The **Prowler App** is a user-friendly interface for the Prowler CLI, providing
After [installing](../index.md#prowler-app-installation) the **Prowler App**, access it at [http://localhost:3000](http://localhost:3000).
You can also access to the auto-generated **Prowler API** documentation at [http://localhost:8080/api/v1/docs](http://localhost:8080/api/v1/docs) to see all the available endpoints, parameters and responses.
???+ note
If you are a [Prowler Cloud](https://cloud.prowler.com/sign-in) user you can see API docs at [https://api.prowler.com/api/v1/docs](https://api.prowler.com/api/v1/docs)
## **Step 1: Sign Up**
To get started, sign up using your email and password:

View File

@@ -34,7 +34,6 @@ theme:
icon: material/weather-sunny
name: Switch to light mode
plugins:
- search
- git-revision-date-localized:
@@ -112,7 +111,7 @@ nav:
- Contact Us: contact.md
- Troubleshooting: troubleshooting.md
- About: about.md
- Prowler SaaS: https://prowler.com
- Prowler Cloud: https://prowler.com
# Customization
extra:

View File

@@ -95,6 +95,7 @@ Resources:
- 'servicecatalog:List*'
- 'ssm:GetDocument'
- 'ssm-incidents:List*'
- 'states:ListTagsForResource'
- 'support:Describe*'
- 'tag:GetTagKeys'
- 'wellarchitected:List*'

View File

@@ -45,6 +45,7 @@
"servicecatalog:List*",
"ssm:GetDocument",
"ssm-incidents:List*",
"states:ListTagsForResource",
"support:Describe*",
"tag:GetTagKeys",
"wellarchitected:List*"

View File

@@ -3,7 +3,7 @@
"roleName": "ProwlerRole",
"description": "Role used for checks that require read-only access to Azure resources and are not covered by the Reader role.",
"assignableScopes": [
"/"
"/{'subscriptions', 'providers/Microsoft.Management/managementGroups'}/{Your Subscription or Management Group ID}"
],
"permissions": [
{

201
poetry.lock generated
View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -775,17 +775,17 @@ files = [
[[package]]
name = "boto3"
version = "1.35.71"
version = "1.35.81"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "boto3-1.35.71-py3-none-any.whl", hash = "sha256:e2969a246bb3208122b3c349c49cc6604c6fc3fc2b2f65d99d3e8ccd745b0c16"},
{file = "boto3-1.35.71.tar.gz", hash = "sha256:3ed7172b3d4fceb6218bb0ec3668c4d40c03690939c2fca4f22bb875d741a07f"},
{file = "boto3-1.35.81-py3-none-any.whl", hash = "sha256:742941b2424c0223d2d94a08c3485462fa7c58d816b62ca80f08e555243acee1"},
{file = "boto3-1.35.81.tar.gz", hash = "sha256:d2e95fa06f095b8e0c545dd678c6269d253809b2997c30f5ce8a956c410b4e86"},
]
[package.dependencies]
botocore = ">=1.35.71,<1.36.0"
botocore = ">=1.35.81,<1.36.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -794,13 +794,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.35.71"
version = "1.35.83"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
{file = "botocore-1.35.71-py3-none-any.whl", hash = "sha256:fc46e7ab1df3cef66dfba1633f4da77c75e07365b36f03bd64a3793634be8fc1"},
{file = "botocore-1.35.71.tar.gz", hash = "sha256:f9fa058e0393660c3fe53c1e044751beb64b586def0bd2212448a7c328b0cbba"},
{file = "botocore-1.35.83-py3-none-any.whl", hash = "sha256:ba363183e4df79fbcfd5f3600fd473bd45a1de03d0d0b5e78abd59f276971d27"},
{file = "botocore-1.35.83.tar.gz", hash = "sha256:df5e4384838e50bbafd47e9b5fefb995e83cbb9412e7cd7c0db9555174d91bba"},
]
[package.dependencies]
@@ -1099,73 +1099,73 @@ files = [
[[package]]
name = "coverage"
version = "7.6.8"
version = "7.6.9"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.9"
files = [
{file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"},
{file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"},
{file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"},
{file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"},
{file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"},
{file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"},
{file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"},
{file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"},
{file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"},
{file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"},
{file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"},
{file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"},
{file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"},
{file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"},
{file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"},
{file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"},
{file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"},
{file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"},
{file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"},
{file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"},
{file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"},
{file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"},
{file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"},
{file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"},
{file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"},
{file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"},
{file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"},
{file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"},
{file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"},
{file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"},
{file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"},
{file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"},
{file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"},
{file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"},
{file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"},
{file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"},
{file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"},
{file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"},
{file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"},
{file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"},
{file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"},
{file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"},
{file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"},
{file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"},
{file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"},
{file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"},
{file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"},
{file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"},
{file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"},
{file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"},
{file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"},
{file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"},
{file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"},
{file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"},
{file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"},
{file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"},
{file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"},
{file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"},
{file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"},
{file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"},
{file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"},
{file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"},
{file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"},
{file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"},
{file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"},
{file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"},
{file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"},
{file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"},
{file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"},
{file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"},
{file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"},
{file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"},
{file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"},
{file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"},
{file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"},
{file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"},
{file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"},
{file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"},
{file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"},
{file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"},
{file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"},
{file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"},
{file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"},
{file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"},
{file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"},
{file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"},
{file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"},
{file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"},
]
[package.dependencies]
@@ -1719,13 +1719,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
[[package]]
name = "google-api-python-client"
version = "2.154.0"
version = "2.155.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "google_api_python_client-2.154.0-py2.py3-none-any.whl", hash = "sha256:a521bbbb2ec0ba9d6f307cdd64ed6e21eeac372d1bd7493a4ab5022941f784ad"},
{file = "google_api_python_client-2.154.0.tar.gz", hash = "sha256:1b420062e03bfcaa1c79e2e00a612d29a6a934151ceb3d272fe150a656dc8f17"},
{file = "google_api_python_client-2.155.0-py2.py3-none-any.whl", hash = "sha256:83fe9b5aa4160899079d7c93a37be306546a17e6686e2549bcc9584f1a229747"},
{file = "google_api_python_client-2.155.0.tar.gz", hash = "sha256:25529f89f0d13abcf3c05c089c423fb2858ac16e0b3727543393468d0d7af67c"},
]
[package.dependencies]
@@ -2403,13 +2403,13 @@ files = [
[[package]]
name = "microsoft-kiota-abstractions"
version = "1.6.2"
version = "1.6.6"
description = "Core abstractions for kiota generated libraries in Python"
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "microsoft_kiota_abstractions-1.6.2-py3-none-any.whl", hash = "sha256:8c2c777748e80f17dba3809b5d149585d9918198f0f94125e87432f7165ba80e"},
{file = "microsoft_kiota_abstractions-1.6.2.tar.gz", hash = "sha256:dec30f0fb427a051003e94b5c6fcf266f4702ecbd9d6961e3966124b9cbe41bf"},
{file = "microsoft_kiota_abstractions-1.6.6-py3-none-any.whl", hash = "sha256:29071715baf0d604c381c5d17be47f35e6e63a441dcfb5e9141963406b469d50"},
{file = "microsoft_kiota_abstractions-1.6.6.tar.gz", hash = "sha256:2554495b00c9c25b43f6964a71b65c89a277bd6b50f4d0028a7febcec6c4fd67"},
]
[package.dependencies]
@@ -2583,13 +2583,13 @@ dev = ["click", "codecov", "mkdocs-gen-files", "mkdocs-git-authors-plugin", "mkd
[[package]]
name = "mkdocs-material"
version = "9.5.46"
version = "9.5.49"
description = "Documentation that simply works"
optional = false
python-versions = ">=3.8"
files = [
{file = "mkdocs_material-9.5.46-py3-none-any.whl", hash = "sha256:98f0a2039c62e551a68aad0791a8d41324ff90c03a6e6cea381a384b84908b83"},
{file = "mkdocs_material-9.5.46.tar.gz", hash = "sha256:ae2043f4238e572f9a40e0b577f50400d6fc31e2fef8ea141800aebf3bd273d7"},
{file = "mkdocs_material-9.5.49-py3-none-any.whl", hash = "sha256:c3c2d8176b18198435d3a3e119011922f3e11424074645c24019c2dcf08a360e"},
{file = "mkdocs_material-9.5.49.tar.gz", hash = "sha256:3671bb282b4f53a1c72e08adbe04d2481a98f85fed392530051f80ff94a9621d"},
]
[package.dependencies]
@@ -2769,13 +2769,13 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.12.0"
version = "1.14.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.8"
files = [
{file = "msgraph_sdk-1.12.0-py3-none-any.whl", hash = "sha256:ac298b546b240391b0e407379d039db32862a56d6fe15cf7c5f7e77631fc6771"},
{file = "msgraph_sdk-1.12.0.tar.gz", hash = "sha256:fbb5a8a9f6eed89b496f207eb35b6b4cfc7fefa75608aeef07477a3b2276d4fa"},
{file = "msgraph_sdk-1.14.0-py3-none-any.whl", hash = "sha256:1a2f327dc8fbe5a5e6d0d84cf71d605e7b118b3066b1e16f011ccd8fd927bb03"},
{file = "msgraph_sdk-1.14.0.tar.gz", hash = "sha256:5bbda80941c5d1794682753b8b291bd2ebed719a43d6de949fd0cd613b6dfbbd"},
]
[package.dependencies]
@@ -3796,17 +3796,17 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
[[package]]
name = "pylint"
version = "3.3.1"
version = "3.3.2"
description = "python code static checker"
optional = false
python-versions = ">=3.9.0"
files = [
{file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"},
{file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"},
{file = "pylint-3.3.2-py3-none-any.whl", hash = "sha256:77f068c287d49b8683cd7c6e624243c74f92890f767f106ffa1ddf3c0a54cb7a"},
{file = "pylint-3.3.2.tar.gz", hash = "sha256:9ec054ec992cd05ad30a6df1676229739a73f8feeabf3912c995d17601052b01"},
]
[package.dependencies]
astroid = ">=3.3.4,<=3.4.0-dev0"
astroid = ">=3.3.5,<=3.4.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.2", markers = "python_version < \"3.11\""},
@@ -3858,13 +3858,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pytest"
version = "8.3.3"
version = "8.3.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
{file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
{file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
]
[package.dependencies]
@@ -4458,6 +4458,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -4466,6 +4467,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -4474,6 +4476,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -4482,6 +4485,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -4490,6 +4494,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -4638,17 +4643,17 @@ files = [
[[package]]
name = "slack-sdk"
version = "3.33.4"
version = "3.33.5"
description = "The Slack API Platform SDK for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "slack_sdk-3.33.4-py2.py3-none-any.whl", hash = "sha256:9f30cb3c9c07b441c49d53fc27f9f1837ad1592a7e9d4ca431f53cdad8826cc6"},
{file = "slack_sdk-3.33.4.tar.gz", hash = "sha256:5e109847f6b6a22d227609226ba4ed936109dc00675bddeb7e0bee502d3ee7e0"},
{file = "slack_sdk-3.33.5-py2.py3-none-any.whl", hash = "sha256:b8cccadfa3d4005a5e6529f52000d25c583f46173fda8e9136fdd2bc58923ff6"},
{file = "slack_sdk-3.33.5.tar.gz", hash = "sha256:a5e74c00c99dc844ad93e501ab764a20d86fa8184bbc9432af217496f632c4ee"},
]
[package.extras]
optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<14)"]
optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<15)"]
[[package]]
name = "smmap"
@@ -4888,13 +4893,13 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "vulture"
version = "2.13"
version = "2.14"
description = "Find dead code"
optional = false
python-versions = ">=3.8"
files = [
{file = "vulture-2.13-py2.py3-none-any.whl", hash = "sha256:34793ba60488e7cccbecdef3a7fe151656372ef94fdac9fe004c52a4000a6d44"},
{file = "vulture-2.13.tar.gz", hash = "sha256:78248bf58f5eaffcc2ade306141ead73f437339950f80045dce7f8b078e5a1aa"},
{file = "vulture-2.14-py2.py3-none-any.whl", hash = "sha256:d9a90dba89607489548a49d557f8bac8112bd25d3cbc8aeef23e860811bd5ed9"},
{file = "vulture-2.14.tar.gz", hash = "sha256:cb8277902a1138deeab796ec5bef7076a6e0248ca3607a3f3dee0b6d9e9b8415"},
]
[package.dependencies]
@@ -5194,4 +5199,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.13"
content-hash = "1acc901866ecfc2c0f3576b9e442d7a3b6e6522cac3d4d1b9301ed4232755cba"
content-hash = "c3ff27de93b161a7644f5d82ba76f503f859cb79f6c5dd5b18bfcc2ba40a4479"

View File

@@ -10,6 +10,7 @@ Mutelist:
- "*"
Resources:
- "aws-controltower-NotificationForwarder"
Description: "Checks from AWS lambda functions muted by default"
"cloudformation_stack*":
Regions:
- "*"

View File

@@ -14,6 +14,7 @@ Mutelist:
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
Description: "Check iam_user_hardware_mfa_enabled muted for region us-east-1 and resources user-1, user-2"
"ec2_*":
Regions:
- "*"

View File

@@ -15,6 +15,7 @@ Mutelist:
Resources:
- "sqlserver1" # Will ignore sqlserver1 in check sqlserver_tde_encryption_enabled located in westeurope
- "sqlserver2" # Will ignore sqlserver2 in check sqlserver_tde_encryption_enabled located in westeurope
Description: "Findings related to the check sqlserver_tde_encryption_enabled are muted for westeurope region and sqlserver1, sqlserver2 resources"
"defender_*":
Regions:
- "*"

View File

@@ -12,7 +12,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "5.0.0"
prowler_version = "5.1.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

View File

@@ -388,6 +388,14 @@ azure:
# azure.app_ensure_java_version_is_latest
java_latest_version: "17"
# Azure SQL Server
# azure.sqlserver_minimal_tls_version
recommended_minimal_tls_versions:
[
"1.2",
"1.3",
]
# GCP Configuration
gcp:
# GCP Compute Configuration

View File

@@ -15,6 +15,7 @@ Mutelist:
Resources:
- "instance1" # Will ignore instance1 in check compute_instance_public_ip located in europe-southwest1
- "instance2" # Will ignore instance2 in check compute_instance_public_ip located in europe-southwest1
Description: "Findings related to the check compute_instance_public_ip will be muted for europe-southwest1 region and instance1, instance2 resources"
"iam_*":
Regions:
- "*"

View File

@@ -15,6 +15,7 @@ Mutelist:
Resources:
- "prowler-pod1" # Will ignore prowler-pod1 in check core_minimize_allowPrivilegeEscalation_containers located in namespace1
- "prowler-pod2" # Will ignore prowler-pod2 in check core_minimize_allowPrivilegeEscalation_containers located in namespace1
Description: "Findings related to the check core_minimize_allowPrivilegeEscalation_containers will be muted for namespace1 region and prowler-pod1, prowler-pod2 resources"
"kubelet_*":
Regions:
- "*"

View File

@@ -15,6 +15,7 @@ mutelist_schema = Schema(
Optional("Resources"): list,
Optional("Tags"): list,
},
Optional("Description"): str,
}
}
}

View File

@@ -106,6 +106,7 @@ class Mutelist(ABC):
- 'i-123456789'
Tags:
- 'Name=AdminInstance | Environment=Prod'
Description: 'Field to describe why the findings associated with these values are muted'
```
The check `ec2_instance_detailed_monitoring_enabled` will be muted for all accounts and regions and for the resource_id 'i-123456789' with at least one of the tags 'Name=AdminInstance' or 'Environment=Prod'.

View File

@@ -94,11 +94,12 @@ def get_cis_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -95,11 +95,12 @@ def get_ens_table(
print(
f"\nEstado de Cumplimiento de {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) NO CUMPLE{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) CUMPLE{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) NO CUMPLE{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) CUMPLE{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -39,11 +39,12 @@ def get_generic_compliance_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -61,11 +61,12 @@ def get_kisa_ismsp_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -69,11 +69,12 @@ def get_mitre_attack_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -908,6 +908,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1260,6 +1261,15 @@
"aws-us-gov": []
}
},
"bcm-pricing-calculator": {
"regions": {
"aws": [
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"bedrock": {
"regions": {
"aws": [
@@ -7315,6 +7325,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -7344,6 +7355,31 @@
]
}
},
"networkflowmonitor": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"networkmanager": {
"regions": {
"aws": [
@@ -7441,6 +7477,15 @@
"aws-us-gov": []
}
},
"notificationscontacts": {
"regions": {
"aws": [
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"oam": {
"regions": {
"aws": [
@@ -7485,6 +7530,23 @@
]
}
},
"observabilityadmin": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-southeast-1",
"ap-southeast-2",
"eu-central-1",
"eu-north-1",
"eu-west-1",
"us-east-1",
"us-east-2",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"omics": {
"regions": {
"aws": [
@@ -11097,11 +11159,14 @@
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"ca-west-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-1",
"eu-south-2",
"eu-west-1",
"eu-west-2",
"eu-west-3",

View File

@@ -8,19 +8,20 @@ class autoscaling_group_launch_configuration_no_public_ip(Check):
def execute(self):
findings = []
for group in autoscaling_client.groups:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "PASS"
report.status_extended = f"Autoscaling group {group.name} does not have an associated launch configuration assigning a public IP address."
for lc in autoscaling_client.launch_configurations.values():
if lc.name == group.launch_configuration_name and lc.public_ip:
report.status = "FAIL"
report.status_extended = f"Autoscaling group {group.name} has an associated launch configuration assigning a public IP address."
if lc.name == group.launch_configuration_name:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "PASS"
report.status_extended = f"Autoscaling group {group.name} does not have an associated launch configuration assigning a public IP address."
findings.append(report)
if lc.public_ip:
report.status = "FAIL"
report.status_extended = f"Autoscaling group {group.name} has an associated launch configuration assigning a public IP address."
findings.append(report)
return findings

View File

@@ -8,20 +8,17 @@ class autoscaling_group_launch_configuration_requires_imdsv2(Check):
def execute(self):
findings = []
for group in autoscaling_client.groups:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "FAIL"
report.status_extended = (
f"Autoscaling group {group.name} has IMDSv2 disabled or not required."
)
for (
launch_configuration
) in autoscaling_client.launch_configurations.values():
if launch_configuration.name == group.launch_configuration_name:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "FAIL"
report.status_extended = f"Autoscaling group {group.name} has IMDSv2 disabled or not required."
if (
launch_configuration.http_endpoint == "enabled"
and launch_configuration.http_tokens == "required"
@@ -32,6 +29,6 @@ class autoscaling_group_launch_configuration_requires_imdsv2(Check):
report.status = "PASS"
report.status_extended = f"Autoscaling group {group.name} has metadata service disabled."
findings.append(report)
findings.append(report)
return findings

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_inside_vpc",
"CheckTitle": "Ensure AWS Lambda Functions Are Deployed Inside a VPC",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "low",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
"CheckTitle": "Check if Lambda functions invoke API operations are being recorded by CloudTrail.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "low",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_no_secrets_in_code",
"CheckTitle": "Find secrets in Lambda functions code.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "critical",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_no_secrets_in_variables",
"CheckTitle": "Find secrets in Lambda functions variables.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "critical",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_not_publicly_accessible",
"CheckTitle": "Check if Lambda functions have resource-based policy set as Public.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "critical",

View File

@@ -0,0 +1,61 @@
import json
from prowler.lib.logger import logger
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
def fixer(resource_id: str, region: str) -> bool:
    """
    Strip the Lambda function's public resource-based policy and re-grant access to the account.

    Every permission statement attached to the function's policy is removed, after
    which a single new permission is added that allows only the audited account to
    invoke the function. Requires the lambda:RemovePermission and
    lambda:AddPermission permissions.
    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "lambda:RemovePermission",
                "Resource": "*"
            },
            {
                "Effect": "Allow",
                "Action": "lambda:AddPermission",
                "Resource": "*"
            }
        ]
    }
    Args:
        resource_id (str): The Lambda function name or ARN.
        region (str): AWS region where the Lambda function exists.
    Returns:
        bool: True if the operation is successful (policy removed and permission added), False otherwise.
    """
    try:
        client = awslambda_client.regional_clients[region]
        # Fetch and parse the function's current resource-based policy document.
        current_policy = json.loads(
            client.get_policy(FunctionName=resource_id).get("Policy")
        )
        # Collect the Sid of every existing statement; statements without a Sid
        # cannot be removed individually and are skipped (as before).
        statement_ids = [
            stmt.get("Sid")
            for stmt in current_policy.get("Statement", [])
            if stmt.get("Sid")
        ]
        for sid in statement_ids:
            client.remove_permission(FunctionName=resource_id, StatementId=sid)
        # Re-grant invoke access, restricted to the audited account only.
        client.add_permission(
            FunctionName=resource_id,
            StatementId="ProwlerFixerStatement",
            Principal=awslambda_client.audited_account,
            Action="lambda:InvokeFunction",
        )
        return True
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_url_cors_policy",
"CheckTitle": "Check Lambda Function URL CORS configuration.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "medium",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_url_public",
"CheckTitle": "Check Public Lambda Function URL.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "high",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_using_supported_runtimes",
"CheckTitle": "Find obsolete Lambda runtimes.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "medium",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_vpc_multi_az",
"CheckTitle": "Check if AWS Lambda Function VPC is deployed Across Multiple Availability Zones",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "medium",

View File

@@ -8,14 +8,14 @@ class backup_recovery_point_encrypted(Check):
for recovery_point in backup_client.recovery_points:
report = Check_Report_AWS(self.metadata())
report.region = recovery_point.backup_vault_region
report.resource_id = recovery_point.backup_vault_name
report.resource_id = recovery_point.id
report.resource_arn = recovery_point.arn
report.resource_tags = recovery_point.tags
report.status = "FAIL"
report.status_extended = f"Backup Recovery Point {recovery_point.arn} for Backup Vault {recovery_point.backup_vault_name} is not encrypted at rest."
report.status_extended = f"Backup Recovery Point {recovery_point.id} for Backup Vault {recovery_point.backup_vault_name} is not encrypted at rest."
if recovery_point.encrypted:
report.status = "PASS"
report.status_extended = f"Backup Recovery Point {recovery_point.arn} for Backup Vault {recovery_point.backup_vault_name} is encrypted at rest."
report.status_extended = f"Backup Recovery Point {recovery_point.id} for Backup Vault {recovery_point.backup_vault_name} is encrypted at rest."
findings.append(report)

View File

@@ -183,21 +183,27 @@ class Backup(AWSService):
def _list_recovery_points(self, regional_client):
logger.info("Backup - Listing Recovery Points...")
try:
for backup_vault in self.backup_vaults:
paginator = regional_client.get_paginator(
"list_recovery_points_by_backup_vault"
)
for page in paginator.paginate(BackupVaultName=backup_vault.name):
for recovery_point in page.get("RecoveryPoints", []):
self.recovery_points.append(
RecoveryPoint(
arn=recovery_point.get("RecoveryPointArn"),
backup_vault_name=backup_vault.name,
encrypted=recovery_point.get("IsEncrypted", False),
backup_vault_region=backup_vault.region,
tags=[],
)
)
if self.backup_vaults:
for backup_vault in self.backup_vaults:
paginator = regional_client.get_paginator(
"list_recovery_points_by_backup_vault"
)
for page in paginator.paginate(BackupVaultName=backup_vault.name):
for recovery_point in page.get("RecoveryPoints", []):
arn = recovery_point.get("RecoveryPointArn")
if arn:
self.recovery_points.append(
RecoveryPoint(
arn=arn,
id=arn.split(":")[-1],
backup_vault_name=backup_vault.name,
encrypted=recovery_point.get(
"IsEncrypted", False
),
backup_vault_region=backup_vault.region,
tags=[],
)
)
except ClientError as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -241,6 +247,7 @@ class BackupReportPlan(BaseModel):
class RecoveryPoint(BaseModel):
arn: str
id: str
backup_vault_name: str
encrypted: bool
backup_vault_region: str

Some files were not shown because too many files have changed in this diff Show More