Compare commits

..

88 Commits

Author SHA1 Message Date
Pablo Lara
a75755c8c5 WIP: add change role to the user's invitations 2024-12-15 10:37:51 +01:00
Pablo Lara
3e0568f381 chore: add change role to the user's invitations 2024-12-13 12:38:08 +01:00
Pablo Lara
fec66a3685 Merge branch 'master' into PRWLR-4669-Roles-Page-API-UI 2024-12-13 06:02:29 +01:00
Pepe Fagoaga
554491a642 chore(gha): build and push OSS UI (#6168) 2024-12-12 19:10:44 +01:00
Pedro Martín
dc4e2f3c85 feat(GHA): build containers for API (#6032)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-12-12 19:05:25 +01:00
Daniel Barranquero
7d2c50991b feat(s3): add new fixer s3_bucket_public_access_fixer (#6164)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-12 12:17:41 -04:00
Pedro Martín
83c204e010 fix(rds): add invalid SG to status_extended (#6157) 2024-12-12 11:51:09 -04:00
dependabot[bot]
316eb049dd chore(deps): bump botocore from 1.35.78 to 1.35.79 (#6153)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-12 11:29:23 -04:00
Daniel Barranquero
be347b2428 feat(ec2): add new check ec2_launch_template_imdsv2_required (#6139)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-12 11:27:20 -04:00
Daniel Barranquero
a90c772827 feat(s3): add new fixer s3_bucket_public_list_acl_fixer (#6166) 2024-12-12 11:16:46 -04:00
Daniel Barranquero
26c70976c0 feat(s3): add new fixer s3_bucket_public_write_acl_fixer (#5855) 2024-12-12 11:10:43 -04:00
dependabot[bot]
657310dc25 chore(deps): bump boto3 from 1.35.77 to 1.35.78 (#6154)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-12 10:39:22 -04:00
Daniel Barranquero
6e595eaf92 feat(ec2): add new fixer ec2_instance_port_cifs_exposed_to_internet_fixer (#6159) 2024-12-12 09:22:56 -04:00
Prowler Bot
997831e33d chore(regions_update): Changes in regions for AWS services (#6158)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2024-12-12 09:10:46 -04:00
dependabot[bot]
5920cdc48f chore(deps): bump trufflesecurity/trufflehog from 3.86.0 to 3.86.1 (#6156)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-12 09:10:20 -04:00
dependabot[bot]
971e73f9cb chore(deps): bump google-api-python-client from 2.154.0 to 2.155.0 (#6155)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-12 09:09:51 -04:00
Mads Brouer Lundholm
bd9673c9de fix(aurora): Add default ports to the check of using non default ports (#5821)
Co-authored-by: Mads Rantala Lundholm <mao@bankdata.dk>
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2024-12-11 13:01:45 -04:00
johannes-engler-mw
eded97d735 feat(azure): check for minimal TLS version for Azure SQL server (#5745)
Co-authored-by: Rubén De la Torre Vico <ruben@prowler.com>
2024-12-11 16:37:53 +01:00
Daniel Barranquero
fdb1956b0b feat(opensearch): add new fixer opensearch_service_domains_not_publicly_accessible_fixer (#5926) 2024-12-11 11:29:48 -04:00
Daniel Barranquero
a915c04e9e fix(autoscaling): autoscaling_group_launch_configuration_requires_imdsv2 fails if Launch Template is used (#6111)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-11 11:18:30 -04:00
Daniel Barranquero
07178ac69a feat(glacier): add new fixer glacier_vaults_policy_public_access_fixer (#5950) 2024-12-11 11:10:12 -04:00
Daniel Barranquero
9b434d4856 feat(ecr): add new fixer ecr_repositories_not_publicly_accessible_fixer (#5923) 2024-12-11 10:42:11 -04:00
dependabot[bot]
0758e97628 chore(deps): bump botocore from 1.35.77 to 1.35.78 (#6132)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-11 10:19:37 -04:00
Sergio Garcia
b486007f95 fix(README): show latest release (#6145) 2024-12-11 10:19:06 -04:00
dependabot[bot]
0c0887afef chore(deps): bump trufflesecurity/trufflehog from 3.85.0 to 3.86.0 (#6130)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-11 09:29:14 -04:00
dependabot[bot]
805ed81031 chore(deps): bump boto3 from 1.35.76 to 1.35.77 (#6131)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-11 09:26:07 -04:00
Prowler Bot
ec3fddf5b1 chore(regions_update): Changes in regions for AWS services (#6136)
Co-authored-by: MrCloudSec <38561120+MrCloudSec@users.noreply.github.com>
2024-12-11 09:25:17 -04:00
Rubén De la Torre Vico
d7b0bc02ba feat(app): add support for TLS 1.3 to Web Apps check (#6004) 2024-12-11 13:14:29 +01:00
Pablo Lara
4d1c8eae8f feat(users): user detail can be edited now properly (#6135) 2024-12-11 10:05:30 +01:00
Sergio Garcia
989ccf4ae3 fix(iam): set unique resource id for each user access key (#6128) 2024-12-11 09:13:49 +01:00
Pablo Lara
ba335de6b3 chore: fix error with exports 2024-12-11 08:30:12 +01:00
Pedro Martín
9c089756c3 fix(compliance_tables): add correct values for findings (#6122)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-10 15:40:45 -04:00
Hugo Pereira Brito
8d4b0914a8 fix(aws): get firewall manager managed rule groups (#6119) 2024-12-10 15:34:22 -04:00
Hugo Pereira Brito
1ae3f89aab fix(aws): check AWS Owned keys in firehose_stream_encrypted_at_rest (#6108) 2024-12-10 13:42:13 -04:00
Pablo Lara
93051d55d5 feat: add role when invite an user 2024-12-10 18:14:57 +01:00
Daniel Barranquero
b984f0423a feat(sqs): add new fixer sqs_queues_not_publicly_accessible_fixer (#5911)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-10 12:26:42 -04:00
Sergio Garcia
f2f196cfcd fix(aws): set IAM identity as resource in threat detection (#6048) 2024-12-10 17:03:01 +01:00
dependabot[bot]
6471d936bb chore(deps): bump msgraph-sdk from 1.12.0 to 1.14.0 (#5957)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 11:42:40 -04:00
Adrián Jesús Peña Rodríguez
21bbdccc41 fix(deploy): temporal fix for the alpine-python segmentation fault (#6109) 2024-12-10 16:27:52 +01:00
Sergio Garcia
48946fa4f7 fix(gcp): make sure default project is active (#6097) 2024-12-10 11:06:48 -04:00
dependabot[bot]
9312dda7c2 chore(deps): bump microsoft-kiota-abstractions from 1.6.2 to 1.6.6 (#6038)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 10:37:04 -04:00
dependabot[bot]
e3013329ee chore(deps): bump botocore from 1.35.76 to 1.35.77 (#6098)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 09:26:36 -04:00
Pablo Lara
161c56ffe4 feat: add permission column to roles table 2024-12-10 11:28:10 +01:00
Pablo Lara
e306322630 Merge branch 'PRWLR-5688-Implement-permission-status-filter' into PRWLR-4669-Roles-Page-API-UI 2024-12-10 10:47:14 +01:00
Adrián Jesús Peña Rodríguez
b4eb6e8076 feat(rbac): add permission_state field to role serializer 2024-12-10 10:45:09 +01:00
Pablo Lara
b54e9334b9 chore: add and edit roles is working now 2024-12-10 10:44:34 +01:00
Adrián Jesús Peña Rodríguez
5fd1af7559 feat(rbac): add permission_state filter 2024-12-10 10:33:26 +01:00
Pablo Lara
83c7ced6ff Merge branch 'feat-rbac' into PRWLR-4669-Roles-Page-API-UI 2024-12-10 09:29:24 +01:00
Adrián Jesús Peña Rodríguez
67d9ff2419 ref(provider_groups): ref the provider_groups relationships endpoint (#6033) 2024-12-10 09:28:22 +01:00
Pablo Lara
130fddae1e feat: edit role feature 2024-12-10 09:08:25 +01:00
Sergio Garcia
38a0d2d740 fix(aws): set same severity for EC2 IMDSv2 checks (#6046) 2024-12-10 08:55:41 +01:00
Mario Rodriguez Lopez
5c2adf1e14 docs(unitesting): Make some fixes to the documentation (#6102) 2024-12-10 08:51:19 +01:00
Pablo Lara
04b9f81e26 feat: add new role feature 2024-12-10 07:24:01 +01:00
Daniel Barranquero
7ddd2c04c8 feat(awslambda): add new fixer awslambda_function_not_publicly_accessible_fixer (#5840) 2024-12-09 12:28:42 -04:00
Pepe Fagoaga
9a55632d8e fix(backport): more than one backport tag is allowed (#6090) 2024-12-09 17:19:33 +01:00
dependabot[bot]
f8b4427505 chore(deps-dev): bump vulture from 2.13 to 2.14 (#6068)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 11:10:41 -04:00
Sergio Garcia
f1efc1456d chore(dependabot): change interval of PRs (#6086) 2024-12-09 15:46:28 +01:00
Sergio Garcia
2ea5851b67 docs(api): add commands to run API scheduler (#6085) 2024-12-09 10:34:02 -04:00
dependabot[bot]
a3051bc4e3 chore(deps-dev): bump mkdocs-material from 9.5.47 to 9.5.48 (#6073)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 10:14:08 -04:00
Pepe Fagoaga
d454427b8b fix(backport): remove v from branch prefix (#6081) 2024-12-09 10:13:20 -04:00
Pepe Fagoaga
4b41bd6adf chore(containers): support for v4.6 branch (#6063)
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-09 09:23:06 -04:00
Pepe Fagoaga
cdd044d120 chore(dependabot): Update for UI and v4 (#6062) 2024-12-09 09:15:03 -04:00
Pepe Fagoaga
213a793fbc chore(actions): standardize names (#6059) 2024-12-09 09:14:06 -04:00
Pepe Fagoaga
a8a567c588 docs: Prowler SaaS -> Cloud and add missing compliance (#6061) 2024-12-09 09:12:54 -04:00
Pepe Fagoaga
fefe89a1ed fix(backport): Add action to detect labels (#5270) 2024-12-09 09:12:08 -04:00
Sergio Garcia
493fe2d523 docs(env): move warning about env files (#6049) 2024-12-09 11:11:05 +01:00
dependabot[bot]
d8fc830f1d chore(deps): bump boto3 from 1.35.71 to 1.35.76 (#6054)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 10:11:51 +01:00
Pepe Fagoaga
b6c3ba0f0d chore: delete unneeded requirements file (#6056) 2024-12-09 09:07:10 +01:00
dependabot[bot]
32cd39d158 chore(deps-dev): bump coverage from 7.6.8 to 7.6.9 (#6053)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 20:29:06 -04:00
dependabot[bot]
203275817f chore(deps-dev): bump pytest from 8.3.3 to 8.3.4 (#5992)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 12:53:11 -04:00
dependabot[bot]
c05c3396b5 chore(deps-dev): bump mkdocs-material from 9.5.46 to 9.5.47 (#5988)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 11:56:37 -04:00
dependabot[bot]
8f172aec8a chore(deps-dev): bump pylint from 3.3.1 to 3.3.2 (#5993)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 11:04:28 -04:00
dependabot[bot]
263a7e2134 chore(deps): bump botocore from 1.35.71 to 1.35.76 (#6037) 2024-12-06 09:41:57 -04:00
dependabot[bot]
a2ea216604 chore(deps): bump slack-sdk from 3.33.4 to 3.33.5 (#6039)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 08:44:00 -04:00
dependabot[bot]
77c572f990 chore(deps): bump trufflesecurity/trufflehog from 3.84.1 to 3.85.0 (#6040)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-06 08:38:14 -04:00
Prowler Bot
bb0c346c4d chore(regions_update): Changes in regions for AWS services (#6041)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-12-06 08:38:03 -04:00
Daniel Barranquero
2ce8e1fd21 fix(backup): modify list recovery points call (#5996) 2024-12-06 08:35:29 -04:00
Pepe Fagoaga
ecfd94aeb1 fix(codecov): create components (#6028) 2024-12-05 16:35:56 +01:00
Pablo Lara
29bc697487 feat: add roles page 2024-12-05 15:25:07 +01:00
Pedro Martín
eddc672264 chore(version): update prowler version (#6027) 2024-12-05 13:51:13 +01:00
Pedro Martín
8c71a39487 docs(prowler-app): add link to https://api.prowler.com/api/v1/docs (#6016) 2024-12-05 11:01:51 +01:00
Pedro Martín
ff0ac27723 docs(index): update index with images (#6015) 2024-12-05 11:01:42 +01:00
Pablo Lara
381aa93f55 chore: add roles item to the sidebar 2024-12-05 09:12:31 +01:00
Adrián Jesús Peña Rodríguez
2bee4b986f Merge branch 'master' into feat-rbac 2024-12-04 15:42:47 +01:00
Víctor Fernández Poyatos
ad7134d283 fix(tenant): fix delete tenants behavior (#6013) 2024-12-04 13:57:16 +01:00
Adrián Jesús Peña Rodríguez
9723b8fac1 Merge branch 'master' into feat-rbac 2024-12-04 12:51:39 +01:00
Pablo Lara
58723ae52e fix(invitations): remove wrong url (#6005) 2024-12-03 21:08:31 +01:00
Adrián Jesús Peña Rodríguez
67ef67add9 feat(api-rbac): RBAC system (#5903) 2024-11-26 12:32:55 +01:00
170 changed files with 12370 additions and 1986 deletions

View File

@@ -1,3 +1,3 @@
name: "Custom CodeQL Config for API"
name: "API - CodeQL Config"
paths:
- 'api/'
- "api/"

View File

@@ -1,4 +0,0 @@
name: "Custom CodeQL Config"
paths-ignore:
- 'api/'
- 'ui/'

4
.github/codeql/sdk-codeql-config.yml vendored Normal file
View File

@@ -0,0 +1,4 @@
name: "SDK - CodeQL Config"
paths-ignore:
- "api/"
- "ui/"

View File

@@ -1,3 +1,3 @@
name: "Custom CodeQL Config for UI"
name: "UI - CodeQL Config"
paths:
- "ui/"

View File

@@ -5,6 +5,7 @@
version: 2
updates:
# v5
- package-ecosystem: "pip"
directory: "/"
schedule:
@@ -14,6 +15,7 @@ updates:
labels:
- "dependencies"
- "pip"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
@@ -24,20 +26,55 @@ updates:
- "dependencies"
- "github_actions"
- package-ecosystem: "pip"
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "npm"
# v4.6
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "pip"
- "v4"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "github_actions"
- "v4"
# v3
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "pip"
- "v3"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:

View File

@@ -0,0 +1,81 @@
name: API - Build and Push containers
on:
push:
branches:
- "master"
paths:
- "api/**"
- ".github/workflows/api-build-lint-push-containers.yml"
# Uncomment the code below to test this action on PRs
# pull_request:
# branches:
# - "master"
# paths:
# - "api/**"
# - ".github/workflows/api-build-lint-push-containers.yml"
release:
types: [published]
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
WORKING_DIRECTORY: ./api
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api
jobs:
# Build Prowler OSS container
container-build-push:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ env.WORKING_DIRECTORY }}
steps:
- name: Repository check
working-directory: /tmp
run: |
[[ ${{ github.repository }} != "prowler-cloud/prowler" ]] && echo "This action only runs for prowler-cloud/prowler"; exit 0
- name: Checkout
uses: actions/checkout@v4
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -9,11 +9,11 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "API - CodeQL"
name: API - CodeQL
on:
push:
branches:
branches:
- "master"
- "v3"
- "v4.*"
@@ -21,7 +21,7 @@ on:
paths:
- "api/**"
pull_request:
branches:
branches:
- "master"
- "v3"
- "v4.*"

View File

@@ -1,4 +1,4 @@
name: "API - Pull Request"
name: API - Pull Request
on:
push:
@@ -69,6 +69,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
@@ -80,18 +81,21 @@ jobs:
api/permissions/**
api/README.md
api/mkdocs.yml
- name: Install poetry
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install dependencies
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
@@ -109,48 +113,59 @@ jobs:
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry lock --check
- name: Lint with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff check . --exclude contrib
- name: Check Format with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff format --check . --exclude contrib
- name: Lint with pylint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
- name: Bandit
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check --ignore 70612,66963
- name: Vulture
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
- name: Hadolint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest --cov=./src/backend --cov-report=xml src/backend
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: api

View File

@@ -1,42 +1,47 @@
name: Automatic Backport
name: Prowler - Automatic Backport
on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
env:
# The prefix of the label that triggers the backport must not contain the branch name
# so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_IGNORE: was-backported
jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
runs-on: ubuntu-latest
permissions:
id-token: write
pull-requests: write
contents: write
steps:
# Workaround not to fail the workflow if the PR does not need a backport
# https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
- name: Check for backport labels
id: check_labels
run: |-
labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
echo "$labels"
matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
echo "matched=$matched"
echo "matched=$matched" >> $GITHUB_OUTPUT
- name: Check labels
id: preview_label_check
uses: docker://agilepathway/pull-request-label-checker:v1.6.55
with:
allow_failure: true
prefix_mode: true
any_of: ${{ env.BACKPORT_LABEL_PREFIX }}
none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
repo_token: ${{ secrets.GITHUB_TOKEN }}
- name: Backport Action
if: fromJSON(steps.check_labels.outputs.matched) > 0
if: steps.preview_label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: backport-to-
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
- name: Info log
if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
run: cat ~/.backport/backport.debug.log

View File

@@ -1,4 +1,4 @@
name: Pull Request Documentation Link
name: Prowler - Pull Request Documentation Link
on:
pull_request:

View File

@@ -1,4 +1,4 @@
name: Find secrets
name: Prowler - Find secrets
on: pull_request
@@ -11,9 +11,9 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.84.1
uses: trufflesecurity/trufflehog@v3.86.1
with:
path: ./
base: ${{ github.event.repository.default_branch }}
head: HEAD
extra_args: --only-verified
extra_args: --only-verified

View File

@@ -1,4 +1,4 @@
name: "Pull Request Labeler"
name: Prowler - PR Labeler
on:
pull_request_target:

View File

@@ -1,9 +1,13 @@
name: Build and Push containers
name: SDK - Build and Push containers
on:
push:
branches:
# For `v3-latest`
- "v3"
# For `v4-latest`
- "v4.6"
# For `latest`
- "master"
paths-ignore:
- ".github/**"
@@ -85,8 +89,8 @@ jobs:
echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
;;
4)
4)
echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
;;

View File

@@ -9,11 +9,11 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
name: SDK - CodeQL
on:
push:
branches:
branches:
- "master"
- "v3"
- "v4.*"
@@ -21,7 +21,7 @@ on:
- 'ui/**'
- 'api/**'
pull_request:
branches:
branches:
- "master"
- "v3"
- "v4.*"
@@ -55,7 +55,7 @@ jobs:
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/codeql-config.yml
config-file: ./.github/codeql/sdk-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3

View File

@@ -1,4 +1,4 @@
name: "Pull Request"
name: SDK - Pull Request
on:
push:
@@ -22,6 +22,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
@@ -36,17 +37,20 @@ jobs:
README.md
mkdocs.yml
.backportrc.json
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install dependencies
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
@@ -57,44 +61,56 @@ jobs:
sed -E 's/.*"v([^"]+)".*/\1/' \
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
&& chmod +x /tmp/hadolint
- name: Poetry check
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry lock --check
- name: Lint with flake8
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api
- name: Checking format with black
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run black --exclude api ui --check .
- name: Lint with pylint
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
- name: Bandit
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .
- name: Safety
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check --ignore 70612 -r pyproject.toml
- name: Vulture
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
- name: Hadolint
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler

View File

@@ -1,4 +1,4 @@
name: PyPI release
name: SDK - PyPI release
on:
release:

View File

@@ -1,6 +1,6 @@
# This is a basic workflow to help you get started with Actions
name: Refresh regions of AWS services
name: SDK - Refresh AWS services' regions
on:
schedule:

View File

@@ -0,0 +1,81 @@
name: UI - Build and Push containers
on:
push:
branches:
- "master"
paths:
- "ui/**"
- ".github/workflows/ui-build-lint-push-containers.yml"
# Uncomment the below code to test this action on PRs
# pull_request:
# branches:
# - "master"
# paths:
# - "ui/**"
# - ".github/workflows/ui-build-lint-push-containers.yml"
release:
types: [published]
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
WORKING_DIRECTORY: ./ui
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
jobs:
# Build Prowler OSS container
container-build-push:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ env.WORKING_DIRECTORY }}
steps:
- name: Repository check
working-directory: /tmp
run: |
[[ ${{ github.repository }} != "prowler-cloud/prowler" ]] && echo "This action only runs for prowler-cloud/prowler"; exit 0
- name: Checkout
uses: actions/checkout@v4
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -9,7 +9,7 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "UI - CodeQL"
name: UI - CodeQL
on:
push:

View File

@@ -1,4 +1,4 @@
name: "UI - Pull Request"
name: UI - Pull Request
on:
pull_request:
@@ -31,4 +31,4 @@ jobs:
run: npm run healthcheck
- name: Build the application
working-directory: ./ui
run: npm run build
run: npm run build

View File

@@ -1,4 +1,4 @@
FROM python:3.12-alpine
FROM python:3.12.8-alpine3.20
LABEL maintainer="https://github.com/prowler-cloud/prowler"

View File

@@ -3,7 +3,7 @@
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler SaaS</i></b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they're meant to protect. Trusted by the leaders in security.
<b><i>Prowler Open Source</i></b> is as dynamic and adaptable as the environment it's meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
@@ -29,7 +29,7 @@
<p align="center">
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler?include_prereleases"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>
@@ -43,7 +43,7 @@
# Description
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
## Prowler App
@@ -139,6 +139,19 @@ cd src/backend
python -m celery -A config.celery worker -l info -E
```
**Commands to run the API Scheduler**
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
set -a
source .env
cd src/backend
python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
**Commands to run the UI**
``` console

1956
api/poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -27,7 +27,7 @@ drf-nested-routers = "^0.94.1"
drf-spectacular = "0.27.2"
drf-spectacular-jsonapi = "0.5.1"
gunicorn = "23.0.0"
prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "master"}
prowler = {git = "https://github.com/prowler-cloud/prowler.git", tag = "5.0.0"}
psycopg2-binary = "2.9.9"
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
# Needed for prowler compatibility

View File

@@ -1,6 +1,7 @@
import uuid
from django.db import transaction, connection
from django.core.exceptions import ObjectDoesNotExist
from django.db import connection, transaction
from rest_framework import permissions
from rest_framework.exceptions import NotAuthenticated
from rest_framework.filters import SearchFilter
@@ -10,6 +11,8 @@ from rest_framework_json_api.views import ModelViewSet
from rest_framework_simplejwt.authentication import JWTAuthentication
from api.filters import CustomDjangoFilterBackend
from api.models import Role, Tenant
from api.db_router import MainRouter
class BaseViewSet(ModelViewSet):
@@ -66,13 +69,67 @@ class BaseRLSViewSet(BaseViewSet):
class BaseTenantViewset(BaseViewSet):
def dispatch(self, request, *args, **kwargs):
with transaction.atomic():
return super().dispatch(request, *args, **kwargs)
tenant = super().dispatch(request, *args, **kwargs)
try:
# If the request is a POST, create the admin role
if request.method == "POST":
isinstance(tenant, dict) and self._create_admin_role(tenant.data["id"])
except Exception as e:
self._handle_creation_error(e, tenant)
raise
return tenant
def _create_admin_role(self, tenant_id):
    """Create the default "admin" role for a freshly created tenant.

    The insert goes through the admin DB alias (``MainRouter.admin_db``) so
    it is not blocked by row-level security, and the role is granted every
    ``manage_*`` flag plus ``unlimited_visibility``.
    """
    full_access = {
        flag: True
        for flag in (
            "manage_users",
            "manage_account",
            "manage_billing",
            "manage_providers",
            "manage_integrations",
            "manage_scans",
            "unlimited_visibility",
        )
    }
    Role.objects.using(MainRouter.admin_db).create(
        name="admin",
        tenant_id=tenant_id,
        **full_access,
    )
def _handle_creation_error(self, error, tenant):
    """Best-effort rollback of a partially created tenant.

    Invoked when creating the default admin role fails after the tenant row
    was already written; deletes the orphan tenant through the admin DB
    alias (bypassing RLS) so the failed POST leaves no residue behind.

    Args:
        error: the exception raised during role creation. It is not
            inspected here; the caller re-raises it after this cleanup.
        tenant: the DRF response whose ``data`` payload carries the
            created tenant's ``id``.
    """
    tenant_id = tenant.data.get("id")
    if tenant_id:
        # QuerySet.delete() is a no-op when no rows match and never raises
        # ObjectDoesNotExist, so the previous try/except around this call
        # was dead code and has been removed.
        Tenant.objects.using(MainRouter.admin_db).filter(id=tenant_id).delete()
def initial(self, request, *args, **kwargs):
user_id = str(request.user.id)
if (
request.resolver_match.url_name != "tenant-detail"
and request.method != "DELETE"
):
user_id = str(request.user.id)
with connection.cursor() as cursor:
cursor.execute(f"SELECT set_config('api.user_id', '{user_id}', TRUE);")
return super().initial(request, *args, **kwargs)
# TODO: DRY this when we have time
if request.auth is None:
raise NotAuthenticated
tenant_id = request.auth.get("tenant_id")
if tenant_id is None:
raise NotAuthenticated("Tenant ID is not present in token")
try:
uuid.UUID(tenant_id)
except ValueError:
raise ValidationError("Tenant ID must be a valid UUID")
with connection.cursor() as cursor:
cursor.execute(f"SELECT set_config('api.user_id', '{user_id}', TRUE);")
cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
self.request.tenant_id = tenant_id
return super().initial(request, *args, **kwargs)

View File

@@ -22,13 +22,10 @@ from api.db_utils import (
StatusEnumField,
)
from api.models import (
ComplianceOverview,
Finding,
Invitation,
Membership,
Provider,
ProviderGroup,
ProviderSecret,
Resource,
ResourceTag,
Scan,
@@ -36,6 +33,10 @@ from api.models import (
SeverityChoices,
StateChoices,
StatusChoices,
ProviderSecret,
Invitation,
Role,
ComplianceOverview,
Task,
User,
)
@@ -481,6 +482,43 @@ class UserFilter(FilterSet):
}
class RoleFilter(FilterSet):
    """Filterset for ``Role`` with an aggregate ``permission_state`` filter."""

    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    permission_state = CharFilter(method="filter_permission_state")

    def filter_permission_state(self, queryset, name, value):
        # Collapse the boolean permission columns into one of three states:
        #   "unlimited" -> every flag is True
        #   "none"      -> every flag is False
        #   "limited"   -> any mixture in between
        # Any other value matches no rows at all.
        flags = (
            "manage_users",
            "manage_account",
            "manage_billing",
            "manage_providers",
            "manage_integrations",
            "manage_scans",
        )
        all_granted = Q(**dict.fromkeys(flags, True))
        all_denied = Q(**dict.fromkeys(flags, False))

        if value == "unlimited":
            return queryset.filter(all_granted)
        if value == "none":
            return queryset.filter(all_denied)
        if value == "limited":
            return queryset.exclude(all_granted | all_denied)
        return queryset.none()

    class Meta:
        model = Role
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
class ComplianceOverviewFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)

View File

@@ -58,5 +58,96 @@
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"inserted_at": "2024-11-13T11:55:41.237Z"
}
},
{
"model": "api.role",
"pk": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "admin",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": true,
"manage_scans": true,
"unlimited_visibility": true,
"inserted_at": "2024-11-20T15:32:42.402Z",
"updated_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.role",
"pk": "845ff03a-87ef-42ba-9786-6577c70c4df0",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "first_role",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": false,
"manage_scans": false,
"unlimited_visibility": true,
"inserted_at": "2024-11-20T15:31:53.239Z",
"updated_at": "2024-11-20T15:31:53.239Z"
}
},
{
"model": "api.role",
"pk": "902d726c-4bd5-413a-a2a4-f7b4754b6b20",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "third_role",
"manage_users": false,
"manage_account": false,
"manage_billing": false,
"manage_providers": false,
"manage_integrations": false,
"manage_scans": true,
"unlimited_visibility": false,
"inserted_at": "2024-11-20T15:34:05.440Z",
"updated_at": "2024-11-20T15:34:05.440Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "57fd024a-0a7f-49b4-a092-fa0979a07aaf",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "a3cd0099-1c13-4df1-a5e5-ecdfec561b35",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "481769f5-db2b-447b-8b00-1dee18db90ec",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "cfd84182-a058-40c2-af3c-0189b174940f",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.userrolerelationship",
"pk": "92339663-e954-4fd8-98fb-8bfe15949975",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"inserted_at": "2024-11-20T15:36:14.302Z"
}
}
]

View File

@@ -552,7 +552,7 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="providergroupmembership",
constraint=models.UniqueConstraint(
fields=("provider_id", "provider_group"),
fields=("provider_id", "provider_group_id"),
name="unique_provider_group_membership",
),
),

View File

@@ -0,0 +1,246 @@
# Generated by Django 5.1.1 on 2024-12-05 12:29
import api.rls
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Introduce the RBAC data model.

    Creates the Role model (a per-tenant bundle of boolean permission flags)
    plus explicit through tables linking roles to provider groups, users and
    invitations. Every new table gets a tenant-scoped row-level-security
    constraint and a uniqueness constraint on its natural key.
    """

    dependencies = [
        ("api", "0002_token_migrations"),
    ]

    operations = [
        # Core Role model: named set of permission flags, scoped to a tenant.
        migrations.CreateModel(
            name="Role",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("name", models.CharField(max_length=255)),
                ("manage_users", models.BooleanField(default=False)),
                ("manage_account", models.BooleanField(default=False)),
                ("manage_billing", models.BooleanField(default=False)),
                ("manage_providers", models.BooleanField(default=False)),
                ("manage_integrations", models.BooleanField(default=False)),
                ("manage_scans", models.BooleanField(default=False)),
                ("unlimited_visibility", models.BooleanField(default=False)),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "roles",
            },
        ),
        # Through table: Role <-> ProviderGroup (FKs added separately below).
        migrations.CreateModel(
            name="RoleProviderGroupRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_provider_group_relationship",
            },
        ),
        # Through table: Role <-> User (FKs added separately below).
        migrations.CreateModel(
            name="UserRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_user_relationship",
            },
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="provider_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.providergroup"
            ),
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="provider_groups",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.RoleProviderGroupRelationship",
                to="api.providergroup",
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="users",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.UserRoleRelationship",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        # Natural-key uniqueness and tenant-scoped RLS for each new table.
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "provider_group_id"),
                name="unique_role_provider_group_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_roleprovidergrouprelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "user_id"), name="unique_role_user_relationship"
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_userrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        # Role names are unique per tenant, not globally.
        migrations.AddConstraint(
            model_name="role",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "name"), name="unique_role_per_tenant"
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_role",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        # Through table: Role <-> Invitation (FKs declared inline here).
        migrations.CreateModel(
            name="InvitationRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "invitation",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.invitation"
                    ),
                ),
                (
                    "role",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.role"
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_invitation_relationship",
            },
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "invitation_id"),
                name="unique_role_invitation_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_invitationrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="invitations",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.InvitationRoleRelationship",
                to="api.invitation",
            ),
        ),
    ]

View File

@@ -294,29 +294,20 @@ class ProviderGroup(RowLevelSecurityProtectedModel):
]
class JSONAPIMeta:
resource_name = "provider-groups"
resource_name = "provider-group"
class ProviderGroupMembership(RowLevelSecurityProtectedModel):
objects = ActiveProviderManager()
all_objects = models.Manager()
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
provider = models.ForeignKey(
Provider,
on_delete=models.CASCADE,
)
provider_group = models.ForeignKey(
ProviderGroup,
on_delete=models.CASCADE,
)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
provider_group = models.ForeignKey(ProviderGroup, on_delete=models.CASCADE)
provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
inserted_at = models.DateTimeField(auto_now_add=True)
class Meta:
db_table = "provider_group_memberships"
constraints = [
models.UniqueConstraint(
fields=["provider_id", "provider_group"],
fields=["provider_id", "provider_group_id"],
name="unique_provider_group_membership",
),
RowLevelSecurityConstraint(
@@ -327,7 +318,7 @@ class ProviderGroupMembership(RowLevelSecurityProtectedModel):
]
class JSONAPIMeta:
resource_name = "provider-group-memberships"
resource_name = "provider_groups-provider"
class Task(RowLevelSecurityProtectedModel):
@@ -851,6 +842,118 @@ class Invitation(RowLevelSecurityProtectedModel):
resource_name = "invitations"
class Role(RowLevelSecurityProtectedModel):
    """A per-tenant, named bundle of boolean permission flags.

    Roles are attached to users, provider groups and invitations through
    explicit relationship tables so each link carries its own tenant and
    insertion timestamp.
    """

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    name = models.CharField(max_length=255)
    # Capability flags; each gates one functional area of the API.
    manage_users = models.BooleanField(default=False)
    manage_account = models.BooleanField(default=False)
    manage_billing = models.BooleanField(default=False)
    manage_providers = models.BooleanField(default=False)
    manage_integrations = models.BooleanField(default=False)
    manage_scans = models.BooleanField(default=False)
    # NOTE(review): presumably grants visibility over all providers
    # regardless of the provider_groups assignments below — confirm against
    # the views that consume it.
    unlimited_visibility = models.BooleanField(default=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    provider_groups = models.ManyToManyField(
        ProviderGroup, through="RoleProviderGroupRelationship", related_name="roles"
    )
    users = models.ManyToManyField(
        User, through="UserRoleRelationship", related_name="roles"
    )
    invitations = models.ManyToManyField(
        Invitation, through="InvitationRoleRelationship", related_name="roles"
    )

    class Meta:
        db_table = "roles"
        constraints = [
            # Role names are unique within a tenant, not globally.
            models.UniqueConstraint(
                fields=["tenant_id", "name"],
                name="unique_role_per_tenant",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "role"
class RoleProviderGroupRelationship(RowLevelSecurityProtectedModel):
    """Through table for Role <-> ProviderGroup, unique per (role, group)."""

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    provider_group = models.ForeignKey(ProviderGroup, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "role_provider_group_relationship"
        constraints = [
            models.UniqueConstraint(
                fields=["role_id", "provider_group_id"],
                name="unique_role_provider_group_relationship",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "role-provider_groups"
class UserRoleRelationship(RowLevelSecurityProtectedModel):
    """Through table for Role <-> User, unique per (role, user)."""

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "role_user_relationship"
        constraints = [
            models.UniqueConstraint(
                fields=["role_id", "user_id"],
                name="unique_role_user_relationship",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "user-roles"
class InvitationRoleRelationship(RowLevelSecurityProtectedModel):
    """Through table for Role <-> Invitation, unique per (role, invitation)."""

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    invitation = models.ForeignKey(Invitation, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "role_invitation_relationship"
        constraints = [
            models.UniqueConstraint(
                fields=["role_id", "invitation_id"],
                name="unique_role_invitation_relationship",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "invitation-roles"
class ComplianceOverview(RowLevelSecurityProtectedModel):
objects = ActiveProviderManager()
all_objects = models.Manager()

View File

@@ -0,0 +1,40 @@
from config.django.base import DISABLE_RBAC
from enum import Enum
from rest_framework.permissions import BasePermission
# Enumeration of role permission flags. Each member's value is the name of
# the matching boolean field on the Role model, so a permission check can be
# performed with getattr(role, permission.value).
Permissions = Enum(
    "Permissions",
    {
        "MANAGE_USERS": "manage_users",
        "MANAGE_ACCOUNT": "manage_account",
        "MANAGE_BILLING": "manage_billing",
        "MANAGE_PROVIDERS": "manage_providers",
        "MANAGE_INTEGRATIONS": "manage_integrations",
        "MANAGE_SCANS": "manage_scans",
        "UNLIMITED_VISIBILITY": "unlimited_visibility",
    },
)
class HasPermissions(BasePermission):
    """
    Custom permission to check if the user's role has the required permissions.

    The required permissions should be specified in the view as a list of
    ``Permissions`` enum members in `required_permissions`; each member's
    ``.value`` must match a boolean attribute on the role object.
    """

    def has_permission(self, request, view):
        # This is for testing/demo purposes only: when the DISABLE_RBAC
        # setting is on, every request is allowed through.
        if DISABLE_RBAC:
            return True

        # Views that declare no required_permissions are open to any
        # authenticated user.
        required_permissions = getattr(view, "required_permissions", [])
        if not required_permissions:
            return True

        user_roles = request.user.roles.all()
        if not user_roles:
            return False

        # NOTE(review): only the FIRST role (user_roles[0]) is inspected;
        # permissions granted by any additional role are ignored, and the
        # queryset has no explicit ordering. Confirm whether multi-role
        # users are expected here.
        for perm in required_permissions:
            if not getattr(user_roles[0], perm.value, False):
                return False

        return True

File diff suppressed because it is too large Load Diff

View File

@@ -11,6 +11,7 @@ from conftest import TEST_USER, TEST_PASSWORD, get_api_tokens, get_authorization
def test_check_resources_between_different_tenants(
schedule_mock,
enforce_test_user_db_connection,
patch_testing_flag,
authenticated_api_client,
tenants_fixture,
):

View File

@@ -0,0 +1,302 @@
# TODO: Enable these tests
import pytest
from django.urls import reverse
from rest_framework import status
from unittest.mock import patch, ANY, Mock
@pytest.mark.django_db
class TestUserViewSet:
    """RBAC behaviour of the /users endpoints.

    Each operation is exercised twice: with a client whose role grants all
    permissions and with a client whose role grants none. List/retrieve/
    update/delete require permissions (403 without them); user creation and
    /me deliberately succeed for both clients.
    """

    def test_list_users_with_all_permissions(self, authenticated_client_rbac):
        response = authenticated_client_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_200_OK
        assert isinstance(response.json()["data"], list)

    def test_list_users_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_retrieve_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.db_router.MainRouter.admin_db", new="default")
    def test_create_user_with_all_permissions(self, authenticated_client_rbac):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    @patch("api.db_router.MainRouter.admin_db", new="default")
    def test_create_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_no_permissions_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        # Creation succeeds (201) even without permissions — presumably the
        # signup path is intentionally open; confirm with the view's design.
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_partial_update_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        updated_data = {
            "data": {
                "type": "users",
                "id": str(create_test_user_rbac.id),
                "attributes": {"name": "Updated Name"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id}),
            data=updated_data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["name"] == "Updated Name"

    def test_partial_update_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        updated_data = {
            "data": {
                "type": "users",
                "attributes": {"name": "Updated Name"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user.id}),
            data=updated_data,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_delete_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT

    def test_delete_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_me_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_me_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        # /me is accessible regardless of role permissions.
        response = authenticated_client_no_permissions_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["email"] == "rbac_limited@rbac.com"
@pytest.mark.django_db
class TestProviderViewSet:
    """RBAC behaviour of the /providers endpoints.

    Fully permissioned clients can list/retrieve/create/update/delete and
    trigger connection checks; clients without permissions get 403 on
    mutations, an empty list on listing, and 404 on retrieval (the provider
    is invisible rather than forbidden).
    """

    def test_list_providers_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        response = authenticated_client_rbac.get(reverse("provider-list"))
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == len(providers_fixture)

    def test_list_providers_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        # Listing succeeds but visibility filtering yields no rows.
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-list")
        )
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

    def test_retrieve_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == provider.alias

    def test_retrieve_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_create_provider_with_all_permissions(self, authenticated_client_rbac):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["alias"] == "new_alias"

    def test_create_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_partial_update_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        payload = {
            "data": {
                "type": "providers",
                "id": provider.id,
                "attributes": {"alias": "updated_alias"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=payload,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == "updated_alias"

    def test_partial_update_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        update_payload = {
            "data": {
                "type": "providers",
                "attributes": {"alias": "updated_alias"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=update_payload,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.delete_provider_task.delay")
    def test_delete_provider_with_all_permissions(
        self,
        mock_delete_task,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        # Deletion is asynchronous: the view enqueues a task and answers 202
        # with a Content-Location pointing at the task resource.
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        mock_delete_task.return_value = task_mock
        mock_task_get.return_value = prowler_task

        provider1, *_ = providers_fixture
        response = authenticated_client_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_delete_task.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_delete_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.check_provider_connection_task.delay")
    def test_connection_with_all_permissions(
        self,
        mock_provider_connection,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        # Connection checks are asynchronous too: 202 plus a task location.
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        task_mock.status = "PENDING"
        mock_provider_connection.return_value = task_mock
        mock_task_get.return_value = prowler_task

        provider1, *_ = providers_fixture
        assert provider1.connected is None
        assert provider1.connection_last_checked_at is None

        response = authenticated_client_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_provider_connection.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_connection_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

View File

@@ -9,11 +9,14 @@ from django.urls import reverse
from rest_framework import status
from api.models import (
Invitation,
Membership,
Provider,
ProviderGroup,
ProviderGroupMembership,
Role,
RoleProviderGroupRelationship,
Invitation,
UserRoleRelationship,
ProviderSecret,
Scan,
StateChoices,
@@ -24,6 +27,14 @@ from api.rls import Tenant
TODAY = str(datetime.today().date())
# autouse=True makes pytest request this fixture for every test in the file,
# which in turn pulls in patch_testing_flag without each test naming it.
@pytest.fixture(autouse=True)
def enable_testing_flag(patch_testing_flag):
    """
    Automatically applies the patch_testing_flag fixture to all tests in this file.
    """
    pass
@pytest.mark.django_db
class TestUserViewSet:
def test_users_list(self, authenticated_client, create_test_user):
@@ -418,13 +429,24 @@ class TestTenantViewSet:
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_tenants_delete(self, authenticated_client, tenants_fixture):
    @patch("api.db_router.MainRouter.admin_db", new="default")
    @patch("api.v1.views.delete_tenant_task.apply_async")
    def test_tenants_delete(
        self, delete_tenant_mock, authenticated_client, tenants_fixture
    ):
        # The view delegates deletion to a Celery task via
        # apply_async(kwargs={...}); the side effect performs the deletion
        # synchronously so the assertions below can observe it.
        def _delete_tenant(kwargs):
            Tenant.objects.filter(pk=kwargs.get("tenant_id")).delete()

        delete_tenant_mock.side_effect = _delete_tenant
        tenant1, *_ = tenants_fixture
        response = authenticated_client.delete(
            reverse("tenant-detail", kwargs={"pk": tenant1.id})
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT
        assert Tenant.objects.count() == len(tenants_fixture) - 1
        assert Membership.objects.filter(tenant_id=tenant1.id).count() == 0
        # User is not deleted because it has another membership
        assert User.objects.count() == 1
def test_tenants_delete_invalid(self, authenticated_client):
response = authenticated_client.delete(
@@ -1189,7 +1211,7 @@ class TestProviderGroupViewSet:
def test_provider_group_create(self, authenticated_client):
data = {
"data": {
"type": "provider-groups",
"type": "provider-group",
"attributes": {
"name": "Test Provider Group",
},
@@ -1208,7 +1230,7 @@ class TestProviderGroupViewSet:
def test_provider_group_create_invalid(self, authenticated_client):
data = {
"data": {
"type": "provider-groups",
"type": "provider-group",
"attributes": {
# Name is missing
},
@@ -1230,7 +1252,7 @@ class TestProviderGroupViewSet:
data = {
"data": {
"id": str(provider_group.id),
"type": "provider-groups",
"type": "provider-group",
"attributes": {
"name": "Updated Provider Group Name",
},
@@ -1252,7 +1274,7 @@ class TestProviderGroupViewSet:
data = {
"data": {
"id": str(provider_group.id),
"type": "provider-groups",
"type": "provider-group",
"attributes": {
"name": "", # Invalid name
},
@@ -1283,100 +1305,6 @@ class TestProviderGroupViewSet:
)
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_provider_group_providers_update(
self, authenticated_client, provider_groups_fixture, providers_fixture
):
provider_group = provider_groups_fixture[0]
provider_ids = [str(provider.id) for provider in providers_fixture]
data = {
"data": {
"type": "provider-group-memberships",
"id": str(provider_group.id),
"attributes": {"provider_ids": provider_ids},
}
}
response = authenticated_client.put(
reverse("providergroup-providers", kwargs={"pk": provider_group.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_200_OK
memberships = ProviderGroupMembership.objects.filter(
provider_group=provider_group
)
assert memberships.count() == len(provider_ids)
for membership in memberships:
assert str(membership.provider_id) in provider_ids
def test_provider_group_providers_update_non_existent_provider(
self, authenticated_client, provider_groups_fixture, providers_fixture
):
provider_group = provider_groups_fixture[0]
provider_ids = [str(provider.id) for provider in providers_fixture]
provider_ids[-1] = "1b59e032-3eb6-4694-93a5-df84cd9b3ce2"
data = {
"data": {
"type": "provider-group-memberships",
"id": str(provider_group.id),
"attributes": {"provider_ids": provider_ids},
}
}
response = authenticated_client.put(
reverse("providergroup-providers", kwargs={"pk": provider_group.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert (
errors[0]["detail"]
== f"The following provider IDs do not exist: {provider_ids[-1]}"
)
def test_provider_group_providers_update_invalid_provider(
self, authenticated_client, provider_groups_fixture
):
provider_group = provider_groups_fixture[1]
invalid_provider_id = "non-existent-id"
data = {
"data": {
"type": "provider-group-memberships",
"id": str(provider_group.id),
"attributes": {"provider_ids": [invalid_provider_id]},
}
}
response = authenticated_client.put(
reverse("providergroup-providers", kwargs={"pk": provider_group.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert errors[0]["detail"] == "Must be a valid UUID."
def test_provider_group_providers_update_invalid_payload(
self, authenticated_client, provider_groups_fixture
):
provider_group = provider_groups_fixture[2]
data = {
# Missing "provider_ids"
}
response = authenticated_client.put(
reverse("providergroup-providers", kwargs={"pk": provider_group.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert errors[0]["detail"] == "Received document does not contain primary data"
def test_provider_group_retrieve_not_found(self, authenticated_client):
response = authenticated_client.get(
reverse("providergroup-detail", kwargs={"pk": "non-existent-id"})
@@ -2641,7 +2569,9 @@ class TestInvitationViewSet:
)
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_invitations_create_valid(self, authenticated_client, create_test_user):
def test_invitations_create_valid(
self, authenticated_client, create_test_user, roles_fixture
):
user = create_test_user
data = {
"data": {
@@ -2650,6 +2580,11 @@ class TestInvitationViewSet:
"email": "any_email@prowler.com",
"expires_at": self.TOMORROW_ISO,
},
"relationships": {
"roles": {
"data": [{"type": "role", "id": str(roles_fixture[0].id)}]
}
},
}
}
response = authenticated_client.post(
@@ -2708,6 +2643,11 @@ class TestInvitationViewSet:
response.json()["errors"][0]["source"]["pointer"]
== "/data/attributes/email"
)
assert response.json()["errors"][1]["code"] == "required"
assert (
response.json()["errors"][1]["source"]["pointer"]
== "/data/relationships/roles"
)
def test_invitations_create_invalid_expires_at(
self, authenticated_client, invitations_fixture
@@ -2734,6 +2674,11 @@ class TestInvitationViewSet:
response.json()["errors"][0]["source"]["pointer"]
== "/data/attributes/expires_at"
)
assert response.json()["errors"][1]["code"] == "required"
assert (
response.json()["errors"][1]["source"]["pointer"]
== "/data/relationships/roles"
)
def test_invitations_partial_update_valid(
self, authenticated_client, invitations_fixture
@@ -2972,7 +2917,6 @@ class TestInvitationViewSet:
response = authenticated_client.post(
reverse("invitation-accept"), data=data, format="json"
)
assert response.status_code == status.HTTP_201_CREATED
invitation.refresh_from_db()
assert Membership.objects.filter(
@@ -3155,6 +3099,596 @@ class TestInvitationViewSet:
assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
class TestRoleViewSet:
def test_role_list(self, authenticated_client, roles_fixture):
response = authenticated_client.get(reverse("role-list"))
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == len(roles_fixture)
def test_role_retrieve(self, authenticated_client, roles_fixture):
role = roles_fixture[0]
response = authenticated_client.get(
reverse("role-detail", kwargs={"pk": role.id})
)
assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]
assert data["id"] == str(role.id)
assert data["attributes"]["name"] == role.name
def test_role_create(self, authenticated_client):
data = {
"data": {
"type": "role",
"attributes": {
"name": "Test Role",
"manage_users": "false",
"manage_account": "false",
"manage_billing": "false",
"manage_providers": "true",
"manage_integrations": "true",
"manage_scans": "true",
"unlimited_visibility": "true",
},
"relationships": {"provider_groups": {"data": []}},
}
}
response = authenticated_client.post(
reverse("role-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
response_data = response.json()["data"]
assert response_data["attributes"]["name"] == "Test Role"
assert Role.objects.filter(name="Test Role").exists()
def test_role_provider_groups_create(
self, authenticated_client, provider_groups_fixture
):
data = {
"data": {
"type": "role",
"attributes": {
"name": "Test Role",
"manage_users": "false",
"manage_account": "false",
"manage_billing": "false",
"manage_providers": "true",
"manage_integrations": "true",
"manage_scans": "true",
"unlimited_visibility": "true",
},
"relationships": {
"provider_groups": {
"data": [
{"type": "provider-group", "id": str(provider_group.id)}
for provider_group in provider_groups_fixture[:2]
]
}
},
}
}
response = authenticated_client.post(
reverse("role-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
response_data = response.json()["data"]
assert response_data["attributes"]["name"] == "Test Role"
assert Role.objects.filter(name="Test Role").exists()
relationships = (
Role.objects.filter(name="Test Role").first().provider_groups.all()
)
assert relationships.count() == 2
for relationship in relationships:
assert relationship.id in [pg.id for pg in provider_groups_fixture[:2]]
def test_role_create_invalid(self, authenticated_client):
data = {
"data": {
"type": "role",
"attributes": {
# Name is missing
},
}
}
response = authenticated_client.post(
reverse("role-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert errors[0]["source"]["pointer"] == "/data/attributes/name"
def test_role_partial_update(self, authenticated_client, roles_fixture):
role = roles_fixture[1]
data = {
"data": {
"id": str(role.id),
"type": "role",
"attributes": {
"name": "Updated Provider Group Name",
},
}
}
response = authenticated_client.patch(
reverse("role-detail", kwargs={"pk": role.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_200_OK
role.refresh_from_db()
assert role.name == "Updated Provider Group Name"
def test_role_partial_update_invalid(self, authenticated_client, roles_fixture):
role = roles_fixture[2]
data = {
"data": {
"id": str(role.id),
"type": "role",
"attributes": {
"name": "", # Invalid name
},
}
}
response = authenticated_client.patch(
reverse("role-detail", kwargs={"pk": role.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert errors[0]["source"]["pointer"] == "/data/attributes/name"
def test_role_destroy(self, authenticated_client, roles_fixture):
role = roles_fixture[2]
response = authenticated_client.delete(
reverse("role-detail", kwargs={"pk": role.id})
)
assert response.status_code == status.HTTP_204_NO_CONTENT
assert not Role.objects.filter(id=role.id).exists()
def test_role_destroy_invalid(self, authenticated_client):
response = authenticated_client.delete(
reverse("role-detail", kwargs={"pk": "non-existent-id"})
)
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_role_retrieve_not_found(self, authenticated_client):
response = authenticated_client.get(
reverse("role-detail", kwargs={"pk": "non-existent-id"})
)
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_role_list_filters(self, authenticated_client, roles_fixture):
role = roles_fixture[0]
response = authenticated_client.get(
reverse("role-list"), {"filter[name]": role.name}
)
assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]
assert len(data) == 1
assert data[0]["attributes"]["name"] == role.name
def test_role_list_sorting(self, authenticated_client, roles_fixture):
response = authenticated_client.get(reverse("role-list"), {"sort": "name"})
assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]
names = [item["attributes"]["name"] for item in data]
assert names == sorted(names)
def test_role_invalid_method(self, authenticated_client):
response = authenticated_client.put(reverse("role-list"))
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
@pytest.mark.django_db
class TestUserRoleRelationshipViewSet:
def test_create_relationship(
self, authenticated_client, roles_fixture, create_test_user
):
data = {
"data": [{"type": "role", "id": str(role.id)} for role in roles_fixture[:2]]
}
response = authenticated_client.post(
reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = UserRoleRelationship.objects.filter(user=create_test_user.id)
assert relationships.count() == 2
for relationship in relationships[1:]: # Skip admin role
assert relationship.role.id in [r.id for r in roles_fixture[:2]]
def test_create_relationship_already_exists(
self, authenticated_client, roles_fixture, create_test_user
):
data = {
"data": [{"type": "role", "id": str(role.id)} for role in roles_fixture[:2]]
}
authenticated_client.post(
reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
data=data,
content_type="application/vnd.api+json",
)
data = {
"data": [
{"type": "role", "id": str(roles_fixture[0].id)},
]
}
response = authenticated_client.post(
reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]["detail"]
assert "already associated" in errors
def test_partial_update_relationship(
self, authenticated_client, roles_fixture, create_test_user
):
data = {
"data": [
{"type": "role", "id": str(roles_fixture[1].id)},
]
}
response = authenticated_client.patch(
reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = UserRoleRelationship.objects.filter(user=create_test_user.id)
assert relationships.count() == 1
assert {rel.role.id for rel in relationships} == {roles_fixture[1].id}
data = {
"data": [
{"type": "role", "id": str(roles_fixture[1].id)},
{"type": "role", "id": str(roles_fixture[2].id)},
]
}
response = authenticated_client.patch(
reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = UserRoleRelationship.objects.filter(user=create_test_user.id)
assert relationships.count() == 2
assert {rel.role.id for rel in relationships} == {
roles_fixture[1].id,
roles_fixture[2].id,
}
def test_destroy_relationship(
self, authenticated_client, roles_fixture, create_test_user
):
response = authenticated_client.delete(
reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = UserRoleRelationship.objects.filter(role=roles_fixture[0].id)
assert relationships.count() == 0
def test_invalid_provider_group_id(self, authenticated_client, create_test_user):
invalid_id = "non-existent-id"
data = {"data": [{"type": "provider-group", "id": invalid_id}]}
response = authenticated_client.post(
reverse("user-roles-relationship", kwargs={"pk": create_test_user.id}),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"][0]["detail"]
assert "valid UUID" in errors
@pytest.mark.django_db
class TestRoleProviderGroupRelationshipViewSet:
def test_create_relationship(
self, authenticated_client, roles_fixture, provider_groups_fixture
):
data = {
"data": [
{"type": "provider-group", "id": str(provider_group.id)}
for provider_group in provider_groups_fixture[:2]
]
}
response = authenticated_client.post(
reverse(
"role-provider-groups-relationship", kwargs={"pk": roles_fixture[0].id}
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = RoleProviderGroupRelationship.objects.filter(
role=roles_fixture[0].id
)
assert relationships.count() == 2
for relationship in relationships:
assert relationship.provider_group.id in [
pg.id for pg in provider_groups_fixture[:2]
]
def test_create_relationship_already_exists(
self, authenticated_client, roles_fixture, provider_groups_fixture
):
data = {
"data": [
{"type": "provider-group", "id": str(provider_group.id)}
for provider_group in provider_groups_fixture[:2]
]
}
authenticated_client.post(
reverse(
"role-provider-groups-relationship", kwargs={"pk": roles_fixture[0].id}
),
data=data,
content_type="application/vnd.api+json",
)
data = {
"data": [
{"type": "provider-group", "id": str(provider_groups_fixture[0].id)},
]
}
response = authenticated_client.post(
reverse(
"role-provider-groups-relationship", kwargs={"pk": roles_fixture[0].id}
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]["detail"]
assert "already associated" in errors
def test_partial_update_relationship(
self, authenticated_client, roles_fixture, provider_groups_fixture
):
data = {
"data": [
{"type": "provider-group", "id": str(provider_groups_fixture[1].id)},
]
}
response = authenticated_client.patch(
reverse(
"role-provider-groups-relationship", kwargs={"pk": roles_fixture[2].id}
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = RoleProviderGroupRelationship.objects.filter(
role=roles_fixture[2].id
)
assert relationships.count() == 1
assert {rel.provider_group.id for rel in relationships} == {
provider_groups_fixture[1].id
}
data = {
"data": [
{"type": "provider-group", "id": str(provider_groups_fixture[1].id)},
{"type": "provider-group", "id": str(provider_groups_fixture[2].id)},
]
}
response = authenticated_client.patch(
reverse(
"role-provider-groups-relationship", kwargs={"pk": roles_fixture[2].id}
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = RoleProviderGroupRelationship.objects.filter(
role=roles_fixture[2].id
)
assert relationships.count() == 2
assert {rel.provider_group.id for rel in relationships} == {
provider_groups_fixture[1].id,
provider_groups_fixture[2].id,
}
def test_destroy_relationship(
self, authenticated_client, roles_fixture, provider_groups_fixture
):
response = authenticated_client.delete(
reverse(
"role-provider-groups-relationship", kwargs={"pk": roles_fixture[0].id}
),
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = RoleProviderGroupRelationship.objects.filter(
role=roles_fixture[0].id
)
assert relationships.count() == 0
def test_invalid_provider_group_id(self, authenticated_client, roles_fixture):
invalid_id = "non-existent-id"
data = {"data": [{"type": "provider-group", "id": invalid_id}]}
response = authenticated_client.post(
reverse(
"role-provider-groups-relationship", kwargs={"pk": roles_fixture[1].id}
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"][0]["detail"]
assert "valid UUID" in errors
@pytest.mark.django_db
class TestProviderGroupMembershipViewSet:
def test_create_relationship(
self, authenticated_client, providers_fixture, provider_groups_fixture
):
provider_group, *_ = provider_groups_fixture
data = {
"data": [
{"type": "provider", "id": str(provider.id)}
for provider in providers_fixture[:2]
]
}
response = authenticated_client.post(
reverse(
"provider_group-providers-relationship",
kwargs={"pk": provider_group.id},
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = ProviderGroupMembership.objects.filter(
provider_group=provider_group.id
)
assert relationships.count() == 2
for relationship in relationships:
assert relationship.provider.id in [p.id for p in providers_fixture[:2]]
def test_create_relationship_already_exists(
self, authenticated_client, providers_fixture, provider_groups_fixture
):
provider_group, *_ = provider_groups_fixture
data = {
"data": [
{"type": "provider", "id": str(provider.id)}
for provider in providers_fixture[:2]
]
}
authenticated_client.post(
reverse(
"provider_group-providers-relationship",
kwargs={"pk": provider_group.id},
),
data=data,
content_type="application/vnd.api+json",
)
data = {
"data": [
{"type": "provider", "id": str(providers_fixture[0].id)},
]
}
response = authenticated_client.post(
reverse(
"provider_group-providers-relationship",
kwargs={"pk": provider_group.id},
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]["detail"]
assert "already associated" in errors
def test_partial_update_relationship(
self, authenticated_client, providers_fixture, provider_groups_fixture
):
provider_group, *_ = provider_groups_fixture
data = {
"data": [
{"type": "provider", "id": str(providers_fixture[1].id)},
]
}
response = authenticated_client.patch(
reverse(
"provider_group-providers-relationship",
kwargs={"pk": provider_group.id},
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = ProviderGroupMembership.objects.filter(
provider_group=provider_group.id
)
assert relationships.count() == 1
assert {rel.provider.id for rel in relationships} == {providers_fixture[1].id}
data = {
"data": [
{"type": "provider", "id": str(providers_fixture[1].id)},
{"type": "provider", "id": str(providers_fixture[2].id)},
]
}
response = authenticated_client.patch(
reverse(
"provider_group-providers-relationship",
kwargs={"pk": provider_group.id},
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = ProviderGroupMembership.objects.filter(
provider_group=provider_group.id
)
assert relationships.count() == 2
assert {rel.provider.id for rel in relationships} == {
providers_fixture[1].id,
providers_fixture[2].id,
}
def test_destroy_relationship(
self, authenticated_client, providers_fixture, provider_groups_fixture
):
provider_group, *_ = provider_groups_fixture
data = {
"data": [
{"type": "provider", "id": str(provider.id)}
for provider in providers_fixture[:2]
]
}
response = authenticated_client.post(
reverse(
"provider_group-providers-relationship",
kwargs={"pk": provider_group.id},
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_204_NO_CONTENT
response = authenticated_client.delete(
reverse(
"provider_group-providers-relationship",
kwargs={"pk": provider_group.id},
),
)
assert response.status_code == status.HTTP_204_NO_CONTENT
relationships = ProviderGroupMembership.objects.filter(
provider_group=providers_fixture[0].id
)
assert relationships.count() == 0
def test_invalid_provider_group_id(
self, authenticated_client, provider_groups_fixture
):
provider_group, *_ = provider_groups_fixture
invalid_id = "non-existent-id"
data = {"data": [{"type": "provider-group", "id": invalid_id}]}
response = authenticated_client.post(
reverse(
"provider_group-providers-relationship",
kwargs={"pk": provider_group.id},
),
data=data,
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"][0]["detail"]
assert "valid UUID" in errors
@pytest.mark.django_db
class TestComplianceOverviewViewSet:
def test_compliance_overview_list_none(self, authenticated_client):

View File

@@ -14,16 +14,20 @@ from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from rest_framework_simplejwt.tokens import RefreshToken
from api.models import (
ComplianceOverview,
Finding,
Invitation,
Membership,
Provider,
ProviderGroup,
ProviderGroupMembership,
ProviderSecret,
Resource,
ResourceTag,
Finding,
ProviderSecret,
Invitation,
InvitationRoleRelationship,
Role,
RoleProviderGroupRelationship,
UserRoleRelationship,
ComplianceOverview,
Scan,
StateChoices,
Task,
@@ -176,10 +180,26 @@ class UserSerializer(BaseSerializerV1):
"""
memberships = serializers.ResourceRelatedField(many=True, read_only=True)
roles = serializers.ResourceRelatedField(many=True, read_only=True)
class Meta:
model = User
fields = ["id", "name", "email", "company_name", "date_joined", "memberships"]
fields = [
"id",
"name",
"email",
"company_name",
"date_joined",
"memberships",
"roles",
]
extra_kwargs = {
"roles": {"read_only": True},
}
included_serializers = {
"roles": "api.v1.serializers.RoleSerializer",
}
class UserCreateSerializer(BaseWriteSerializer):
@@ -235,6 +255,73 @@ class UserUpdateSerializer(BaseWriteSerializer):
return super().update(instance, validated_data)
class RoleResourceIdentifierSerializer(serializers.Serializer):
resource_type = serializers.CharField(source="type")
id = serializers.UUIDField()
class JSONAPIMeta:
resource_name = "role-identifier"
def to_representation(self, instance):
"""
Ensure 'type' is used in the output instead of 'resource_type'.
"""
representation = super().to_representation(instance)
representation["type"] = representation.pop("resource_type", None)
return representation
def to_internal_value(self, data):
"""
Map 'type' back to 'resource_type' during input.
"""
data["resource_type"] = data.pop("type", None)
return super().to_internal_value(data)
class UserRoleRelationshipSerializer(RLSSerializer, BaseWriteSerializer):
"""
Serializer for modifying user memberships
"""
roles = serializers.ListField(
child=RoleResourceIdentifierSerializer(),
help_text="List of resource identifier objects representing roles.",
)
def create(self, validated_data):
role_ids = [item["id"] for item in validated_data["roles"]]
roles = Role.objects.filter(id__in=role_ids)
tenant_id = self.context.get("tenant_id")
new_relationships = [
UserRoleRelationship(
user=self.context.get("user"), role=r, tenant_id=tenant_id
)
for r in roles
]
UserRoleRelationship.objects.bulk_create(new_relationships)
return self.context.get("user")
def update(self, instance, validated_data):
role_ids = [item["id"] for item in validated_data["roles"]]
roles = Role.objects.filter(id__in=role_ids)
tenant_id = self.context.get("tenant_id")
instance.roles.clear()
new_relationships = [
UserRoleRelationship(user=instance, role=r, tenant_id=tenant_id)
for r in roles
]
UserRoleRelationship.objects.bulk_create(new_relationships)
return instance
class Meta:
model = UserRoleRelationship
fields = ["id", "roles"]
# Tasks
class TaskBase(serializers.ModelSerializer):
state_mapping = {
@@ -361,31 +448,30 @@ class ProviderGroupSerializer(RLSSerializer, BaseWriteSerializer):
providers = serializers.ResourceRelatedField(many=True, read_only=True)
def validate(self, attrs):
tenant = self.context["tenant_id"]
name = attrs.get("name", self.instance.name if self.instance else None)
# Exclude the current instance when checking for uniqueness during updates
queryset = ProviderGroup.objects.filter(tenant=tenant, name=name)
if self.instance:
queryset = queryset.exclude(pk=self.instance.pk)
if queryset.exists():
if ProviderGroup.objects.filter(name=attrs.get("name")).exists():
raise serializers.ValidationError(
{
"name": "A provider group with this name already exists for this tenant."
}
{"name": "A provider group with this name already exists."}
)
return super().validate(attrs)
class Meta:
model = ProviderGroup
fields = ["id", "name", "inserted_at", "updated_at", "providers", "url"]
read_only_fields = ["id", "inserted_at", "updated_at"]
fields = [
"id",
"name",
"inserted_at",
"updated_at",
"providers",
"roles",
"url",
]
extra_kwargs = {
"id": {"read_only": True},
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"roles": {"read_only": True},
"url": {"read_only": True},
}
@@ -406,41 +492,75 @@ class ProviderGroupUpdateSerializer(RLSSerializer, BaseWriteSerializer):
fields = ["id", "name"]
class ProviderGroupMembershipUpdateSerializer(RLSSerializer, BaseWriteSerializer):
class ProviderResourceIdentifierSerializer(serializers.Serializer):
resource_type = serializers.CharField(source="type")
id = serializers.UUIDField()
class JSONAPIMeta:
resource_name = "provider-identifier"
def to_representation(self, instance):
"""
Ensure 'type' is used in the output instead of 'resource_type'.
"""
representation = super().to_representation(instance)
representation["type"] = representation.pop("resource_type", None)
return representation
def to_internal_value(self, data):
"""
Map 'type' back to 'resource_type' during input.
"""
data["resource_type"] = data.pop("type", None)
return super().to_internal_value(data)
class ProviderGroupMembershipSerializer(RLSSerializer, BaseWriteSerializer):
"""
Serializer for modifying provider group memberships
Serializer for modifying provider_group memberships
"""
provider_ids = serializers.ListField(
child=serializers.UUIDField(),
help_text="List of provider UUIDs to add to the group",
providers = serializers.ListField(
child=ProviderResourceIdentifierSerializer(),
help_text="List of resource identifier objects representing providers.",
)
def validate(self, attrs):
tenant_id = self.context["tenant_id"]
provider_ids = attrs.get("provider_ids", [])
def create(self, validated_data):
provider_ids = [item["id"] for item in validated_data["providers"]]
providers = Provider.objects.filter(id__in=provider_ids)
tenant_id = self.context.get("tenant_id")
existing_provider_ids = set(
Provider.objects.filter(
id__in=provider_ids, tenant_id=tenant_id
).values_list("id", flat=True)
)
provided_provider_ids = set(provider_ids)
missing_provider_ids = provided_provider_ids - existing_provider_ids
if missing_provider_ids:
raise serializers.ValidationError(
{
"provider_ids": f"The following provider IDs do not exist: {', '.join(str(id) for id in missing_provider_ids)}"
}
new_relationships = [
ProviderGroupMembership(
provider_group=self.context.get("provider_group"),
provider=p,
tenant_id=tenant_id,
)
for p in providers
]
ProviderGroupMembership.objects.bulk_create(new_relationships)
return super().validate(attrs)
return self.context.get("provider_group")
def update(self, instance, validated_data):
provider_ids = [item["id"] for item in validated_data["providers"]]
providers = Provider.objects.filter(id__in=provider_ids)
tenant_id = self.context.get("tenant_id")
instance.providers.clear()
new_relationships = [
ProviderGroupMembership(
provider_group=instance, provider=p, tenant_id=tenant_id
)
for p in providers
]
ProviderGroupMembership.objects.bulk_create(new_relationships)
return instance
class Meta:
model = ProviderGroupMembership
fields = ["id", "provider_ids"]
fields = ["id", "providers"]
# Providers
@@ -1034,6 +1154,8 @@ class InvitationSerializer(RLSSerializer):
Serializer for the Invitation model.
"""
roles = serializers.ResourceRelatedField(many=True, queryset=Role.objects.all())
class Meta:
model = Invitation
fields = [
@@ -1043,6 +1165,7 @@ class InvitationSerializer(RLSSerializer):
"email",
"state",
"token",
"roles",
"expires_at",
"inviter",
"url",
@@ -1050,6 +1173,8 @@ class InvitationSerializer(RLSSerializer):
class InvitationBaseWriteSerializer(BaseWriteSerializer):
roles = serializers.ResourceRelatedField(many=True, queryset=Role.objects.all())
def validate_email(self, value):
user = User.objects.filter(email=value).first()
tenant_id = self.context["tenant_id"]
@@ -1086,31 +1211,54 @@ class InvitationCreateSerializer(InvitationBaseWriteSerializer, RLSSerializer):
class Meta:
model = Invitation
fields = ["email", "expires_at", "state", "token", "inviter"]
fields = ["email", "expires_at", "state", "token", "inviter", "roles"]
extra_kwargs = {
"token": {"read_only": True},
"state": {"read_only": True},
"inviter": {"read_only": True},
"expires_at": {"required": False},
"roles": {"required": False},
}
def create(self, validated_data):
inviter = self.context.get("request").user
tenant_id = self.context.get("tenant_id")
validated_data["inviter"] = inviter
return super().create(validated_data)
roles = validated_data.pop("roles", [])
invitation = super().create(validated_data)
for role in roles:
InvitationRoleRelationship.objects.create(
role=role, invitation=invitation, tenant_id=tenant_id
)
return invitation
class InvitationUpdateSerializer(InvitationBaseWriteSerializer):
class Meta:
model = Invitation
fields = ["id", "email", "expires_at", "state", "token"]
fields = ["id", "email", "expires_at", "state", "token", "roles"]
extra_kwargs = {
"token": {"read_only": True},
"state": {"read_only": True},
"expires_at": {"required": False},
"email": {"required": False},
"roles": {"required": False},
}
def update(self, instance, validated_data):
roles = validated_data.pop("roles", [])
tenant_id = self.context.get("tenant_id")
invitation = super().update(instance, validated_data)
if roles:
instance.roles.clear()
for role in roles:
InvitationRoleRelationship.objects.create(
role=role, invitation=invitation, tenant_id=tenant_id
)
return invitation
class InvitationAcceptSerializer(RLSSerializer):
"""Serializer for accepting an invitation."""
@@ -1122,6 +1270,196 @@ class InvitationAcceptSerializer(RLSSerializer):
fields = ["invitation_token"]
# Roles
class RoleSerializer(RLSSerializer, BaseWriteSerializer):
provider_groups = serializers.ResourceRelatedField(
many=True, queryset=ProviderGroup.objects.all()
)
permission_state = serializers.SerializerMethodField()
def get_permission_state(self, obj):
permission_fields = [
"manage_users",
"manage_account",
"manage_billing",
"manage_providers",
"manage_integrations",
"manage_scans",
]
values = [getattr(obj, field) for field in permission_fields]
if all(values):
return "unlimited"
elif not any(values):
return "none"
else:
return "limited"
def validate(self, attrs):
if Role.objects.filter(name=attrs.get("name")).exists():
raise serializers.ValidationError(
{"name": "A role with this name already exists."}
)
if attrs.get("manage_providers"):
attrs["unlimited_visibility"] = True
# Prevent updates to the admin role
if getattr(self.instance, "name", None) == "admin":
raise serializers.ValidationError(
{"name": "The admin role cannot be updated."}
)
return super().validate(attrs)
class Meta:
model = Role
fields = [
"id",
"name",
"manage_users",
"manage_account",
"manage_billing",
"manage_providers",
"manage_integrations",
"manage_scans",
"permission_state",
"unlimited_visibility",
"inserted_at",
"updated_at",
"provider_groups",
"users",
"invitations",
"url",
]
extra_kwargs = {
"id": {"read_only": True},
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"users": {"read_only": True},
"url": {"read_only": True},
}
class RoleCreateSerializer(RoleSerializer):
def create(self, validated_data):
provider_groups = validated_data.pop("provider_groups", [])
users = validated_data.pop("users", [])
tenant_id = self.context.get("tenant_id")
role = Role.objects.create(tenant_id=tenant_id, **validated_data)
through_model_instances = [
RoleProviderGroupRelationship(
role=role,
provider_group=provider_group,
tenant_id=tenant_id,
)
for provider_group in provider_groups
]
RoleProviderGroupRelationship.objects.bulk_create(through_model_instances)
through_model_instances = [
UserRoleRelationship(
role=user,
user=user,
tenant_id=tenant_id,
)
for user in users
]
UserRoleRelationship.objects.bulk_create(through_model_instances)
return role
class RoleUpdateSerializer(RoleSerializer):
class Meta:
model = Role
fields = [
"id",
"name",
"manage_users",
"manage_account",
"manage_billing",
"manage_providers",
"manage_integrations",
"manage_scans",
"unlimited_visibility",
]
class ProviderGroupResourceIdentifierSerializer(serializers.Serializer):
resource_type = serializers.CharField(source="type")
id = serializers.UUIDField()
class JSONAPIMeta:
resource_name = "provider-group-identifier"
def to_representation(self, instance):
"""
Ensure 'type' is used in the output instead of 'resource_type'.
"""
representation = super().to_representation(instance)
representation["type"] = representation.pop("resource_type", None)
return representation
def to_internal_value(self, data):
"""
Map 'type' back to 'resource_type' during input.
"""
data["resource_type"] = data.pop("type", None)
return super().to_internal_value(data)
class RoleProviderGroupRelationshipSerializer(RLSSerializer, BaseWriteSerializer):
"""
Serializer for modifying role memberships
"""
provider_groups = serializers.ListField(
child=ProviderGroupResourceIdentifierSerializer(),
help_text="List of resource identifier objects representing provider groups.",
)
def create(self, validated_data):
provider_group_ids = [item["id"] for item in validated_data["provider_groups"]]
provider_groups = ProviderGroup.objects.filter(id__in=provider_group_ids)
tenant_id = self.context.get("tenant_id")
new_relationships = [
RoleProviderGroupRelationship(
role=self.context.get("role"), provider_group=pg, tenant_id=tenant_id
)
for pg in provider_groups
]
RoleProviderGroupRelationship.objects.bulk_create(new_relationships)
return self.context.get("role")
def update(self, instance, validated_data):
provider_group_ids = [item["id"] for item in validated_data["provider_groups"]]
provider_groups = ProviderGroup.objects.filter(id__in=provider_group_ids)
tenant_id = self.context.get("tenant_id")
instance.provider_groups.clear()
new_relationships = [
RoleProviderGroupRelationship(
role=instance, provider_group=pg, tenant_id=tenant_id
)
for pg in provider_groups
]
RoleProviderGroupRelationship.objects.bulk_create(new_relationships)
return instance
class Meta:
model = RoleProviderGroupRelationship
fields = ["id", "provider_groups"]
# Compliance overview

View File

@@ -3,16 +3,20 @@ from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers
from api.v1.views import (
ComplianceOverviewViewSet,
CustomTokenObtainView,
CustomTokenRefreshView,
FindingViewSet,
InvitationAcceptViewSet,
InvitationViewSet,
MembershipViewSet,
OverviewViewSet,
ProviderGroupViewSet,
ProviderGroupProvidersRelationshipView,
ProviderSecretViewSet,
InvitationViewSet,
InvitationAcceptViewSet,
RoleViewSet,
RoleProviderGroupRelationshipView,
UserRoleRelationshipView,
OverviewViewSet,
ComplianceOverviewViewSet,
ProviderViewSet,
ResourceViewSet,
ScanViewSet,
@@ -29,11 +33,12 @@ router = routers.DefaultRouter(trailing_slash=False)
router.register(r"users", UserViewSet, basename="user")
router.register(r"tenants", TenantViewSet, basename="tenant")
router.register(r"providers", ProviderViewSet, basename="provider")
router.register(r"provider_groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"provider-groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"scans", ScanViewSet, basename="scan")
router.register(r"tasks", TaskViewSet, basename="task")
router.register(r"resources", ResourceViewSet, basename="resource")
router.register(r"findings", FindingViewSet, basename="finding")
router.register(r"roles", RoleViewSet, basename="role")
router.register(
r"compliance-overviews", ComplianceOverviewViewSet, basename="complianceoverview"
)
@@ -80,6 +85,27 @@ urlpatterns = [
InvitationAcceptViewSet.as_view({"post": "accept"}),
name="invitation-accept",
),
path(
"roles/<uuid:pk>/relationships/provider_groups",
RoleProviderGroupRelationshipView.as_view(
{"post": "create", "patch": "partial_update", "delete": "destroy"}
),
name="role-provider-groups-relationship",
),
path(
"users/<uuid:pk>/relationships/roles",
UserRoleRelationshipView.as_view(
{"post": "create", "patch": "partial_update", "delete": "destroy"}
),
name="user-roles-relationship",
),
path(
"provider-groups/<uuid:pk>/relationships/providers",
ProviderGroupProvidersRelationshipView.as_view(
{"post": "create", "patch": "partial_update", "delete": "destroy"}
),
name="provider_group-providers-relationship",
),
path("", include(router.urls)),
path("", include(tenants_router.urls)),
path("", include(users_router.urls)),

View File

@@ -8,6 +8,7 @@ from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from drf_spectacular.settings import spectacular_settings
from drf_spectacular_jsonapi.schemas.openapi import JsonApiAutoSchema
from drf_spectacular.utils import (
OpenApiParameter,
OpenApiResponse,
@@ -25,12 +26,15 @@ from rest_framework.exceptions import (
ValidationError,
)
from rest_framework.generics import GenericAPIView, get_object_or_404
from rest_framework_json_api.views import Response
from rest_framework_json_api.views import RelationshipView, Response
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from rest_framework.permissions import SAFE_METHODS
from tasks.beat import schedule_provider_scan
from tasks.tasks import (
check_provider_connection_task,
delete_provider_task,
delete_tenant_task,
perform_scan_summary_task,
perform_scan_task,
)
@@ -51,8 +55,12 @@ from api.filters import (
TaskFilter,
TenantFilter,
UserFilter,
RoleFilter,
)
from api.models import (
StatusChoices,
User,
UserRoleRelationship,
ComplianceOverview,
Finding,
Invitation,
@@ -61,20 +69,27 @@ from api.models import (
ProviderGroup,
ProviderGroupMembership,
ProviderSecret,
Role,
RoleProviderGroupRelationship,
Resource,
Scan,
ScanSummary,
SeverityChoices,
StateChoices,
StatusChoices,
Task,
User,
)
from api.pagination import ComplianceOverviewPagination
from api.rbac.permissions import DISABLE_RBAC, HasPermissions, Permissions
from api.rls import Tenant
from api.utils import validate_invitation
from api.uuid_utils import datetime_to_uuid7
from api.v1.serializers import (
TokenSerializer,
TokenRefreshSerializer,
UserSerializer,
UserCreateSerializer,
UserUpdateSerializer,
UserRoleRelationshipSerializer,
ComplianceOverviewFullSerializer,
ComplianceOverviewSerializer,
FindingDynamicFilterSerializer,
@@ -88,34 +103,39 @@ from api.v1.serializers import (
OverviewProviderSerializer,
OverviewSeveritySerializer,
ProviderCreateSerializer,
ProviderGroupMembershipUpdateSerializer,
ProviderGroupMembershipSerializer,
ProviderGroupSerializer,
ProviderGroupUpdateSerializer,
ProviderSecretCreateSerializer,
ProviderSecretSerializer,
ProviderSecretUpdateSerializer,
RoleProviderGroupRelationshipSerializer,
ProviderSerializer,
ProviderUpdateSerializer,
ResourceSerializer,
ScanCreateSerializer,
ScanSerializer,
ScanUpdateSerializer,
ScheduleDailyCreateSerializer,
TaskSerializer,
TenantSerializer,
TokenRefreshSerializer,
TokenSerializer,
UserCreateSerializer,
UserSerializer,
UserUpdateSerializer,
TaskSerializer,
ScanSerializer,
ScanCreateSerializer,
ScanUpdateSerializer,
ResourceSerializer,
ProviderSecretSerializer,
ProviderSecretUpdateSerializer,
ProviderSecretCreateSerializer,
RoleSerializer,
RoleCreateSerializer,
RoleUpdateSerializer,
ScheduleDailyCreateSerializer,
)
CACHE_DECORATOR = cache_control(
max_age=django_settings.CACHE_MAX_AGE,
stale_while_revalidate=django_settings.CACHE_STALE_WHILE_REVALIDATE,
)
class RelationshipViewSchema(JsonApiAutoSchema):
def _resolve_path_parameters(self, _path_variables):
return []
@extend_schema(
tags=["Token"],
summary="Obtain a token",
@@ -171,7 +191,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.0.0"
spectacular_settings.VERSION = "1.0.1"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -270,6 +290,26 @@ class UserViewSet(BaseUserViewset):
filterset_class = UserFilter
ordering = ["-date_joined"]
ordering_fields = ["name", "email", "company_name", "date_joined", "is_active"]
required_permissions = [Permissions.MANAGE_USERS]
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def initial(self, request, *args, **kwargs):
"""
Sets required_permissions before permissions are checked.
"""
self.required_permissions = self.get_required_permissions()
super().initial(request, *args, **kwargs)
def get_required_permissions(self):
"""
Returns the required permissions based on the request method.
"""
if self.action == "me":
# No permissions required for me request
return []
else:
# Require permission for the rest of the requests
return [Permissions.MANAGE_USERS]
def get_queryset(self):
# If called during schema generation, return an empty queryset
@@ -346,11 +386,124 @@ class UserViewSet(BaseUserViewset):
user=user, tenant=tenant, role=role
)
if invitation:
# TODO: Add roles to output relationships
user_role = []
for role in invitation.roles.all():
user_role.append(
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user, role=role, tenant=invitation.tenant
)
)
invitation.state = Invitation.State.ACCEPTED
invitation.save(using=MainRouter.admin_db)
else:
role = Role.objects.using(MainRouter.admin_db).create(
name="admin",
tenant_id=tenant.id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user,
role=role,
tenant_id=tenant.id,
)
return Response(data=UserSerializer(user).data, status=status.HTTP_201_CREATED)
@extend_schema_view(
create=extend_schema(
tags=["User"],
summary="Create a new user-roles relationship",
description="Add a new user-roles relationship to the system by providing the required user-roles details.",
responses={
204: OpenApiResponse(description="Relationship created successfully"),
400: OpenApiResponse(
description="Bad request (e.g., relationship already exists)"
),
},
),
partial_update=extend_schema(
tags=["User"],
summary="Partially update a user-roles relationship",
description="Update the user-roles relationship information without affecting other fields.",
responses={
204: OpenApiResponse(
response=None, description="Relationship updated successfully"
)
},
),
destroy=extend_schema(
tags=["User"],
summary="Delete a user-roles relationship",
description="Remove the user-roles relationship from the system by their ID.",
responses={
204: OpenApiResponse(
response=None, description="Relationship deleted successfully"
)
},
),
)
class UserRoleRelationshipView(RelationshipView, BaseRLSViewSet):
queryset = User.objects.all()
serializer_class = UserRoleRelationshipSerializer
resource_name = "roles"
http_method_names = ["post", "patch", "delete"]
schema = RelationshipViewSchema()
def get_queryset(self):
return User.objects.all()
def create(self, request, *args, **kwargs):
user = self.get_object()
role_ids = [item["id"] for item in request.data]
existing_relationships = UserRoleRelationship.objects.filter(
user=user, role_id__in=role_ids
)
if existing_relationships.exists():
return Response(
{"detail": "One or more roles are already associated with the user."},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = self.get_serializer(
data={"roles": request.data},
context={
"user": user,
"tenant_id": self.request.tenant_id,
"request": request,
},
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(status=status.HTTP_204_NO_CONTENT)
def partial_update(self, request, *args, **kwargs):
user = self.get_object()
serializer = self.get_serializer(
instance=user,
data={"roles": request.data},
context={"tenant_id": self.request.tenant_id, "request": request},
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(status=status.HTTP_204_NO_CONTENT)
def destroy(self, request, *args, **kwargs):
user = self.get_object()
user.roles.clear()
return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
list=extend_schema(
tags=["Tenant"],
@@ -388,6 +541,8 @@ class TenantViewSet(BaseTenantViewset):
search_fields = ["name"]
ordering = ["-inserted_at"]
ordering_fields = ["name", "inserted_at", "updated_at"]
required_permissions = [Permissions.MANAGE_ACCOUNT]
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def get_queryset(self):
return Tenant.objects.all()
@@ -401,6 +556,25 @@ class TenantViewSet(BaseTenantViewset):
)
return Response(data=serializer.data, status=status.HTTP_201_CREATED)
def destroy(self, request, *args, **kwargs):
# This will perform validation and raise a 404 if the tenant does not exist
tenant_id = kwargs.get("pk")
get_object_or_404(Tenant, id=tenant_id)
with transaction.atomic():
# Delete memberships
Membership.objects.using(MainRouter.admin_db).filter(
tenant_id=tenant_id
).delete()
# Delete users without memberships
User.objects.using(MainRouter.admin_db).filter(
membership__isnull=True
).delete()
# Delete tenant in batches
delete_tenant_task.apply_async(kwargs={"tenant_id": tenant_id})
return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
list=extend_schema(
@@ -542,66 +716,141 @@ class ProviderGroupViewSet(BaseRLSViewSet):
queryset = ProviderGroup.objects.all()
serializer_class = ProviderGroupSerializer
filterset_class = ProviderGroupFilter
http_method_names = ["get", "post", "patch", "put", "delete"]
http_method_names = ["get", "post", "patch", "delete"]
ordering = ["inserted_at"]
required_permissions = []
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def initial(self, request, *args, **kwargs):
"""
Sets required_permissions before permissions are checked.
"""
self.required_permissions = self.get_required_permissions()
super().initial(request, *args, **kwargs)
def get_required_permissions(self):
"""
Returns the required permissions based on the request method.
"""
if DISABLE_RBAC or self.request.method in SAFE_METHODS:
# No permissions required for GET requests
return []
else:
# Require permission for non-GET requests
return [Permissions.MANAGE_PROVIDERS]
def get_queryset(self):
return ProviderGroup.objects.prefetch_related("providers")
user = self.request.user
user_roles = user.roles.all()
# Check if any of the user's roles have UNLIMITED_VISIBILITY
if DISABLE_RBAC or getattr(
user_roles[0], Permissions.UNLIMITED_VISIBILITY.value, False
):
# User has unlimited visibility, return all provider groups
return ProviderGroup.objects.prefetch_related("providers")
# Collect provider groups associated with the user's roles
provider_groups = (
ProviderGroup.objects.filter(roles__in=user_roles)
.distinct()
.prefetch_related("providers")
)
return provider_groups
def get_serializer_class(self):
if self.action == "partial_update":
return ProviderGroupUpdateSerializer
elif self.action == "providers":
if hasattr(self, "response_serializer_class"):
return self.response_serializer_class
return ProviderGroupMembershipUpdateSerializer
return super().get_serializer_class()
@extend_schema(
tags=["Provider Group"],
summary="Add providers to a provider group",
description="Add one or more providers to an existing provider group.",
request=ProviderGroupMembershipUpdateSerializer,
responses={200: OpenApiResponse(response=ProviderGroupSerializer)},
)
@action(detail=True, methods=["put"], url_name="providers")
def providers(self, request, pk=None):
@extend_schema(tags=["Provider Group"])
@extend_schema_view(
create=extend_schema(
summary="Create a new provider_group-providers relationship",
description="Add a new provider_group-providers relationship to the system by providing the required provider_group-providers details.",
responses={
204: OpenApiResponse(description="Relationship created successfully"),
400: OpenApiResponse(
description="Bad request (e.g., relationship already exists)"
),
},
),
partial_update=extend_schema(
summary="Partially update a provider_group-providers relationship",
description="Update the provider_group-providers relationship information without affecting other fields.",
responses={
204: OpenApiResponse(
response=None, description="Relationship updated successfully"
)
},
),
destroy=extend_schema(
summary="Delete a provider_group-providers relationship",
description="Remove the provider_group-providers relationship from the system by their ID.",
responses={
204: OpenApiResponse(
response=None, description="Relationship deleted successfully"
)
},
),
)
class ProviderGroupProvidersRelationshipView(RelationshipView, BaseRLSViewSet):
queryset = ProviderGroup.objects.all()
serializer_class = ProviderGroupMembershipSerializer
resource_name = "providers"
http_method_names = ["post", "patch", "delete"]
schema = RelationshipViewSchema()
def get_queryset(self):
return ProviderGroup.objects.all()
def create(self, request, *args, **kwargs):
provider_group = self.get_object()
# Validate input data
serializer = self.get_serializer_class()(
data=request.data,
context=self.get_serializer_context(),
provider_ids = [item["id"] for item in request.data]
existing_relationships = ProviderGroupMembership.objects.filter(
provider_group=provider_group, provider_id__in=provider_ids
)
if existing_relationships.exists():
return Response(
{
"detail": "One or more providers are already associated with the provider_group."
},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = self.get_serializer(
data={"providers": request.data},
context={
"provider_group": provider_group,
"tenant_id": self.request.tenant_id,
"request": request,
},
)
serializer.is_valid(raise_exception=True)
serializer.save()
provider_ids = serializer.validated_data["provider_ids"]
return Response(status=status.HTTP_204_NO_CONTENT)
# Update memberships
ProviderGroupMembership.objects.filter(
provider_group=provider_group, tenant_id=request.tenant_id
).delete()
provider_group_memberships = [
ProviderGroupMembership(
tenant_id=self.request.tenant_id,
provider_group=provider_group,
provider_id=provider_id,
)
for provider_id in provider_ids
]
ProviderGroupMembership.objects.bulk_create(
provider_group_memberships, ignore_conflicts=True
def partial_update(self, request, *args, **kwargs):
provider_group = self.get_object()
serializer = self.get_serializer(
instance=provider_group,
data={"providers": request.data},
context={"tenant_id": self.request.tenant_id, "request": request},
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(status=status.HTTP_204_NO_CONTENT)
# Return the updated provider group with providers
provider_group.refresh_from_db()
self.response_serializer_class = ProviderGroupSerializer
response_serializer = ProviderGroupSerializer(
provider_group, context=self.get_serializer_context()
)
return Response(data=response_serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, *args, **kwargs):
provider_group = self.get_object()
provider_group.providers.clear()
return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
@@ -651,9 +900,43 @@ class ProviderViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
required_permissions = []
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def initial(self, request, *args, **kwargs):
"""
Sets required_permissions before permissions are checked.
"""
self.required_permissions = self.get_required_permissions()
super().initial(request, *args, **kwargs)
def get_required_permissions(self):
"""
Returns the required permissions based on the request method.
"""
if DISABLE_RBAC or self.request.method in SAFE_METHODS:
# No permissions required for GET requests
return []
else:
# Require permission for non-GET requests
return [Permissions.MANAGE_PROVIDERS]
def get_queryset(self):
return Provider.objects.all()
user = self.request.user
user_roles = user.roles.all()
if DISABLE_RBAC or getattr(
user_roles[0], Permissions.UNLIMITED_VISIBILITY.value, False
):
# User has unlimited visibility, return all providers
return Provider.objects.all()
# User lacks permission, filter providers based on provider groups associated with the role
provider_groups = user_roles[0].provider_groups.all()
providers = Provider.objects.filter(
provider_groups__in=provider_groups
).distinct()
return providers
def get_serializer_class(self):
if self.action == "create":
@@ -773,9 +1056,42 @@ class ScanViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
required_permissions = [Permissions.MANAGE_SCANS]
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def initial(self, request, *args, **kwargs):
"""
Sets required_permissions before permissions are checked.
"""
self.required_permissions = self.get_required_permissions()
super().initial(request, *args, **kwargs)
def get_required_permissions(self):
"""
Returns the required permissions based on the request method.
"""
if DISABLE_RBAC or self.request.method in SAFE_METHODS:
# No permissions required for GET requests
return []
else:
# Require permission for non-GET requests
return [Permissions.MANAGE_SCANS]
def get_queryset(self):
return Scan.objects.all()
user = self.request.user
user_roles = user.roles.all()
if DISABLE_RBAC or getattr(
user_roles[0], Permissions.UNLIMITED_VISIBILITY.value, False
):
# User has unlimited visibility, return all scans
return Scan.objects.all()
# User lacks permission, filter providers based on provider groups associated with the role
provider_groups = user_roles[0].provider_groups.all()
providers = Provider.objects.filter(
provider_groups__in=provider_groups
).distinct()
return Scan.objects.filter(provider__in=providers).distinct()
def get_serializer_class(self):
if self.action == "create":
@@ -865,11 +1181,28 @@ class TaskViewSet(BaseRLSViewSet):
search_fields = ["name"]
ordering = ["-inserted_at"]
ordering_fields = ["inserted_at", "completed_at", "name", "state"]
required_permissions = []
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def get_queryset(self):
return Task.objects.annotate(
name=F("task_runner_task__task_name"), state=F("task_runner_task__status")
)
user = self.request.user
user_roles = user.roles.all()
if DISABLE_RBAC or getattr(
user_roles[0], Permissions.UNLIMITED_VISIBILITY.value, False
):
# User has unlimited visibility, return all tasks
return Task.objects.annotate(
name=F("task_runner_task__task_name"),
state=F("task_runner_task__status"),
)
# User lacks permission, filter tasks based on provider groups associated with the role
provider_groups = user_roles[0].provider_groups.all()
providers = Provider.objects.filter(
provider_groups__in=provider_groups
).distinct()
scans = Scan.objects.filter(provider__in=providers).distinct()
return Task.objects.filter(scan__in=scans).distinct()
def destroy(self, request, *args, pk=None, **kwargs):
task = get_object_or_404(Task, pk=pk)
@@ -930,11 +1263,33 @@ class ResourceViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
required_permissions = []
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def initial(self, request, *args, **kwargs):
"""
Sets required_permissions before permissions are checked.
"""
self.required_permissions = ResourceViewSet.required_permissions
super().initial(request, *args, **kwargs)
def get_queryset(self):
queryset = Resource.objects.all()
search_value = self.request.query_params.get("filter[search]", None)
user = self.request.user
user_roles = user.roles.all()
if DISABLE_RBAC or getattr(
user_roles[0], Permissions.UNLIMITED_VISIBILITY.value, False
):
# User has unlimited visibility, return all scans
queryset = Resource.objects.all()
else:
# User lacks permission, filter providers based on provider groups associated with the role
provider_groups = user_roles[0].provider_groups.all()
providers = Provider.objects.filter(
provider_groups__in=provider_groups
).distinct()
queryset = Resource.objects.filter(provider__in=providers).distinct()
search_value = self.request.query_params.get("filter[search]", None)
if search_value:
# Django's ORM will build a LEFT JOIN and OUTER JOIN on the "through" table, resulting in duplicates
# The duplicates then require a `distinct` query
@@ -1005,11 +1360,15 @@ class FindingViewSet(BaseRLSViewSet):
"inserted_at",
"updated_at",
]
required_permissions = []
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def inserted_at_to_uuidv7(self, inserted_at):
if inserted_at is None:
return None
return datetime_to_uuid7(inserted_at)
def initial(self, request, *args, **kwargs):
"""
Sets required_permissions before permissions are checked.
"""
self.required_permissions = ResourceViewSet.required_permissions
super().initial(request, *args, **kwargs)
def get_serializer_class(self):
if self.action == "findings_services_regions":
@@ -1018,9 +1377,23 @@ class FindingViewSet(BaseRLSViewSet):
return super().get_serializer_class()
def get_queryset(self):
queryset = Finding.objects.all()
search_value = self.request.query_params.get("filter[search]", None)
user = self.request.user
user_roles = user.roles.all()
if DISABLE_RBAC or getattr(
user_roles[0], Permissions.UNLIMITED_VISIBILITY.value, False
):
# User has unlimited visibility, return all scans
queryset = Finding.objects.all()
else:
# User lacks permission, filter providers based on provider groups associated with the role
provider_groups = user_roles[0].provider_groups.all()
providers = Provider.objects.filter(
provider_groups__in=provider_groups
).distinct()
scans = Scan.objects.filter(provider__in=providers).distinct()
queryset = Finding.objects.filter(scan__in=scans).distinct()
search_value = self.request.query_params.get("filter[search]", None)
if search_value:
# Django's ORM will build a LEFT JOIN and OUTER JOIN on any "through" tables, resulting in duplicates
# The duplicates then require a `distinct` query
@@ -1048,6 +1421,11 @@ class FindingViewSet(BaseRLSViewSet):
return queryset
def inserted_at_to_uuidv7(self, inserted_at):
if inserted_at is None:
return None
return datetime_to_uuid7(inserted_at)
@action(detail=False, methods=["get"], url_name="findings_services_regions")
def findings_services_regions(self, request):
queryset = self.get_queryset()
@@ -1168,6 +1546,8 @@ class InvitationViewSet(BaseRLSViewSet):
"state",
"inviter",
]
required_permissions = [Permissions.MANAGE_ACCOUNT]
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def get_queryset(self):
return Invitation.objects.all()
@@ -1255,6 +1635,14 @@ class InvitationAcceptViewSet(BaseRLSViewSet):
user=user,
tenant=invitation.tenant,
)
# TODO: Add roles to output relationships
user_role = []
for role in invitation.roles.all():
user_role.append(
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user, role=role, tenant=invitation.tenant
)
)
invitation.state = Invitation.State.ACCEPTED
invitation.save(using=MainRouter.admin_db)
@@ -1263,6 +1651,153 @@ class InvitationAcceptViewSet(BaseRLSViewSet):
return Response(data=membership_serializer.data, status=status.HTTP_201_CREATED)
@extend_schema(tags=["Role"])
@extend_schema_view(
list=extend_schema(
tags=["Role"],
summary="List all roles",
description="Retrieve a list of all roles with options for filtering by various criteria.",
),
retrieve=extend_schema(
tags=["Role"],
summary="Retrieve data from a role",
description="Fetch detailed information about a specific role by their ID.",
),
create=extend_schema(
tags=["Role"],
summary="Create a new role",
description="Add a new role to the system by providing the required role details.",
),
partial_update=extend_schema(
tags=["Role"],
summary="Partially update a role",
description="Update certain fields of an existing role's information without affecting other fields.",
responses={200: RoleSerializer},
),
destroy=extend_schema(
tags=["Role"],
summary="Delete a role",
description="Remove a role from the system by their ID.",
),
)
class RoleViewSet(BaseRLSViewSet):
queryset = Role.objects.all()
serializer_class = RoleSerializer
filterset_class = RoleFilter
http_method_names = ["get", "post", "patch", "delete"]
ordering = ["inserted_at"]
required_permissions = [Permissions.MANAGE_ACCOUNT]
permission_classes = BaseRLSViewSet.permission_classes + [HasPermissions]
def get_queryset(self):
return Role.objects.all()
def get_serializer_class(self):
if self.action == "create":
return RoleCreateSerializer
elif self.action == "partial_update":
return RoleUpdateSerializer
return super().get_serializer_class()
def partial_update(self, request, *args, **kwargs):
user = request.user
user_role = user.roles.all().first()
# If the user is the owner of the role, the manage_account field is not editable
if user_role and kwargs["pk"] == str(user_role.id):
request.data["manage_account"] = str(user_role.manage_account).lower()
return super().partial_update(request, *args, **kwargs)
@extend_schema_view(
    create=extend_schema(
        tags=["Role"],
        summary="Create a new role-provider_groups relationship",
        description="Add a new role-provider_groups relationship to the system by providing the required role-provider_groups details.",
        responses={
            204: OpenApiResponse(description="Relationship created successfully"),
            400: OpenApiResponse(
                description="Bad request (e.g., relationship already exists)"
            ),
        },
    ),
    partial_update=extend_schema(
        tags=["Role"],
        summary="Partially update a role-provider_groups relationship",
        description="Update the role-provider_groups relationship information without affecting other fields.",
        responses={
            204: OpenApiResponse(
                response=None, description="Relationship updated successfully"
            )
        },
    ),
    destroy=extend_schema(
        tags=["Role"],
        summary="Delete a role-provider_groups relationship",
        description="Remove the role-provider_groups relationship from the system by their ID.",
        responses={
            204: OpenApiResponse(
                response=None, description="Relationship deleted successfully"
            )
        },
    ),
)
class RoleProviderGroupRelationshipView(RelationshipView, BaseRLSViewSet):
    """Manage the to-many relationship between a Role and provider groups.

    Exposes POST (attach), PATCH (replace) and DELETE (clear) on a role's
    ``provider_groups`` relationship, JSON:API style. All handlers respond
    with 204 on success.
    """

    queryset = Role.objects.all()
    serializer_class = RoleProviderGroupRelationshipSerializer
    resource_name = "provider_groups"
    # Only relationship-mutating verbs are exposed; GET is not supported here.
    http_method_names = ["post", "patch", "delete"]
    schema = RelationshipViewSchema()

    def get_queryset(self):
        # All roles; tenant scoping is expected to come from the base viewset.
        return Role.objects.all()

    def create(self, request, *args, **kwargs):
        """Attach the provider groups listed in the request body to the role.

        Returns 400 if any of the given provider groups is already linked.
        """
        role = self.get_object()
        # NOTE(review): assumes request.data is a list of {"id": ...} dicts;
        # a malformed payload raises KeyError/TypeError here — confirm
        # upstream validation covers this.
        provider_group_ids = [item["id"] for item in request.data]
        existing_relationships = RoleProviderGroupRelationship.objects.filter(
            role=role, provider_group_id__in=provider_group_ids
        )
        if existing_relationships.exists():
            return Response(
                {
                    "detail": "One or more provider groups are already associated with the role."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer = self.get_serializer(
            data={"provider_groups": request.data},
            context={
                "role": role,
                "tenant_id": self.request.tenant_id,
                "request": request,
            },
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def partial_update(self, request, *args, **kwargs):
        """Replace the role's provider_groups with the set from the body."""
        role = self.get_object()
        serializer = self.get_serializer(
            instance=role,
            data={"provider_groups": request.data},
            context={"tenant_id": self.request.tenant_id, "request": request},
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def destroy(self, request, *args, **kwargs):
        """Remove provider-group links from the role.

        NOTE(review): this clears *all* provider groups regardless of the
        request body; JSON:API relationship DELETE normally removes only the
        listed members — confirm this is intentional.
        """
        role = self.get_object()
        role.provider_groups.clear()
        return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
list=extend_schema(
tags=["Compliance Overview"],

View File

@@ -207,3 +207,6 @@ CACHE_STALE_WHILE_REVALIDATE = env.int("DJANGO_STALE_WHILE_REVALIDATE", 60)
TESTING = False
# Disable RBAC during tests/demos
DISABLE_RBAC = False

View File

@@ -10,6 +10,7 @@ from prowler.lib.check.models import Severity
from prowler.lib.outputs.finding import Status
from rest_framework import status
from rest_framework.test import APIClient
from unittest.mock import patch
from api.models import (
Finding,
@@ -20,6 +21,7 @@ from api.models import (
ProviderGroup,
Resource,
ResourceTag,
Role,
Scan,
StateChoices,
Task,
@@ -27,6 +29,7 @@ from api.models import (
ProviderSecret,
Invitation,
ComplianceOverview,
UserRoleRelationship,
)
from api.rls import Tenant
from api.v1.serializers import TokenSerializer
@@ -72,6 +75,16 @@ def disable_logging():
logging.disable(logging.CRITICAL)
@pytest.fixture(scope="function")
def patch_testing_flag():
    """Disable RBAC checks for the duration of a test.

    Patches ``DISABLE_RBAC`` to ``True`` in both the permissions module and
    the views module while the fixture is active.
    """
    with patch("api.rbac.permissions.DISABLE_RBAC", True), patch(
        "api.v1.views.DISABLE_RBAC", True
    ):
        yield
@pytest.fixture(scope="session", autouse=True)
def create_test_user(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
@@ -83,6 +96,106 @@ def create_test_user(django_db_setup, django_db_blocker):
return user
@pytest.fixture(scope="function")
def create_test_user_rbac(django_db_setup, django_db_blocker):
    """Create a user in a fresh tenant holding an all-permissions admin role.

    Returns:
        The created ``User`` instance.
    """
    with django_db_blocker.unblock():
        user = User.objects.create_user(
            name="testing",
            email="rbac@rbac.com",
            password=TEST_PASSWORD,
        )
        tenant = Tenant.objects.create(
            name="Tenant Test",
        )
        Membership.objects.create(
            user=user,
            tenant=tenant,
            role=Membership.RoleChoices.OWNER,
        )
        # Keep the created role instead of re-querying it by name afterwards
        # (Role.objects.get(name="admin") was a redundant extra query and is
        # fragile if another role shares the name).
        admin_role = Role.objects.create(
            name="admin",
            tenant_id=tenant.id,
            manage_users=True,
            manage_account=True,
            manage_billing=True,
            manage_providers=True,
            manage_integrations=True,
            manage_scans=True,
            unlimited_visibility=True,
        )
        UserRoleRelationship.objects.create(
            user=user,
            role=admin_role,
            tenant_id=tenant.id,
        )
        return user
@pytest.fixture(scope="function")
def create_test_user_rbac_limited(django_db_setup, django_db_blocker):
    """Create a user in a fresh tenant holding a no-permissions role.

    Returns:
        The created ``User`` instance.
    """
    with django_db_blocker.unblock():
        user = User.objects.create_user(
            name="testing_limited",
            email="rbac_limited@rbac.com",
            password=TEST_PASSWORD,
        )
        tenant = Tenant.objects.create(
            name="Tenant Test",
        )
        Membership.objects.create(
            user=user,
            tenant=tenant,
            role=Membership.RoleChoices.OWNER,
        )
        # Keep the created role instead of re-querying it by name afterwards
        # (Role.objects.get(name="limited") was a redundant extra query and is
        # fragile if another role shares the name).
        limited_role = Role.objects.create(
            name="limited",
            tenant_id=tenant.id,
            manage_users=False,
            manage_account=False,
            manage_billing=False,
            manage_providers=False,
            manage_integrations=False,
            manage_scans=False,
            unlimited_visibility=False,
        )
        UserRoleRelationship.objects.create(
            user=user,
            role=limited_role,
            tenant_id=tenant.id,
        )
        return user
@pytest.fixture
def authenticated_client_rbac(create_test_user_rbac, tenants_fixture, client):
    """Return a test client authenticated as the all-permissions RBAC user."""
    client.user = create_test_user_rbac
    serializer = TokenSerializer(
        data={"type": "tokens", "email": "rbac@rbac.com", "password": TEST_PASSWORD}
    )
    # Fail loudly on bad credentials instead of an opaque KeyError below.
    serializer.is_valid(raise_exception=True)
    access_token = serializer.validated_data["access"]
    client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
    return client
@pytest.fixture
def authenticated_client_no_permissions_rbac(
    create_test_user_rbac_limited, tenants_fixture, client
):
    """Return a test client authenticated as the no-permissions RBAC user."""
    client.user = create_test_user_rbac_limited
    serializer = TokenSerializer(
        data={
            "type": "tokens",
            "email": "rbac_limited@rbac.com",
            "password": TEST_PASSWORD,
        }
    )
    # Fail loudly on bad credentials instead of an opaque KeyError below.
    serializer.is_valid(raise_exception=True)
    access_token = serializer.validated_data["access"]
    client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
    return client
@pytest.fixture
def authenticated_client(create_test_user, tenants_fixture, client):
client.user = create_test_user
@@ -104,6 +217,7 @@ def authenticated_api_client(create_test_user, tenants_fixture):
serializer.is_valid()
access_token = serializer.validated_data["access"]
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
return client
@@ -128,6 +242,7 @@ def tenants_fixture(create_test_user):
tenant3 = Tenant.objects.create(
name="Tenant Three",
)
return tenant1, tenant2, tenant3
@@ -210,6 +325,46 @@ def provider_groups_fixture(tenants_fixture):
return pgroup1, pgroup2, pgroup3
@pytest.fixture
def roles_fixture(tenants_fixture):
    """Create three roles with distinct permission profiles in the first tenant.

    Returns:
        A 3-tuple of the created ``Role`` instances, in creation order.
    """
    tenant, *_ = tenants_fixture
    permission_profiles = (
        {
            "name": "Role One",
            "manage_users": True,
            "manage_account": True,
            "manage_billing": True,
            "manage_providers": True,
            "manage_integrations": False,
            "manage_scans": True,
            "unlimited_visibility": False,
        },
        {
            "name": "Role Two",
            "manage_users": False,
            "manage_account": False,
            "manage_billing": False,
            "manage_providers": True,
            "manage_integrations": True,
            "manage_scans": True,
            "unlimited_visibility": True,
        },
        {
            "name": "Role Three",
            "manage_users": True,
            "manage_account": True,
            "manage_billing": True,
            "manage_providers": True,
            "manage_integrations": True,
            "manage_scans": True,
            "unlimited_visibility": True,
        },
    )
    return tuple(
        Role.objects.create(tenant_id=tenant.id, **profile)
        for profile in permission_profiles
    )
@pytest.fixture
def provider_secret_fixture(providers_fixture):
return tuple(

View File

@@ -1,8 +1,9 @@
from celery.utils.log import get_task_logger
from django.db import transaction
from api.db_utils import batch_delete
from api.models import Finding, Provider, Resource, Scan, ScanSummary
from api.db_router import MainRouter
from api.db_utils import batch_delete, tenant_transaction
from api.models import Finding, Provider, Resource, Scan, ScanSummary, Tenant
logger = get_task_logger(__name__)
@@ -49,3 +50,26 @@ def delete_provider(pk: str):
deletion_summary.update(provider_summary)
return deletion_summary
def delete_tenant(pk: str):
    """
    Gracefully deletes an instance of a tenant along with its related data.
    Args:
    pk (str): The primary key of the Tenant instance to delete.
    Returns:
    dict: A dictionary with the count of deleted objects per model,
    including related models.
    """
    deletion_summary = {}
    # Providers are looked up via the admin DB connection so the query is not
    # restricted by row-level security.
    for provider in Provider.objects.using(MainRouter.admin_db).filter(tenant_id=pk):
        # Each provider is deleted inside its own tenant-scoped transaction.
        with tenant_transaction(pk):
            summary = delete_provider(provider.id)
            # NOTE(review): dict.update overwrites entries with the same key,
            # so per-model counts from earlier providers are replaced rather
            # than summed — confirm whether accumulated totals were intended.
            deletion_summary.update(summary)
    # Finally remove the tenant row itself (cascades per model definitions).
    Tenant.objects.using(MainRouter.admin_db).filter(id=pk).delete()
    return deletion_summary

View File

@@ -4,7 +4,7 @@ from celery import shared_task
from config.celery import RLSTask
from django_celery_beat.models import PeriodicTask
from tasks.jobs.connection import check_provider_connection
from tasks.jobs.deletion import delete_provider
from tasks.jobs.deletion import delete_provider, delete_tenant
from tasks.jobs.scan import aggregate_findings, perform_prowler_scan
from api.db_utils import tenant_transaction
@@ -134,3 +134,8 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
@shared_task(name="scan-summary")
def perform_scan_summary_task(tenant_id: str, scan_id: str):
return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)
@shared_task(name="tenant-deletion")
def delete_tenant_task(tenant_id: str):
    """Celery entry point that deletes a tenant and all of its related data."""
    return delete_tenant(pk=tenant_id)

View File

@@ -1,13 +1,15 @@
from unittest.mock import patch
import pytest
from django.core.exceptions import ObjectDoesNotExist
from tasks.jobs.deletion import delete_provider
from tasks.jobs.deletion import delete_provider, delete_tenant
from api.models import Provider
from api.models import Provider, Tenant
@pytest.mark.django_db
class TestDeleteInstance:
def test_delete_instance_success(self, providers_fixture):
class TestDeleteProvider:
def test_delete_provider_success(self, providers_fixture):
instance = providers_fixture[0]
result = delete_provider(instance.id)
@@ -15,8 +17,47 @@ class TestDeleteInstance:
with pytest.raises(ObjectDoesNotExist):
Provider.objects.get(pk=instance.id)
def test_delete_instance_does_not_exist(self):
def test_delete_provider_does_not_exist(self):
non_existent_pk = "babf6796-cfcc-4fd3-9dcf-88d012247645"
with pytest.raises(ObjectDoesNotExist):
delete_provider(non_existent_pk)
@patch("api.db_router.MainRouter.admin_db", new="default")
@pytest.mark.django_db
class TestDeleteTenant:
    """Tests for the delete_tenant deletion job."""

    def test_delete_tenant_success(self, tenants_fixture, providers_fixture):
        """A tenant with providers is removed along with its related data."""
        target_tenant = tenants_fixture[0]
        tenant_providers = Provider.objects.filter(tenant_id=target_tenant.id)

        # Preconditions: both the tenant and its providers are present.
        assert Tenant.objects.filter(id=target_tenant.id).exists()
        assert tenant_providers.exists()

        summary = delete_tenant(target_tenant.id)

        assert summary is not None
        assert not Tenant.objects.filter(id=target_tenant.id).exists()
        assert not Provider.objects.filter(tenant_id=target_tenant.id).exists()

    def test_delete_tenant_with_no_providers(self, tenants_fixture):
        """A provider-less tenant is deleted and yields an empty summary."""
        target_tenant = tenants_fixture[1]  # this tenant has no providers
        tenant_providers = Provider.objects.filter(tenant_id=target_tenant.id)

        # Preconditions: the tenant exists and owns no providers.
        assert Tenant.objects.filter(id=target_tenant.id).exists()
        assert not tenant_providers.exists()

        summary = delete_tenant(target_tenant.id)

        assert summary == {}  # no providers, so nothing to report
        assert not Tenant.objects.filter(id=target_tenant.id).exists()

11
codecov.yml Normal file
View File

@@ -0,0 +1,11 @@
component_management:
individual_components:
- component_id: "prowler"
paths:
- "prowler/**"
- component_id: "api"
paths:
- "api/**"
comment:
layout: "header, diff, flags, components"

View File

@@ -37,7 +37,7 @@ services:
- 3000:3000
postgres:
image: postgres:16.3-alpine
image: postgres:16.3-alpine3.20
hostname: "postgres-db"
volumes:
- ./_data/postgres:/var/lib/postgresql/data

View File

@@ -25,7 +25,7 @@ services:
- ${UI_PORT:-3000}:${UI_PORT:-3000}
postgres:
image: postgres:16.3-alpine
image: postgres:16.3-alpine3.20
hostname: "postgres-db"
volumes:
- ./_data/postgres:/var/lib/postgresql/data

View File

@@ -51,14 +51,14 @@ For the AWS provider we have ways to test a Prowler check based on the following
We use and contribute to the [Moto](https://github.com/getmoto/moto) library which allows us to easily mock out tests based on AWS infrastructure. **It's awesome!**
- AWS API calls covered by [Moto](https://github.com/getmoto/moto):
- Service tests with `@mock_<service>`
- Checks tests with `@mock_<service>`
- Service tests with `@mock_aws`
- Checks tests with `@mock_aws`
- AWS API calls not covered by Moto:
- Service test with `mock_make_api_call`
- Checks tests with [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock)
- AWS API calls partially covered by Moto:
- Service test with `@mock_<service>` and `mock_make_api_call`
- Checks tests with `@mock_<service>` and `mock_make_api_call`
- Service test with `@mock_aws` and `mock_make_api_call`
- Checks tests with `@mock_aws` and `mock_make_api_call`
In the following section we are going to explain all of the above scenarios with examples. The main difference between those scenarios comes down to whether the [Moto](https://github.com/getmoto/moto) library covers the AWS API calls made by the service. You can check the covered API calls [here](https://github.com/getmoto/moto/blob/master/IMPLEMENTATION_COVERAGE.md).
@@ -70,7 +70,7 @@ This section is going to be divided based on the API coverage of the [Moto](http
#### API calls covered
If the [Moto](https://github.com/getmoto/moto) library covers the API calls we want to test, we can use the `@mock_<service>` decorator. This will mocked out all the API calls made to AWS keeping the state within the code decorated, in this case the test function.
If the [Moto](https://github.com/getmoto/moto) library covers the API calls we want to test, we can use the `@mock_aws` decorator. This will mock out all the API calls made to AWS, keeping the state within the decorated code — in this case, the test function.
```python
# We need to import the unittest.mock to allow us to patch some objects
@@ -80,8 +80,8 @@ from unittest import mock
# Boto3 client and session to call the AWS APIs
from boto3 import client, session
# Moto decorator for the IAM service we want to mock
from moto import mock_iam
# Moto decorator
from moto import mock_aws
# Constants used
AWS_ACCOUNT_NUMBER = "123456789012"
@@ -91,10 +91,8 @@ AWS_REGION = "us-east-1"
# We always name the test classes like Test_<check_name>
class Test_iam_password_policy_uppercase:
# We include the Moto decorator for the service we want to use
# You can include more than one if two or more services are
# involved in test
@mock_iam
# We include the Moto decorator
@mock_aws
# We name the tests with test_<service>_<check_name>_<test_action>
def test_iam_password_policy_no_uppercase_flag(self):
# First, we have to create an IAM client
@@ -238,7 +236,7 @@ To do so, you need to mock the `botocore.client.BaseClient._make_api_call` funct
import boto3
import botocore
from unittest.mock import patch
from moto import mock_iam
from moto import mock_aws
# Original botocore _make_api_call function
orig = botocore.client.BaseClient._make_api_call

BIN
docs/img/compliance.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 274 KiB

View File

@@ -1,4 +1,4 @@
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
## Prowler App
@@ -29,7 +29,7 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
Prowler App can be installed in different ways, depending on your environment:
> See how to use Prowler App in the [Prowler App](tutorials/prowler-app.md) section.
> See how to use Prowler App in the [Prowler App Tutorial](tutorials/prowler-app.md) section.
=== "Docker Compose"
@@ -65,6 +65,9 @@ Prowler App can be installed in different ways, depending on your environment:
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
???+ warning
Make sure to have `api/.env` and `ui/.env.local` files with the required environment variables. You can find the required environment variables in the [`api/.env.template`](https://github.com/prowler-cloud/prowler/blob/master/api/.env.example) and [`ui/.env.template`](https://github.com/prowler-cloud/prowler/blob/master/ui/.env.template) files.
_Commands to run the API_:
``` bash
@@ -95,6 +98,19 @@ Prowler App can be installed in different ways, depending on your environment:
python -m celery -A config.celery worker -l info -E
```
_Commands to run the API Scheduler_:
``` bash
git clone https://github.com/prowler-cloud/prowler \
cd prowler/api \
poetry install \
poetry shell \
set -a \
source .env \
cd src/backend \
python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
_Commands to run the UI_:
``` bash
@@ -107,9 +123,6 @@ Prowler App can be installed in different ways, depending on your environment:
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
???+ warning
Make sure to have `api/.env` and `ui/.env.local` files with the required environment variables. You can find the required environment variables in the [`api/.env.template`](https://github.com/prowler-cloud/prowler/blob/master/api/.env.example) and [`ui/.env.template`](https://github.com/prowler-cloud/prowler/blob/master/ui/.env.template) files.
???+ warning
Google and GitHub authentication is only available in [Prowler Cloud](https://prowler.com).
@@ -373,8 +386,8 @@ After successfully adding and testing your credentials, Prowler will start scann
#### **View Results**
While the scan is running, start exploring the findings in these sections:
- **Overview**: High-level summary of the scans. <img src="../../img/overview.png" alt="Overview" width="700"/>
- **Compliance**: Insights into compliance status. <img src="../../img/compliance.png" alt="Compliance" width="700"/>
- **Overview**: High-level summary of the scans. <img src="img/overview.png" alt="Overview" width="700"/>
- **Compliance**: Insights into compliance status. <img src="img/compliance.png" alt="Compliance" width="700"/>
> See more details about the Prowler App usage in the [Prowler App](tutorials/prowler-app.md) section.

View File

@@ -13,7 +13,7 @@ As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (F
## Reporting Vulnerabilities
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or Prowler SaaS service, please submit the information by contacting to us via [**support.prowler.com**](http://support.prowler.com).
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or the Prowler Cloud service, please submit the information by contacting us via [**support.prowler.com**](http://support.prowler.com).
The information you share with the Prowler team as part of this process is kept confidential within Prowler. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.

View File

@@ -22,32 +22,31 @@ In order to see which compliance frameworks are cover by Prowler, you can use op
```sh
prowler <provider> --list-compliance
```
Currently, the available frameworks are:
### AWS
- `aws_account_security_onboarding_aws`
- `aws_audit_manager_control_tower_guardrails_aws`
- `aws_foundational_security_best_practices_aws`
- `aws_foundational_technical_review_aws`
- `aws_well_architected_framework_reliability_pillar_aws`
- `aws_well_architected_framework_security_pillar_aws`
- `cis_1.4_aws`
- `cis_1.5_aws`
- `cis_2.0_aws`
- `cis_2.0_gcp`
- `cis_2.0_azure`
- `cis_2.1_azure`
- `cis_3.0_aws`
- `cis_1.8_kubernetes`
- `cisa_aws`
- `ens_rd2022_aws`
- `fedramp_low_revision_4_aws`
- `fedramp_moderate_revision_4_aws`
- `ffiec_aws`
- `aws_foundational_technical_review_aws`
- `gdpr_aws`
- `gxp_21_cfr_part_11_aws`
- `gxp_eu_annex_11_aws`
- `hipaa_aws`
- `iso27001_2013_aws`
- `kisa_isms_p_2023_aws`
- `kisa_isms_p_2023_korean_aws`
- `mitre_attack_aws`
- `nist_800_171_revision_2_aws`
- `nist_800_53_revision_4_aws`
@@ -57,6 +56,23 @@ Currently, the available frameworks are:
- `rbi_cyber_security_framework_aws`
- `soc2_aws`
### Azure
- `cis_2.0_azure`
- `cis_2.1_azure`
- `ens_rd2022_azure`
- `mitre_attack_azure`
### GCP
- `cis_2.0_gcp`
- `ens_rd2022_gcp`
- `mitre_attack_gcp`
### Kubernetes
- `cis_1.8_kubernetes`
## List Requirements of Compliance Frameworks
For each compliance framework, you can use option `--list-compliance-requirements` to list its requirements:
```sh

View File

@@ -75,6 +75,7 @@ The following list includes all the Azure checks with configurable variables tha
| `app_ensure_php_version_is_latest` | `php_latest_version` | String |
| `app_ensure_python_version_is_latest` | `python_latest_version` | String |
| `app_ensure_java_version_is_latest` | `java_latest_version` | String |
| `sqlserver_recommended_minimal_tls_version` | `recommended_minimal_tls_versions` | List of Strings |
## GCP
@@ -447,6 +448,14 @@ azure:
# azure.app_ensure_java_version_is_latest
java_latest_version: "17"
# Azure SQL Server
# azure.sqlserver_minimal_tls_version
recommended_minimal_tls_versions:
[
"1.2",
"1.3"
]
# GCP Configuration
gcp:
# GCP Compute Configuration

View File

@@ -5,6 +5,9 @@ The **Prowler App** is a user-friendly interface for the Prowler CLI, providing
After [installing](../index.md#prowler-app-installation) the **Prowler App**, access it at [http://localhost:3000](http://localhost:3000).
You can also access to the auto-generated **Prowler API** documentation at [http://localhost:8080/api/v1/docs](http://localhost:8080/api/v1/docs) to see all the available endpoints, parameters and responses.
???+ note
If you are a [Prowler Cloud](https://cloud.prowler.com/sign-in) user you can see API docs at [https://api.prowler.com/api/v1/docs](https://api.prowler.com/api/v1/docs)
## **Step 1: Sign Up**
To get started, sign up using your email and password:

View File

@@ -34,7 +34,6 @@ theme:
icon: material/weather-sunny
name: Switch to light mode
plugins:
- search
- git-revision-date-localized:
@@ -112,7 +111,7 @@ nav:
- Contact Us: contact.md
- Troubleshooting: troubleshooting.md
- About: about.md
- Prowler SaaS: https://prowler.com
- Prowler Cloud: https://prowler.com
# Customization
extra:

199
poetry.lock generated
View File

@@ -775,17 +775,17 @@ files = [
[[package]]
name = "boto3"
version = "1.35.71"
version = "1.35.78"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "boto3-1.35.71-py3-none-any.whl", hash = "sha256:e2969a246bb3208122b3c349c49cc6604c6fc3fc2b2f65d99d3e8ccd745b0c16"},
{file = "boto3-1.35.71.tar.gz", hash = "sha256:3ed7172b3d4fceb6218bb0ec3668c4d40c03690939c2fca4f22bb875d741a07f"},
{file = "boto3-1.35.78-py3-none-any.whl", hash = "sha256:5ef7166fe5060637b92af8dc152cd7acecf96b3fc9c5456706a886cadb534391"},
{file = "boto3-1.35.78.tar.gz", hash = "sha256:fc8001519c8842e766ad3793bde3fbd0bb39e821a582fc12cf67876b8f3cf7f1"},
]
[package.dependencies]
botocore = ">=1.35.71,<1.36.0"
botocore = ">=1.35.78,<1.36.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -794,13 +794,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.35.71"
version = "1.35.79"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
{file = "botocore-1.35.71-py3-none-any.whl", hash = "sha256:fc46e7ab1df3cef66dfba1633f4da77c75e07365b36f03bd64a3793634be8fc1"},
{file = "botocore-1.35.71.tar.gz", hash = "sha256:f9fa058e0393660c3fe53c1e044751beb64b586def0bd2212448a7c328b0cbba"},
{file = "botocore-1.35.79-py3-none-any.whl", hash = "sha256:e6b10bb9a357e3f5ca2e60f6dd15a85d311b9a476eb21b3c0c2a3b364a2897c8"},
{file = "botocore-1.35.79.tar.gz", hash = "sha256:245bfdda1b1508539ddd1819c67a8a2cc81780adf0715d3de418d64c4247f346"},
]
[package.dependencies]
@@ -1099,73 +1099,73 @@ files = [
[[package]]
name = "coverage"
version = "7.6.8"
version = "7.6.9"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.9"
files = [
{file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"},
{file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"},
{file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"},
{file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"},
{file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"},
{file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"},
{file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"},
{file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"},
{file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"},
{file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"},
{file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"},
{file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"},
{file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"},
{file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"},
{file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"},
{file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"},
{file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"},
{file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"},
{file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"},
{file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"},
{file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"},
{file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"},
{file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"},
{file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"},
{file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"},
{file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"},
{file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"},
{file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"},
{file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"},
{file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"},
{file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"},
{file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"},
{file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"},
{file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"},
{file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"},
{file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"},
{file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"},
{file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"},
{file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"},
{file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"},
{file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"},
{file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"},
{file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"},
{file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"},
{file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"},
{file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"},
{file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"},
{file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"},
{file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"},
{file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"},
{file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"},
{file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"},
{file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"},
{file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"},
{file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"},
{file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"},
{file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"},
{file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"},
{file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"},
{file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"},
{file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"},
{file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"},
{file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"},
{file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"},
{file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"},
{file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"},
{file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"},
{file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"},
{file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"},
{file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"},
{file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"},
{file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"},
{file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"},
{file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"},
{file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"},
{file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"},
{file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"},
{file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"},
{file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"},
{file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"},
{file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"},
{file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"},
{file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"},
{file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"},
{file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"},
{file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"},
{file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"},
{file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"},
]
[package.dependencies]
@@ -1719,13 +1719,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
[[package]]
name = "google-api-python-client"
version = "2.154.0"
version = "2.155.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "google_api_python_client-2.154.0-py2.py3-none-any.whl", hash = "sha256:a521bbbb2ec0ba9d6f307cdd64ed6e21eeac372d1bd7493a4ab5022941f784ad"},
{file = "google_api_python_client-2.154.0.tar.gz", hash = "sha256:1b420062e03bfcaa1c79e2e00a612d29a6a934151ceb3d272fe150a656dc8f17"},
{file = "google_api_python_client-2.155.0-py2.py3-none-any.whl", hash = "sha256:83fe9b5aa4160899079d7c93a37be306546a17e6686e2549bcc9584f1a229747"},
{file = "google_api_python_client-2.155.0.tar.gz", hash = "sha256:25529f89f0d13abcf3c05c089c423fb2858ac16e0b3727543393468d0d7af67c"},
]
[package.dependencies]
@@ -2403,13 +2403,13 @@ files = [
[[package]]
name = "microsoft-kiota-abstractions"
version = "1.6.2"
version = "1.6.6"
description = "Core abstractions for kiota generated libraries in Python"
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "microsoft_kiota_abstractions-1.6.2-py3-none-any.whl", hash = "sha256:8c2c777748e80f17dba3809b5d149585d9918198f0f94125e87432f7165ba80e"},
{file = "microsoft_kiota_abstractions-1.6.2.tar.gz", hash = "sha256:dec30f0fb427a051003e94b5c6fcf266f4702ecbd9d6961e3966124b9cbe41bf"},
{file = "microsoft_kiota_abstractions-1.6.6-py3-none-any.whl", hash = "sha256:29071715baf0d604c381c5d17be47f35e6e63a441dcfb5e9141963406b469d50"},
{file = "microsoft_kiota_abstractions-1.6.6.tar.gz", hash = "sha256:2554495b00c9c25b43f6964a71b65c89a277bd6b50f4d0028a7febcec6c4fd67"},
]
[package.dependencies]
@@ -2583,13 +2583,13 @@ dev = ["click", "codecov", "mkdocs-gen-files", "mkdocs-git-authors-plugin", "mkd
[[package]]
name = "mkdocs-material"
version = "9.5.46"
version = "9.5.48"
description = "Documentation that simply works"
optional = false
python-versions = ">=3.8"
files = [
{file = "mkdocs_material-9.5.46-py3-none-any.whl", hash = "sha256:98f0a2039c62e551a68aad0791a8d41324ff90c03a6e6cea381a384b84908b83"},
{file = "mkdocs_material-9.5.46.tar.gz", hash = "sha256:ae2043f4238e572f9a40e0b577f50400d6fc31e2fef8ea141800aebf3bd273d7"},
{file = "mkdocs_material-9.5.48-py3-none-any.whl", hash = "sha256:b695c998f4b939ce748adbc0d3bff73fa886a670ece948cf27818fa115dc16f8"},
{file = "mkdocs_material-9.5.48.tar.gz", hash = "sha256:a582531e8b34f4c7ed38c29d5c44763053832cf2a32f7409567e0c74749a47db"},
]
[package.dependencies]
@@ -2769,13 +2769,13 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.12.0"
version = "1.14.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.8"
files = [
{file = "msgraph_sdk-1.12.0-py3-none-any.whl", hash = "sha256:ac298b546b240391b0e407379d039db32862a56d6fe15cf7c5f7e77631fc6771"},
{file = "msgraph_sdk-1.12.0.tar.gz", hash = "sha256:fbb5a8a9f6eed89b496f207eb35b6b4cfc7fefa75608aeef07477a3b2276d4fa"},
{file = "msgraph_sdk-1.14.0-py3-none-any.whl", hash = "sha256:1a2f327dc8fbe5a5e6d0d84cf71d605e7b118b3066b1e16f011ccd8fd927bb03"},
{file = "msgraph_sdk-1.14.0.tar.gz", hash = "sha256:5bbda80941c5d1794682753b8b291bd2ebed719a43d6de949fd0cd613b6dfbbd"},
]
[package.dependencies]
@@ -3796,17 +3796,17 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
[[package]]
name = "pylint"
version = "3.3.1"
version = "3.3.2"
description = "python code static checker"
optional = false
python-versions = ">=3.9.0"
files = [
{file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"},
{file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"},
{file = "pylint-3.3.2-py3-none-any.whl", hash = "sha256:77f068c287d49b8683cd7c6e624243c74f92890f767f106ffa1ddf3c0a54cb7a"},
{file = "pylint-3.3.2.tar.gz", hash = "sha256:9ec054ec992cd05ad30a6df1676229739a73f8feeabf3912c995d17601052b01"},
]
[package.dependencies]
astroid = ">=3.3.4,<=3.4.0-dev0"
astroid = ">=3.3.5,<=3.4.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.2", markers = "python_version < \"3.11\""},
@@ -3858,13 +3858,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pytest"
version = "8.3.3"
version = "8.3.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
{file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
{file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
]
[package.dependencies]
@@ -4458,6 +4458,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -4466,6 +4467,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -4474,6 +4476,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -4482,6 +4485,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -4490,6 +4494,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -4638,17 +4643,17 @@ files = [
[[package]]
name = "slack-sdk"
version = "3.33.4"
version = "3.33.5"
description = "The Slack API Platform SDK for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "slack_sdk-3.33.4-py2.py3-none-any.whl", hash = "sha256:9f30cb3c9c07b441c49d53fc27f9f1837ad1592a7e9d4ca431f53cdad8826cc6"},
{file = "slack_sdk-3.33.4.tar.gz", hash = "sha256:5e109847f6b6a22d227609226ba4ed936109dc00675bddeb7e0bee502d3ee7e0"},
{file = "slack_sdk-3.33.5-py2.py3-none-any.whl", hash = "sha256:b8cccadfa3d4005a5e6529f52000d25c583f46173fda8e9136fdd2bc58923ff6"},
{file = "slack_sdk-3.33.5.tar.gz", hash = "sha256:a5e74c00c99dc844ad93e501ab764a20d86fa8184bbc9432af217496f632c4ee"},
]
[package.extras]
optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<14)"]
optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<15)"]
[[package]]
name = "smmap"
@@ -4888,13 +4893,13 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "vulture"
version = "2.13"
version = "2.14"
description = "Find dead code"
optional = false
python-versions = ">=3.8"
files = [
{file = "vulture-2.13-py2.py3-none-any.whl", hash = "sha256:34793ba60488e7cccbecdef3a7fe151656372ef94fdac9fe004c52a4000a6d44"},
{file = "vulture-2.13.tar.gz", hash = "sha256:78248bf58f5eaffcc2ade306141ead73f437339950f80045dce7f8b078e5a1aa"},
{file = "vulture-2.14-py2.py3-none-any.whl", hash = "sha256:d9a90dba89607489548a49d557f8bac8112bd25d3cbc8aeef23e860811bd5ed9"},
{file = "vulture-2.14.tar.gz", hash = "sha256:cb8277902a1138deeab796ec5bef7076a6e0248ca3607a3f3dee0b6d9e9b8415"},
]
[package.dependencies]
@@ -5194,4 +5199,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.13"
content-hash = "1acc901866ecfc2c0f3576b9e442d7a3b6e6522cac3d4d1b9301ed4232755cba"
content-hash = "e00da6013a01923ac8e79017e7fdb221e09a3dcf581ad8d74e39550be64cc2f3"

View File

@@ -12,7 +12,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "5.0.0"
prowler_version = "5.1.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

View File

@@ -388,6 +388,14 @@ azure:
# azure.app_ensure_java_version_is_latest
java_latest_version: "17"
# Azure SQL Server
# azure.sqlserver_minimal_tls_version
recommended_minimal_tls_versions:
[
"1.2",
"1.3",
]
# GCP Configuration
gcp:
# GCP Compute Configuration

View File

@@ -94,11 +94,12 @@ def get_cis_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -95,11 +95,12 @@ def get_ens_table(
print(
f"\nEstado de Cumplimiento de {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) NO CUMPLE{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) CUMPLE{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) NO CUMPLE{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) CUMPLE{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -39,11 +39,12 @@ def get_generic_compliance_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -61,11 +61,12 @@ def get_kisa_ismsp_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -69,11 +69,12 @@ def get_mitre_attack_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -7315,6 +7315,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -11097,6 +11098,7 @@
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-central-2",

View File

@@ -8,19 +8,20 @@ class autoscaling_group_launch_configuration_no_public_ip(Check):
def execute(self):
findings = []
for group in autoscaling_client.groups:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "PASS"
report.status_extended = f"Autoscaling group {group.name} does not have an associated launch configuration assigning a public IP address."
for lc in autoscaling_client.launch_configurations.values():
if lc.name == group.launch_configuration_name and lc.public_ip:
report.status = "FAIL"
report.status_extended = f"Autoscaling group {group.name} has an associated launch configuration assigning a public IP address."
if lc.name == group.launch_configuration_name:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "PASS"
report.status_extended = f"Autoscaling group {group.name} does not have an associated launch configuration assigning a public IP address."
findings.append(report)
if lc.public_ip:
report.status = "FAIL"
report.status_extended = f"Autoscaling group {group.name} has an associated launch configuration assigning a public IP address."
findings.append(report)
return findings

View File

@@ -8,20 +8,17 @@ class autoscaling_group_launch_configuration_requires_imdsv2(Check):
def execute(self):
findings = []
for group in autoscaling_client.groups:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "FAIL"
report.status_extended = (
f"Autoscaling group {group.name} has IMDSv2 disabled or not required."
)
for (
launch_configuration
) in autoscaling_client.launch_configurations.values():
if launch_configuration.name == group.launch_configuration_name:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "FAIL"
report.status_extended = f"Autoscaling group {group.name} has IMDSv2 disabled or not required."
if (
launch_configuration.http_endpoint == "enabled"
and launch_configuration.http_tokens == "required"
@@ -32,6 +29,6 @@ class autoscaling_group_launch_configuration_requires_imdsv2(Check):
report.status = "PASS"
report.status_extended = f"Autoscaling group {group.name} has metadata service disabled."
findings.append(report)
findings.append(report)
return findings

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_inside_vpc",
"CheckTitle": "Ensure AWS Lambda Functions Are Deployed Inside a VPC",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "low",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
"CheckTitle": "Check if Lambda functions invoke API operations are being recorded by CloudTrail.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "low",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_no_secrets_in_code",
"CheckTitle": "Find secrets in Lambda functions code.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "critical",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_no_secrets_in_variables",
"CheckTitle": "Find secrets in Lambda functions variables.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "critical",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_not_publicly_accessible",
"CheckTitle": "Check if Lambda functions have resource-based policy set as Public.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "critical",

View File

@@ -0,0 +1,61 @@
import json
from prowler.lib.logger import logger
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
def fixer(resource_id: str, region: str) -> bool:
    """
    Strip every statement from a Lambda function's resource-based policy and
    re-grant invoke access scoped to the audited account only.

    Walks the function's policy, calls RemovePermission for each statement that
    carries a Sid, then installs a single account-restricted permission.
    Requires the lambda:RemovePermission and lambda:AddPermission permissions.
    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "lambda:RemovePermission",
                "Resource": "*"
            },
            {
                "Effect": "Allow",
                "Action": "lambda:AddPermission",
                "Resource": "*"
            }
        ]
    }
    Args:
        resource_id (str): The Lambda function name or ARN.
        region (str): AWS region where the Lambda function exists.
    Returns:
        bool: True if the operation is successful (policy removed and permission added), False otherwise.
    """
    try:
        client = awslambda_client.regional_clients[region]
        current_policy = json.loads(
            client.get_policy(FunctionName=resource_id).get("Policy")
        )
        # Remove every existing statement (public or not) before re-granting.
        for stmt in current_policy.get("Statement", []):
            if sid := stmt.get("Sid"):
                client.remove_permission(FunctionName=resource_id, StatementId=sid)
        # Add back invoke rights limited to the audited account's principal.
        client.add_permission(
            FunctionName=resource_id,
            StatementId="ProwlerFixerStatement",
            Principal=awslambda_client.audited_account,
            Action="lambda:InvokeFunction",
        )
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False
    else:
        return True

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_url_cors_policy",
"CheckTitle": "Check Lambda Function URL CORS configuration.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "medium",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_url_public",
"CheckTitle": "Check Public Lambda Function URL.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "high",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_using_supported_runtimes",
"CheckTitle": "Find obsolete Lambda runtimes.",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "medium",

View File

@@ -3,7 +3,7 @@
"CheckID": "awslambda_function_vpc_multi_az",
"CheckTitle": "Check if AWS Lambda Function VPC is deployed Across Multiple Availability Zones",
"CheckType": [],
"ServiceName": "lambda",
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "medium",

View File

@@ -183,21 +183,26 @@ class Backup(AWSService):
def _list_recovery_points(self, regional_client):
logger.info("Backup - Listing Recovery Points...")
try:
for backup_vault in self.backup_vaults:
paginator = regional_client.get_paginator(
"list_recovery_points_by_backup_vault"
)
for page in paginator.paginate(BackupVaultName=backup_vault.name):
for recovery_point in page.get("RecoveryPoints", []):
self.recovery_points.append(
RecoveryPoint(
arn=recovery_point.get("RecoveryPointArn"),
backup_vault_name=backup_vault.name,
encrypted=recovery_point.get("IsEncrypted", False),
backup_vault_region=backup_vault.region,
tags=[],
)
)
if self.backup_vaults:
for backup_vault in self.backup_vaults:
paginator = regional_client.get_paginator(
"list_recovery_points_by_backup_vault"
)
for page in paginator.paginate(BackupVaultName=backup_vault.name):
for recovery_point in page.get("RecoveryPoints", []):
arn = recovery_point.get("RecoveryPointArn")
if arn:
self.recovery_points.append(
RecoveryPoint(
arn=arn,
backup_vault_name=backup_vault.name,
encrypted=recovery_point.get(
"IsEncrypted", False
),
backup_vault_region=backup_vault.region,
tags=[],
)
)
except ClientError as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -67,10 +67,8 @@ class cloudtrail_threat_detection_enumeration(Check):
found_potential_enumeration = True
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.resource_id = aws_identity_arn.split("/")[-1]
report.resource_arn = aws_identity_arn
report.status = "FAIL"
report.status_extended = f"Potential enumeration attack detected from AWS {aws_identity_type} {aws_identity_arn.split('/')[-1]} with an threshold of {identity_threshold}."
findings.append(report)

View File

@@ -67,10 +67,8 @@ class cloudtrail_threat_detection_llm_jacking(Check):
found_potential_llm_jacking = True
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.resource_id = aws_identity_arn.split("/")[-1]
report.resource_arn = aws_identity_arn
report.status = "FAIL"
report.status_extended = f"Potential LLM Jacking attack detected from AWS {aws_identity_type} {aws_identity_arn.split('/')[-1]} with an threshold of {identity_threshold}."
findings.append(report)

View File

@@ -69,10 +69,8 @@ class cloudtrail_threat_detection_privilege_escalation(Check):
found_potential_privilege_escalation = True
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.resource_id = aws_identity_arn.split("/")[-1]
report.resource_arn = aws_identity_arn
report.status = "FAIL"
report.status_extended = f"Potential privilege escalation attack detected from AWS {aws_identity_type} {aws_identity_arn.split('/')[-1]} with an threshold of {identity_threshold}."
findings.append(report)

View File

@@ -8,7 +8,7 @@
"ServiceName": "ec2",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id",
"Severity": "medium",
"Severity": "high",
"ResourceType": "AwsEc2Instance",
"Description": "Ensure Instance Metadata Service Version 2 (IMDSv2) is enforced for EC2 instances at the account level to protect against SSRF vulnerabilities.",
"Risk": "EC2 instances that use IMDSv1 are vulnerable to SSRF attacks.",

View File

@@ -8,7 +8,7 @@
"ServiceName": "ec2",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "medium",
"Severity": "high",
"ResourceType": "AwsEc2Instance",
"Description": "Check if EC2 Instance Metadata Service Version 2 (IMDSv2) is Enabled and Required.",
"Risk": "Using IMDSv2 will protect from misconfiguration and SSRF vulnerabilities. IMDSv1 will not.",

View File

@@ -0,0 +1,51 @@
from prowler.lib.logger import logger
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.security_groups import check_security_group
def fixer(resource_id: str, region: str) -> bool:
    """
    Revoke ingress rules that open the CIFS ports (139, 445) to any address
    (0.0.0.0/0) on the security groups attached to the given EC2 instance.

    This fixer is only triggered when the check finds CIFS ports exposed to
    the Internet. Requires the ec2:RevokeSecurityGroupIngress permission.
    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "ec2:RevokeSecurityGroupIngress",
                "Resource": "*"
            }
        ]
    }
    Args:
        resource_id (str): The EC2 instance ID.
        region (str): The AWS region where the EC2 instance exists.
    Returns:
        bool: True if the operation is successful (ingress rule revoked), False otherwise.
    """
    try:
        client = ec2_client.regional_clients[region]
        cifs_ports = [139, 445]
        # Locate the target instance, then inspect only its attached groups.
        matching_instances = (
            inst for inst in ec2_client.instances if inst.id == resource_id
        )
        for instance in matching_instances:
            attached_groups = (
                sg
                for sg in ec2_client.security_groups.values()
                if sg.id in instance.security_groups
            )
            for sg in attached_groups:
                for rule in sg.ingress_rules:
                    # Revoke only rules exposing TCP 139/445 to the world.
                    if check_security_group(rule, "tcp", cifs_ports, any_address=True):
                        client.revoke_security_group_ingress(
                            GroupId=sg.id,
                            IpPermissions=[rule],
                        )
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False
    else:
        return True

View File

@@ -0,0 +1,32 @@
{
"Provider": "aws",
"CheckID": "ec2_launch_template_imdsv2_required",
"CheckTitle": "Amazon EC2 launch templates should have IMDSv2 enabled and required.",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
],
"ServiceName": "ec2",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:ec2:region:account-id:launch-template/resource-id",
"Severity": "high",
"ResourceType": "AwsEc2LaunchTemplate",
"Description": "This control checks if Amazon EC2 launch templates are configured with IMDSv2 enabled and required. The control fails if IMDSv2 is not enabled or required in the launch template versions.",
"Risk": "Without IMDSv2 required, EC2 instances may be vulnerable to metadata service attacks, allowing unauthorized access to instance metadata, potentially leading to compromise of instance credentials or other sensitive data.",
"RelatedUrl": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html",
"Remediation": {
"Code": {
"CLI": "aws ec2 modify-launch-template --launch-template-id <template-id> --version <version-number> --metadata-options HttpTokens=required",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-170",
"Terraform": ""
},
"Recommendation": {
"Text": "To ensure EC2 launch templates have IMDSv2 enabled and required, update the template to configure the Instance Metadata Service Version 2 as required.",
"Url": "https://docs.aws.amazon.com/autoscaling/ec2/userguide/create-launch-template.html#change-metadata-options"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -0,0 +1,42 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
class ec2_launch_template_imdsv2_required(Check):
    """Check that EC2 launch template versions enforce IMDSv2 (or disable metadata)."""

    def execute(self):
        findings = []
        for template in ec2_client.launch_templates:
            report = Check_Report_AWS(self.metadata())
            report.region = template.region
            report.resource_id = template.id
            report.resource_arn = template.arn
            report.resource_tags = template.tags

            # Bucket every template version by its metadata-service configuration.
            imdsv2_required = []
            metadata_disabled = []
            imdsv2_missing = []
            for version in template.versions:
                data = version.template_data
                label = str(version.version_number)
                if data.http_endpoint == "enabled" and data.http_tokens == "required":
                    imdsv2_required.append(label)
                elif data.http_endpoint == "disabled":
                    metadata_disabled.append(label)
                else:
                    imdsv2_missing.append(label)

            # PASS if any version requires IMDSv2, PASS if metadata is disabled,
            # otherwise FAIL listing the offending versions.
            if imdsv2_required:
                report.status = "PASS"
                report.status_extended = f"EC2 Launch Template {template.name} has IMDSv2 enabled and required in the following versions: {', '.join(imdsv2_required)}."
            elif metadata_disabled:
                report.status = "PASS"
                report.status_extended = f"EC2 Launch Template {template.name} has metadata service disabled in the following versions: {', '.join(metadata_disabled)}."
            else:
                report.status = "FAIL"
                report.status_extended = f"EC2 Launch Template {template.name} has IMDSv2 disabled or not required in the following versions: {', '.join(imdsv2_missing)}."
            findings.append(report)
        return findings

View File

@@ -10,7 +10,6 @@ from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.lib.service.service import AWSService
################## EC2
class EC2(AWSService):
def __init__(self, provider):
# Call AWSService's __init__
@@ -569,6 +568,12 @@ class EC2(AWSService):
),
network_interfaces=enis,
associate_public_ip_address=associate_public_ip,
http_tokens=template_version["LaunchTemplateData"]
.get("MetadataOptions", {})
.get("HttpTokens", ""),
http_endpoint=template_version["LaunchTemplateData"]
.get("MetadataOptions", {})
.get("HttpEndpoint", ""),
),
)
)
@@ -763,6 +768,8 @@ class TemplateData(BaseModel):
user_data: str
network_interfaces: Optional[list[NetworkInterface]]
associate_public_ip_address: Optional[bool]
http_tokens: Optional[str]
http_endpoint: Optional[str]
class LaunchTemplateVersion(BaseModel):

View File

@@ -0,0 +1,38 @@
from prowler.lib.logger import logger
from prowler.providers.aws.services.ecr.ecr_client import ecr_client
def fixer(resource_id: str, region: str) -> bool:
    """
    Remove public access from an ECR repository by deleting its repository policy.

    The policy granting public access is deleted outright rather than edited.
    Requires the ecr:DeleteRepositoryPolicy permission.
    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "ecr:DeleteRepositoryPolicy",
                "Resource": "*"
            }
        ]
    }
    Args:
        resource_id (str): The ECR repository name.
        region (str): AWS region where the ECR repository exists.
    Returns:
        bool: True if the operation is successful (policy updated), False otherwise.
    """
    try:
        ecr_client.regional_clients[region].delete_repository_policy(
            repositoryName=resource_id
        )
        return True
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False

View File

@@ -31,10 +31,7 @@ class firehose_stream_encrypted_at_rest(Check):
f"Firehose Stream {stream.name} does have at rest encryption enabled."
)
if (
stream.kms_encryption != EncryptionStatus.ENABLED
or not stream.kms_key_arn
):
if stream.kms_encryption != EncryptionStatus.ENABLED:
report.status = "FAIL"
report.status_extended = f"Firehose Stream {stream.name} does not have at rest encryption enabled."

View File

@@ -0,0 +1,38 @@
from prowler.lib.logger import logger
from prowler.providers.aws.services.glacier.glacier_client import glacier_client
def fixer(resource_id: str, region: str) -> bool:
    """
    Remove public access from a Glacier vault by deleting its access policy.

    The vault access policy granting public access is deleted outright.
    Requires the glacier:DeleteVaultAccessPolicy permission.
    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "glacier:DeleteVaultAccessPolicy",
                "Resource": "*"
            }
        ]
    }
    Args:
        resource_id (str): The Glacier vault name.
        region (str): AWS region where the Glacier vault exists.
    Returns:
        bool: True if the operation is successful (policy updated), False otherwise.
    """
    try:
        glacier_client.regional_clients[region].delete_vault_access_policy(
            vaultName=resource_id
        )
        return True
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False

View File

@@ -49,7 +49,7 @@ class iam_rotate_access_key_90_days(Check):
old_access_keys = True
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user["user"]
report.resource_id = f"{user['user']}-access-key-1"
report.resource_arn = user["arn"]
report.resource_tags = user_tags
report.status = "FAIL"
@@ -66,7 +66,7 @@ class iam_rotate_access_key_90_days(Check):
old_access_keys = True
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user["user"]
report.resource_id = f"{user['user']}-access-key-2"
report.resource_arn = user["arn"]
report.resource_tags = user_tags
report.status = "FAIL"

View File

@@ -0,0 +1,43 @@
from prowler.lib.logger import logger
from prowler.providers.aws.services.opensearch.opensearch_client import (
opensearch_client,
)
def fixer(resource_id: str, region: str) -> bool:
    """
    Remove public access from an OpenSearch domain's resource-based policy.

    Updates the domain config with an empty access policy, replacing the
    existing (public) one. Requires the es:UpdateDomainConfig permission.
    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "es:UpdateDomainConfig",
                "Resource": "*"
            }
        ]
    }
    Args:
        resource_id (str): The OpenSearch domain name.
        region (str): AWS region where the OpenSearch domain exists.
    Returns:
        bool: True if the operation is successful (policy updated), False otherwise.
    """
    try:
        client = opensearch_client.regional_clients[region]
        # An empty AccessPolicies string clears the current (public) policy.
        client.update_domain_config(
            DomainName=resource_id,
            AccessPolicies="",
        )
        return True
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False

View File

@@ -6,8 +6,8 @@ class rds_cluster_non_default_port(Check):
def execute(self):
findings = []
default_ports = {
3306: ["mysql", "mariadb"],
5432: ["postgres"],
3306: ["mysql", "mariadb", "aurora-mysql"],
5432: ["postgres", "aurora-postgresql"],
1521: ["oracle"],
1433: ["sqlserver"],
50000: ["db2"],

View File

@@ -37,18 +37,21 @@ class rds_instance_no_public_access(Check):
):
report.status_extended = f"RDS Instance {db_instance.id} is set as publicly accessible and security group {security_group.name} ({security_group.id}) has {db_instance.engine} port {db_instance_port} open to the Internet at endpoint {db_instance.endpoint.get('Address')} but is not in a public subnet."
public_sg = True
if db_instance.subnet_ids:
for subnet_id in db_instance.subnet_ids:
if (
subnet_id in vpc_client.vpc_subnets
and vpc_client.vpc_subnets[
subnet_id
].public
):
report.status = "FAIL"
report.status_extended = f"RDS Instance {db_instance.id} is set as publicly accessible and security group {security_group.name} ({security_group.id}) has {db_instance.engine} port {db_instance_port} open to the Internet at endpoint {db_instance.endpoint.get('Address')} in a public subnet {subnet_id}."
break
if public_sg:
break
if public_sg:
break
if db_instance.subnet_ids:
for subnet_id in db_instance.subnet_ids:
if (
subnet_id in vpc_client.vpc_subnets
and vpc_client.vpc_subnets[subnet_id].public
):
report.status = "FAIL"
report.status_extended = f"RDS Instance {db_instance.id} is set as publicly accessible and security group {security_group.name} ({security_group.id}) has {db_instance.engine} port {db_instance_port} open to the Internet at endpoint {db_instance.endpoint.get('Address')} in a public subnet {subnet_id}."
break
findings.append(report)

View File

@@ -6,8 +6,8 @@ class rds_instance_non_default_port(Check):
def execute(self):
findings = []
default_ports = {
3306: ["mysql", "mariadb"],
5432: ["postgres"],
3306: ["mysql", "mariadb", "aurora-mysql"],
5432: ["postgres", "aurora-postgresql"],
1521: ["oracle"],
1433: ["sqlserver"],
50000: ["db2"],

View File

@@ -0,0 +1,46 @@
from prowler.lib.logger import logger
from prowler.providers.aws.services.s3.s3_client import s3_client
def fixer(resource_id: str, region: str) -> bool:
    """
    Block all public access on an S3 bucket.

    Applies a public access block configuration that disables public ACLs and
    public bucket policies in every form. Requires the
    s3:PutBucketPublicAccessBlock permission.
    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "s3:PutBucketPublicAccessBlock",
                "Resource": "*"
            }
        ]
    }
    Args:
        resource_id (str): The S3 bucket name.
        region (str): AWS region where the S3 bucket exists.
    Returns:
        bool: True if the operation is successful (public access is blocked),
        False otherwise.
    """
    # Every flag set: block and ignore public ACLs, block and restrict policies.
    block_all = {
        "BlockPublicAcls": True,
        "IgnorePublicAcls": True,
        "BlockPublicPolicy": True,
        "RestrictPublicBuckets": True,
    }
    try:
        s3_client.regional_clients[region].put_public_access_block(
            Bucket=resource_id,
            PublicAccessBlockConfiguration=block_all,
        )
        return True
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False

View File

@@ -0,0 +1,40 @@
from prowler.lib.logger import logger
from prowler.providers.aws.services.s3.s3_client import s3_client
def fixer(resource_id: str, region: str) -> bool:
    """
    Restrict public read access on an S3 bucket by resetting its ACL.

    Sets the bucket ACL to the 'private' canned ACL so no public grants
    remain. Requires the s3:PutBucketAcl permission.
    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "s3:PutBucketAcl",
                "Resource": "*"
            }
        ]
    }
    Args:
        resource_id (str): The S3 bucket name.
        region (str): AWS region where the S3 bucket exists.
    Returns:
        bool: True if the operation is successful (bucket access is updated), False otherwise.
    """
    try:
        s3_client.regional_clients[region].put_bucket_acl(
            Bucket=resource_id, ACL="private"
        )
        return True
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False

View File

@@ -0,0 +1,36 @@
from prowler.lib.logger import logger
from prowler.providers.aws.services.s3.s3_client import s3_client
def fixer(resource_id: str, region: str) -> bool:
    """
    Reset the S3 bucket ACL to 'private' to remove public write access.

    Applies the canned ACL 'private', which drops any grants permitting
    public writes to the bucket. Requires the s3:PutBucketAcl permission.

    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "s3:PutBucketAcl",
                "Resource": "*"
            }
        ]
    }

    Args:
        resource_id (str): The S3 bucket name.
        region (str): AWS region where the S3 bucket exists.

    Returns:
        bool: True if the ACL update succeeded, False otherwise.
    """
    succeeded = True
    try:
        s3_client.regional_clients[region].put_bucket_acl(
            Bucket=resource_id, ACL="private"
        )
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        succeeded = False
    return succeeded

View File

@@ -0,0 +1,64 @@
import json
from prowler.lib.logger import logger
from prowler.providers.aws.services.sqs.sqs_client import sqs_client
def fixer(resource_id: str, region: str) -> bool:
    """
    Replace the SQS queue's resource-based policy with one that only
    trusts the audited AWS account.

    The queue policy is overwritten with a single statement granting
    sqs:* on the queue to the audited account, removing any public
    principals. Requires the sqs:SetQueueAttributes permission.

    Permissions:
    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "sqs:SetQueueAttributes",
                "Resource": "*"
            }
        ]
    }

    Args:
        resource_id (str): The SQS queue URL. The last path segment is
            used as the queue name when building the queue ARN.
            NOTE(review): earlier docs described this as "name or ARN",
            but the value is passed directly as QueueUrl — confirm
            callers supply the queue URL.
        region (str): AWS region where the SQS queue exists.

    Returns:
        bool: True if the policy was successfully replaced, False otherwise.
    """
    try:
        account = sqs_client.audited_account
        partition = sqs_client.audited_partition
        client = sqs_client.regional_clients[region]
        # The queue name is the last segment of the queue URL.
        queue_name = resource_id.split("/")[-1]
        statement = {
            "Sid": "ProwlerFixerStatement",
            "Effect": "Allow",
            "Principal": {
                "AWS": account,
            },
            "Action": "sqs:*",
            "Resource": f"arn:{partition}:sqs:{region}:{account}:{queue_name}",
        }
        policy_document = json.dumps(
            {"Version": "2012-10-17", "Statement": [statement]}
        )
        client.set_queue_attributes(
            QueueUrl=resource_id,
            Attributes={"Policy": policy_document},
        )
        return True
    except Exception as error:
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        return False

View File

@@ -150,6 +150,22 @@ class WAFv2(AWSService):
else:
acl.rules.append(new_rule)
firewall_manager_managed_rg = get_web_acl.get("WebACL", {}).get(
"PreProcessFirewallManagerRuleGroups", []
) + get_web_acl.get("WebACL", {}).get(
"PostProcessFirewallManagerRuleGroups", []
)
for rule in firewall_manager_managed_rg:
acl.rule_groups.append(
Rule(
name=rule.get("Name", ""),
cloudwatch_metrics_enabled=rule.get(
"VisibilityConfig", {}
).get("CloudWatchMetricsEnabled", False),
)
)
except Exception as error:
logger.error(
f"{acl.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -193,13 +209,6 @@ class Rule(BaseModel):
cloudwatch_metrics_enabled: bool = False
class FirewallManagerRuleGroup(BaseModel):
"""Model representing a rule group for the Web ACL."""
name: str
cloudwatch_metrics_enabled: bool = False
class WebAclv2(BaseModel):
"""Model representing a Web ACL for WAFv2."""

View File

@@ -19,12 +19,11 @@ class app_minimum_tls_version_12(Check):
report.location = app.location
report.status_extended = f"Minimum TLS version is not set to 1.2 for app '{app_name}' in subscription '{subscription_name}'."
if (
app.configurations
and getattr(app.configurations, "min_tls_version", "") == "1.2"
):
if app.configurations and getattr(
app.configurations, "min_tls_version", ""
) in ["1.2", "1.3"]:
report.status = "PASS"
report.status_extended = f"Minimum TLS version is set to 1.2 for app '{app_name}' in subscription '{subscription_name}'."
report.status_extended = f"Minimum TLS version is set to {app.configurations.min_tls_version} for app '{app_name}' in subscription '{subscription_name}'."
findings.append(report)

Some files were not shown because too many files have changed in this diff Show More