Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-01-25 02:08:11 +00:00)

Compare commits (344 commits)
Commit SHA1s, in listed order:

27329457be, 7189f3d526, 58e7589c9d, d60f4b5ded, 4c2ec094f6, 395ecaff5b, c39506ef7d, eb90d479e2,
b92a73f5ea, ad121f3059, 70e4c5a44e, b5a46b7b59, f1a97cd166, 0774508093, 0664ce6b94, 407c779c52,
c60f13f23f, 37d912ef01, d3de89c017, cb22af25c6, a534b94495, 6262b4ff0b, 84ecd7ab2c, 1a5428445a,
ac8e991ca0, 83a0331472, cce31e2971, 0adf7d6e77, 295f8b557e, bb2c5c3161, 0018f36a36, 857de84f49,
9630f2242a, 1fe125867c, 0737893240, 282fe3d348, b5d83640ae, 2823d3ad21, 00b93bfe86, 84c253d887,
2ab5a702c9, 11d9cdf24e, b1de41619b, 18a4881a51, de76a168c0, bcc13a742d, 23b584f4bf, 074b7c1ff5,
c04e9ed914, 1f1b126e79, 9b0dd80f13, b0807478f2, 11dfa58ecd, 412f396e0a, 8100d43ff2, bc96acef48,
6e9876b61a, 32253ca4f7, 4c6be5e283, 69178fd7bd, cd4432c14b, 0e47172aec, efe18ae8b2, d5e13df4fe,
b0e84d74f2, 28526b591c, 51e6a6edb1, c1b132a29e, e2530e7d57, f2fcb1599b, 160eafa0c9, c2bc0f1368,
27d27fff81, 66e8f0ce18, 0ceae8361b, 5e52fded83, 0a7d07b4b6, 34b5f483d7, 9d04a1bc52, b25c0aaa00,
652b93ea45, ccb7726511, c514a4e451, d841bd6890, ff54e10ab2, 718e562741, ce05e2a939, 90831d3084,
2c928dead5, 3ffe147664, 3373b0f47c, b29db04560, f964a0362e, 537b23dfae, 48cf3528b4, 3c4b9d32c9,
fd8361ae2c, 33cadaa932, cc126eb8b4, c996b3f6aa, e2b406a300, c2c69da603, 4e51348ff2, 3257b82706,
c98d764daa, 0448429a9f, b948ac6125, 3acc09ea16, 82d0d0de9d, f9cfc4d087, 7d68ff455b, ddf4881971,
9ad4944142, 7f33ea76a4, 1140c29384, 2441a62f39, c26a231fc1, 2fb2315037, a9e475481a, 826d7c4dc3,
b7f4b37f66, 193d691bfe, a359bc581c, 9a28ff025a, f1c7050700, 9391c27b9e, 4c54de092f, 690c482a43,
ad2d857c6f, 07ee59d2ef, bec4617d0a, 94916f8305, 44de651be3, bdcba9c642, c172f75f1a, ec492fa13a,
702659959c, fef332a591, a65ca72177, 1108d90768, 6715aa351f, 851497eb0a, 3bb4663e3e, 6953fcf6b5,
ab844eee3f, 708e06aa3b, aa8b8bbcae, 0ce1e15c2c, 105a83d946, e9a885a54d, 0a8759ee06, 33ec21bbac,
7c00f65ecc, 7777c8f135, 2386490002, b620f12027, 00722181ad, 15e888a939, 43fa600f1c, 2e4b5399c9,
62cbb442e8, b0fe696935, 42dbefbb31, f3dbe28681, 6a5f1a7839, 3b70f9fed4, 7eb01aaa5c, 1e27e52fba,
16d73619e4, bc82696f15, fdb90623fc, 5fa62a9770, 8f3df7e45d, bb417587ae, 6b6e12cea3, 65e70b2ca4,
94d25f6f6a, 4bcf036831, 901bc69a7d, 465217442b, e6b40358aa, 9d48f7286a, 80311d3837, f501149068,
750de62828, d2f338ceb6, e8d66979b3, b5180389f8, fbd5235e15, afd2267c26, 9e798ababd, e9f2fc8ee1,
12198b4f06, 15fae4d8f8, 3de3fed858, 1bf4255d93, b91a132e61, 39302c9e93, 65e21c4268, 3d6a6a9fec,
d185902c86, 8ce4ad83ed, 89620a96bc, f1c008f934, 4d688c9b47, db5481cc9c, ce9a5e6484, 550165b42b,
080551132a, 0a61848365, fcb9ca7795, 71c58cee9e, c811b6715d, 231829d8cd, dbd2f8becb, cc04e6614e,
a5c5ed614c, ea13241317, a377a9ff6a, f7e510b333, 4472b80f1c, 577eb3eec9, 1ed6a1a40f, fe4cd1cddf,
6d7a8c8130, 3057aeeacf, bb5b63f62f, 58cd944618, 5964b68c86, c87aaeba04, 6e361005dc, f5ab254bc5,
298392b409, 74a2bf0721, ddc5dc0316, d3af947553, 36bb2509ac, e4c2b0c2d3, ac5260ad43, 33857109c9,
8cc8f76204, 8f3229928e, 2551992fd8, eb1decfce1, fd5e7b809f, 1ac681226d, 366940298d, fa400ded7d,
ec9455ff75, 2183f31ff5, 67257a4212, 001fa60a11, 0ec3ed8be7, 3ed0b8a464, fd610d44c0, b8cc4b4f0f,
396e51c27d, 36e61cb7a2, 78c6484ddb, 3f1e90a5b3, e1bfec898f, b5b816dac9, 57854f23b7, 9d7499b74f,
5b0b85c0f8, f7e8df618b, d00d254c90, f9fbde6637, 7b1a0474db, da4f9b8e5f, 32f69d24b6, d032a61a9e,
07e0dc2ef5, 9e175e8504, 6b8a434cda, 554491a642, dc4e2f3c85, 7d2c50991b, 83c204e010, 316eb049dd,
be347b2428, a90c772827, 26c70976c0, 657310dc25, 6e595eaf92, 997831e33d, 5920cdc48f, 971e73f9cb,
bd9673c9de, eded97d735, fdb1956b0b, a915c04e9e, 07178ac69a, 9b434d4856, 0758e97628, b486007f95,
0c0887afef, 805ed81031, ec3fddf5b1, d7b0bc02ba, 4d1c8eae8f, 989ccf4ae3, 9c089756c3, 8d4b0914a8,
1ae3f89aab, b984f0423a, f2f196cfcd, 6471d936bb, 21bbdccc41, 48946fa4f7, 9312dda7c2, e3013329ee,
38a0d2d740, 5c2adf1e14, 7ddd2c04c8, 9a55632d8e, f8b4427505, f1efc1456d, 2ea5851b67, a3051bc4e3,
d454427b8b, 4b41bd6adf, cdd044d120, 213a793fbc, a8a567c588, fefe89a1ed, 493fe2d523, d8fc830f1d,
b6c3ba0f0d, 32cd39d158, 203275817f, c05c3396b5, 8f172aec8a, 263a7e2134, a2ea216604, 77c572f990,
bb0c346c4d, 2ce8e1fd21, ecfd94aeb1, eddc672264, 8c71a39487, ff0ac27723, ad7134d283, 58723ae52e
`.env` (13 changed lines)

@@ -6,13 +6,14 @@
 PROWLER_UI_VERSION="latest"
 SITE_URL=http://localhost:3000
 API_BASE_URL=http://prowler-api:8080/api/v1
 NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
 AUTH_TRUST_HOST=true
 UI_PORT=3000
 # openssl rand -base64 32
 AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="

 #### Prowler API Configuration ####
-PROWLER_API_VERSION="latest"
+PROWLER_API_VERSION="stable"
 # PostgreSQL settings
 # If running Django and celery on host, use 'localhost', else use 'postgres-db'
 POSTGRES_HOST=postgres-db

@@ -40,9 +41,12 @@ DJANGO_LOGGING_FORMATTER=human_readable
 # Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
 # Applies to both Django and Celery Workers
 DJANGO_LOGGING_LEVEL=INFO
-DJANGO_WORKERS=4 # Defaults to the maximum available based on CPU cores if not set.
-DJANGO_ACCESS_TOKEN_LIFETIME=30 # Token lifetime is in minutes
-DJANGO_REFRESH_TOKEN_LIFETIME=1440 # Token lifetime is in minutes
+# Defaults to the maximum available based on CPU cores if not set.
+DJANGO_WORKERS=4
+# Token lifetime is in minutes
+DJANGO_ACCESS_TOKEN_LIFETIME=30
+# Token lifetime is in minutes
+DJANGO_REFRESH_TOKEN_LIFETIME=1440
 DJANGO_CACHE_MAX_AGE=3600
 DJANGO_STALE_WHILE_REVALIDATE=60
 DJANGO_MANAGE_DB_PARTITIONS=True

@@ -87,3 +91,4 @@ jQIDAQAB
 -----END PUBLIC KEY-----"
 # openssl rand -base64 32
 DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
+DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
`.github/codeql/api-codeql-config.yml` (vendored, 4 changed lines)

@@ -1,3 +1,3 @@
-name: "Custom CodeQL Config for API"
+name: "API - CodeQL Config"
 paths:
-  - 'api/'
+  - "api/"
`.github/codeql/codeql-config.yml` (vendored, deleted)

@@ -1,4 +0,0 @@
-name: "Custom CodeQL Config"
-paths-ignore:
-  - 'api/'
-  - 'ui/'
`.github/codeql/sdk-codeql-config.yml` (vendored, new file)

@@ -0,0 +1,4 @@
+name: "SDK - CodeQL Config"
+paths-ignore:
+  - "api/"
+  - "ui/"
`.github/codeql/ui-codeql-config.yml` (vendored, 2 changed lines)

@@ -1,3 +1,3 @@
-name: "Custom CodeQL Config for UI"
+name: "UI - CodeQL Config"
 paths:
   - "ui/"
`.github/dependabot.yml` (vendored, 74 changed lines)

@@ -5,6 +5,7 @@
 version: 2
 updates:
+  # v5
   - package-ecosystem: "pip"
     directory: "/"
     schedule:

@@ -14,6 +15,18 @@ updates:
     labels:
       - "dependencies"
      - "pip"
+
+  - package-ecosystem: "pip"
+    directory: "/api"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    target-branch: master
+    labels:
+      - "dependencies"
+      - "pip"
+      - "component/api"

   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:

@@ -24,20 +37,77 @@
       - "dependencies"
       - "github_actions"

+  - package-ecosystem: "npm"
+    directory: "/ui"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    target-branch: master
+    labels:
+      - "dependencies"
+      - "npm"
+      - "component/ui"
+
+  - package-ecosystem: "docker"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 10
+    target-branch: master
+    labels:
+      - "dependencies"
+      - "docker"
+
+  # v4.6
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
-      interval: "daily"
+      interval: "weekly"
     open-pull-requests-limit: 10
     target-branch: v4.6
     labels:
       - "dependencies"
       - "pip"
       - "v4"

   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
       interval: "weekly"
     open-pull-requests-limit: 10
     target-branch: v4.6
     labels:
       - "dependencies"
       - "github_actions"
       - "v4"

   - package-ecosystem: "docker"
     directory: "/"
     schedule:
       interval: "weekly"
     open-pull-requests-limit: 10
     target-branch: v4.6
     labels:
       - "dependencies"
       - "docker"
       - "v4"

+  # v3
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
       interval: "monthly"
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
       - "dependencies"
       - "pip"
       - "v3"

   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "daily"
+      interval: "monthly"
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
`.github/labeler.yml` (vendored, 5 changed lines)

@@ -22,6 +22,11 @@ provider/kubernetes:
     - any-glob-to-any-file: "prowler/providers/kubernetes/**"
     - any-glob-to-any-file: "tests/providers/kubernetes/**"

+provider/github:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/github/**"
+      - any-glob-to-any-file: "tests/providers/github/**"
+
 github_actions:
   - changed-files:
       - any-glob-to-any-file: ".github/workflows/*"
`.github/pull_request_template.md` (vendored, 3 changed lines)

@@ -15,7 +15,8 @@ Please include a summary of the change and which issue is fixed. List any depend
 - [ ] Review if the code is being covered by tests.
 - [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
+- [ ] Review if backport is needed.
 - [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)

 ### License

-By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
+By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
`.github/workflows/api-build-lint-push-containers.yml` (vendored, new file, 98 lines)

@@ -0,0 +1,98 @@
name: API - Build and Push containers

on:
  push:
    branches:
      - "master"
    paths:
      - "api/**"
      - ".github/workflows/api-build-lint-push-containers.yml"

  # Uncomment the code below to test this action on PRs
  # pull_request:
  #   branches:
  #     - "master"
  #   paths:
  #     - "api/**"
  #     - ".github/workflows/api-build-lint-push-containers.yml"

  release:
    types: [published]

env:
  # Tags
  LATEST_TAG: latest
  RELEASE_TAG: ${{ github.event.release.tag_name }}
  STABLE_TAG: stable

  WORKING_DIRECTORY: ./api

  # Container Registries
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api

jobs:
  repository-check:
    name: Repository check
    runs-on: ubuntu-latest
    outputs:
      is_repo: ${{ steps.repository_check.outputs.is_repo }}
    steps:
      - name: Repository check
        id: repository_check
        working-directory: /tmp
        run: |
          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
          then
            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
          else
            echo "This action only runs for prowler-cloud/prowler"
            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
          fi

  # Build Prowler OSS container
  container-build-push:
    needs: repository-check
    if: needs.repository-check.outputs.is_repo == 'true'
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ env.WORKING_DIRECTORY }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push container image (latest)
        # Comment the following line for testing
        if: github.event_name == 'push'
        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          # Set push: false for testing
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Build and push container image (release)
        if: github.event_name == 'release'
        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
`.github/workflows/api-codeql.yml` (vendored, 10 changed lines)

@@ -9,22 +9,18 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: "API - CodeQL"
+name: API - CodeQL

 on:
   push:
     branches:
       - "master"
-      - "v3"
-      - "v4.*"
+      - "v5.*"
     paths:
       - "api/**"
   pull_request:
     branches:
       - "master"
-      - "v3"
-      - "v4.*"
+      - "v5.*"
     paths:
       - "api/**"
`.github/workflows/api-pull-request.yml` (vendored, 23 changed lines)

@@ -1,14 +1,16 @@
-name: "API - Pull Request"
+name: API - Pull Request

 on:
   push:
     branches:
       - "master"
+      - "v5.*"
     paths:
       - "api/**"
   pull_request:
     branches:
       - "master"
+      - "v5.*"
     paths:
       - "api/**"

@@ -69,6 +71,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+
       - name: Test if changes are in not ignored paths
         id: are-non-ignored-files-changed
         uses: tj-actions/changed-files@v45

@@ -80,18 +83,21 @@
           api/permissions/**
           api/README.md
+          api/mkdocs.yml

       - name: Install poetry
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           python -m pip install --upgrade pip
-          pipx install poetry
+          pipx install poetry==1.8.5

       - name: Set up Python ${{ matrix.python-version }}
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
           cache: "poetry"

       - name: Install dependencies
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'

@@ -108,49 +114,60 @@
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry lock --check
+          poetry check --lock

       - name: Lint with ruff
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run ruff check . --exclude contrib

       - name: Check Format with ruff
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run ruff format --check . --exclude contrib

       - name: Lint with pylint
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/

       - name: Bandit
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .

       - name: Safety
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run safety check --ignore 70612,66963

       - name: Vulture
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .

       - name: Hadolint
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           /tmp/hadolint Dockerfile --ignore=DL3013

       - name: Test with pytest
+        working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run pytest --cov=./src/backend --cov-report=xml src/backend

       - name: Upload coverage reports to Codecov
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         uses: codecov/codecov-action@v5
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
           flags: api
`.github/workflows/backport.yml` (vendored, 37 changed lines)

@@ -1,42 +1,47 @@
-name: Automatic Backport
+name: Prowler - Automatic Backport

 on:
   pull_request_target:
     branches: ['master']
     types: ['labeled', 'closed']

+env:
+  # The prefix of the label that triggers the backport must not contain the branch name
+  # so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
+  BACKPORT_LABEL_PREFIX: backport-to-
+  BACKPORT_LABEL_IGNORE: was-backported

 jobs:
   backport:
     name: Backport PR
-    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
+    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
     runs-on: ubuntu-latest
     permissions:
       id-token: write
       pull-requests: write
       contents: write
     steps:
       # Workaround not to fail the workflow if the PR does not need a backport
       # https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
-      - name: Check for backport labels
-        id: check_labels
-        run: |-
-          labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
-          echo "$labels"
-          matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
-          echo "matched=$matched"
-          echo "matched=$matched" >> $GITHUB_OUTPUT
+      - name: Check labels
+        id: preview_label_check
+        uses: docker://agilepathway/pull-request-label-checker:v1.6.55
+        with:
+          allow_failure: true
+          prefix_mode: true
+          any_of: ${{ env.BACKPORT_LABEL_PREFIX }}
+          none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
+          repo_token: ${{ secrets.GITHUB_TOKEN }}

       - name: Backport Action
-        if: fromJSON(steps.check_labels.outputs.matched) > 0
+        if: steps.preview_label_check.outputs.label_check == 'success'
         uses: sorenlouv/backport-github-action@v9.5.1
         with:
           github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          auto_backport_label_prefix: backport-to-
+          auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}

       - name: Info log
-        if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
+        if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
         run: cat ~/.backport/backport.info.log

       - name: Debug log
-        if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
+        if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
         run: cat ~/.backport/backport.debug.log
*(unnamed file)*

@@ -1,4 +1,4 @@
-name: Pull Request Documentation Link
+name: Prowler - Pull Request Documentation Link

 on:
   pull_request:
`.github/workflows/find-secrets.yml` (vendored, 6 changed lines)

@@ -1,4 +1,4 @@
-name: Find secrets
+name: Prowler - Find secrets

 on: pull_request

@@ -11,9 +11,9 @@ jobs:
         with:
           fetch-depth: 0
       - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@v3.84.1
+        uses: trufflesecurity/trufflehog@v3.88.2
         with:
           path: ./
           base: ${{ github.event.repository.default_branch }}
           head: HEAD
-          extra_args: --only-verified
+          extra_args: --only-verified
`.github/workflows/labeler.yml` (vendored, 2 changed lines)

@@ -1,4 +1,4 @@
-name: "Pull Request Labeler"
+name: Prowler - PR Labeler

 on:
   pull_request_target:
*(unnamed file)*

@@ -1,9 +1,13 @@
-name: Build and Push containers
+name: SDK - Build and Push containers

 on:
   push:
     branches:
+      # For `v3-latest`
       - "v3"
+      # For `v4-latest`
       - "v4.6"
+      # For `latest`
       - "master"
     paths-ignore:
       - ".github/**"

@@ -64,7 +68,7 @@ jobs:
       - name: Install Poetry
         run: |
-          pipx install poetry
+          pipx install poetry==1.8.5
           pipx inject poetry poetry-bumpversion

       - name: Get Prowler version

@@ -85,8 +89,8 @@
             echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
             ;;

           4)
             echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
             echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
             ;;
*(unnamed file)*

@@ -9,22 +9,24 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: "CodeQL"
+name: SDK - CodeQL

 on:
   push:
     branches:
       - "master"
       - "v3"
       - "v4.*"
+      - "v5.*"
     paths-ignore:
       - 'ui/**'
       - 'api/**'
   pull_request:
     branches:
       - "master"
       - "v3"
       - "v4.*"
+      - "v5.*"
     paths-ignore:
       - 'ui/**'
       - 'api/**'

@@ -55,7 +57,7 @@ jobs:
       uses: github/codeql-action/init@v3
       with:
         languages: ${{ matrix.language }}
-        config-file: ./.github/codeql/codeql-config.yml
+        config-file: ./.github/codeql/sdk-codeql-config.yml

     - name: Perform CodeQL Analysis
       uses: github/codeql-action/analyze@v3
*(unnamed file)*

@@ -1,4 +1,4 @@
-name: "Pull Request"
+name: SDK - Pull Request

 on:
   push:

@@ -22,6 +22,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+
       - name: Test if changes are in not ignored paths
         id: are-non-ignored-files-changed
         uses: tj-actions/changed-files@v45

@@ -36,17 +37,22 @@
           README.md
           mkdocs.yml
           .backportrc.json
+          .env
+          docker-compose*

       - name: Install poetry
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           python -m pip install --upgrade pip
-          pipx install poetry
+          pipx install poetry==1.8.5

       - name: Set up Python ${{ matrix.python-version }}
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
           cache: "poetry"

       - name: Install dependencies
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |

@@ -57,44 +63,56 @@
             sed -E 's/.*"v([^"]+)".*/\1/' \
           ) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
           && chmod +x /tmp/hadolint

       - name: Poetry check
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry lock --check
+          poetry check --lock

       - name: Lint with flake8
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api

       - name: Checking format with black
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run black --exclude api ui --check .

       - name: Lint with pylint
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/

       - name: Bandit
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .

       - name: Safety
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run safety check --ignore 70612 -r pyproject.toml

       - name: Vulture
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .

       - name: Hadolint
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           /tmp/hadolint Dockerfile --ignore=DL3013

       - name: Test with pytest
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run pytest -n auto --cov=./prowler --cov-report=xml tests

       - name: Upload coverage reports to Codecov
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         uses: codecov/codecov-action@v5
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
           flags: prowler
*(unnamed file)*

@@ -1,4 +1,4 @@
-name: PyPI release
+name: SDK - PyPI release

 on:
   release:

@@ -10,12 +10,40 @@ env:
   CACHE: "poetry"

 jobs:
+  repository-check:
+    name: Repository check
+    runs-on: ubuntu-latest
+    outputs:
+      is_repo: ${{ steps.repository_check.outputs.is_repo }}
+    steps:
+      - name: Repository check
+        id: repository_check
+        working-directory: /tmp
+        run: |
+          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
+          then
+            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
+          else
+            echo "This action only runs for prowler-cloud/prowler"
+            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
+          fi
+
   release-prowler-job:
     runs-on: ubuntu-latest
+    needs: repository-check
+    if: needs.repository-check.outputs.is_repo == 'true'
     env:
       POETRY_VIRTUALENVS_CREATE: "false"
     name: Release Prowler to PyPI
     steps:
-      - name: Repository check
-        working-directory: /tmp
-        run: |
-          if [[ "${{ github.repository }}" != "prowler-cloud/prowler" ]]; then
-            echo "This action only runs for prowler-cloud/prowler"
-            exit 1
-          fi
-
       - name: Get Prowler version
         run: |
           PROWLER_VERSION="${{ env.RELEASE_TAG }}"

@@ -40,7 +68,7 @@
       - name: Install dependencies
         run: |
-          pipx install poetry
+          pipx install poetry==1.8.5

       - name: Setup Python
         uses: actions/setup-python@v5
*(unnamed file)*

@@ -1,6 +1,6 @@
 # This is a basic workflow to help you get started with Actions

-name: Refresh regions of AWS services
+name: SDK - Refresh AWS services' regions

 on:
   schedule:
`.github/workflows/ui-build-lint-push-containers.yml` (vendored, new file, 98 lines)

@@ -0,0 +1,98 @@
name: UI - Build and Push containers

on:
  push:
    branches:
      - "master"
    paths:
      - "ui/**"
      - ".github/workflows/ui-build-lint-push-containers.yml"

  # Uncomment the below code to test this action on PRs
  # pull_request:
  #   branches:
  #     - "master"
  #   paths:
  #     - "ui/**"
  #     - ".github/workflows/ui-build-lint-push-containers.yml"

  release:
    types: [published]

env:
  # Tags
  LATEST_TAG: latest
  RELEASE_TAG: ${{ github.event.release.tag_name }}
  STABLE_TAG: stable

  WORKING_DIRECTORY: ./ui

  # Container Registries
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui

jobs:
  repository-check:
    name: Repository check
    runs-on: ubuntu-latest
    outputs:
      is_repo: ${{ steps.repository_check.outputs.is_repo }}
    steps:
      - name: Repository check
        id: repository_check
        working-directory: /tmp
        run: |
          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
          then
            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
          else
            echo "This action only runs for prowler-cloud/prowler"
            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
          fi

  # Build Prowler OSS container
  container-build-push:
    needs: repository-check
    if: needs.repository-check.outputs.is_repo == 'true'
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ env.WORKING_DIRECTORY }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push container image (latest)
        # Comment the following line for testing
        if: github.event_name == 'push'
        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          # Set push: false for testing
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Build and push container image (release)
        if: github.event_name == 'release'
        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
`.github/workflows/ui-codeql.yml` (vendored, 4 changed lines)

@@ -9,20 +9,18 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: "UI - CodeQL"
+name: UI - CodeQL

 on:
   push:
     branches:
       - "master"
-      - "v4.*"
       - "v5.*"
     paths:
       - "ui/**"
   pull_request:
     branches:
       - "master"
-      - "v4.*"
       - "v5.*"
     paths:
       - "ui/**"
*(unnamed file)*

@@ -1,9 +1,16 @@
-name: "UI - Pull Request"
+name: UI - Pull Request

 on:
+  push:
+    branches:
+      - "master"
+      - "v5.*"
+    paths:
+      - "ui/**"
   pull_request:
     branches:
       - master
+      - "v5.*"
     paths:
       - 'ui/**'

@@ -20,7 +27,7 @@ jobs:
       with:
         persist-credentials: false
     - name: Setup Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v3
+      uses: actions/setup-node@v4
       with:
         node-version: ${{ matrix.node-version }}
     - name: Install dependencies

@@ -31,4 +38,4 @@
       run: npm run healthcheck
     - name: Build the application
       working-directory: ./ui
-      run: npm run build
+      run: npm run build
`.gitignore` (vendored, 1 changed line)

@@ -45,6 +45,7 @@ junit-reports/
 # Terraform
 .terraform*
 *.tfstate
 *.tfstate.*

 # .env
 ui/.env*
*(unnamed file)*

@@ -27,6 +27,7 @@ repos:
     hooks:
       - id: shellcheck
         exclude: contrib
+
 ## PYTHON
 - repo: https://github.com/myint/autoflake
   rev: v2.3.1

@@ -61,8 +62,25 @@
   rev: 1.8.0
   hooks:
     - id: poetry-check
+      name: API - poetry-check
+      args: ["--directory=./api"]
+      pass_filenames: false
+
     - id: poetry-lock
-      args: ["--no-update"]
+      name: API - poetry-lock
+      args: ["--no-update", "--directory=./api"]
+      pass_filenames: false
+
+    - id: poetry-check
+      name: SDK - poetry-check
+      args: ["--directory=./"]
+      pass_filenames: false
+
+    - id: poetry-lock
+      name: SDK - poetry-lock
+      args: ["--no-update", "--directory=./"]
+      pass_filenames: false

 - repo: https://github.com/hadolint/hadolint
   rev: v2.13.0-beta

@@ -90,7 +108,7 @@
       - id: bandit
         name: bandit
         description: "Bandit is a tool for finding common security issues in Python code"
-        entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/' -r .'
+        entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
         language: system
         files: '.*\.py'

@@ -103,7 +121,6 @@
       - id: vulture
         name: vulture
         description: "Vulture finds unused code in Python programs."
-        entry: bash -c 'vulture --exclude "contrib" --min-confidence 100 .'
-        exclude: 'api/src/backend/'
+        entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/" --min-confidence 100 .'
         language: system
         files: '.*\.py'
*(unnamed file)*

@@ -1,4 +1,4 @@
-FROM python:3.12-alpine
+FROM python:3.12.8-alpine3.20

 LABEL maintainer="https://github.com/prowler-cloud/prowler"
`README.md` (26 changed lines)

@@ -3,7 +3,7 @@
   <img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
 </p>
 <p align="center">
-  <b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
+  <b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
 </p>
 <p align="center">
   <b>Learn more at <a href="https://prowler.com">prowler.com</i></b>

@@ -29,7 +29,7 @@
 <p align="center">
   <a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
   <a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
-  <a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler?include_prereleases"></a>
+  <a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
   <a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
   <a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
   <a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>

@@ -43,7 +43,7 @@
 # Description

-**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
+**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.

 ## Prowler App

@@ -72,9 +72,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
 | Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
 |---|---|---|---|---|
 | AWS | 561 | 81 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
-| GCP | 77 | 13 -> `prowler gcp --list-services` | 3 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
-| Azure | 139 | 18 -> `prowler azure --list-services` | 4 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
-| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
+| GCP | 77 | 13 -> `prowler gcp --list-services` | 4 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
+| Azure | 139 | 18 -> `prowler azure --list-services` | 5 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
+| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 2 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |

 # 💻 Installation

@@ -98,6 +98,7 @@ curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/mast
 docker compose up -d
 ```

+> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
 > Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.

 ### From GitHub

@@ -139,6 +140,19 @@ cd src/backend
 python -m celery -A config.celery worker -l info -E
 ```

+**Commands to run the API Scheduler**
+
+``` console
+git clone https://github.com/prowler-cloud/prowler
+cd prowler/api
+poetry install
+poetry shell
+set -a
+source .env
+cd src/backend
+python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
+```
+
 **Commands to run the UI**

 ``` console
*(unnamed file)*

@@ -22,6 +22,7 @@ DJANGO_SECRETS_ENCRYPTION_KEY=""
 # Decide whether to allow Django manage database table partitions
 DJANGO_MANAGE_DB_PARTITIONS=[True|False]
 DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
+DJANGO_BROKER_VISIBILITY_TIMEOUT=86400

 # PostgreSQL settings
 # If running django and celery on host, use 'localhost', else use 'postgres-db'
*(unnamed file)*

@@ -1,4 +1,4 @@
-FROM python:3.12-alpine AS build
+FROM python:3.12.8-alpine3.20 AS build

 LABEL maintainer="https://github.com/prowler-cloud/api"
*(unnamed file)*

@@ -28,7 +28,7 @@ start_prod_server() {

 start_worker() {
   echo "Starting the worker..."
-  poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E
+  poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E --max-tasks-per-child 1
 }

 start_worker_beat() {
`api/poetry.lock` (generated, 2335 changed lines): file diff suppressed because it is too large.
*(unnamed file)*

@@ -8,11 +8,11 @@ description = "Prowler's API (Django/DRF)"
 license = "Apache-2.0"
 name = "prowler-api"
 package-mode = false
-version = "1.0.0"
+version = "1.3.2"

 [tool.poetry.dependencies]
 celery = {extras = ["pytest"], version = "^5.4.0"}
-django = "5.1.1"
+django = "5.1.5"
 django-celery-beat = "^2.7.0"
 django-celery-results = "^2.5.1"
 django-cors-headers = "4.4.0"

@@ -27,7 +27,7 @@ drf-nested-routers = "^0.94.1"
 drf-spectacular = "0.27.2"
 drf-spectacular-jsonapi = "0.5.1"
 gunicorn = "23.0.0"
-prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "master"}
+prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "v5.2"}
 psycopg2-binary = "2.9.9"
 pytest-celery = {extras = ["redis"], version = "^1.0.1"}
 # Needed for prowler compatibility

@@ -37,6 +37,7 @@ uuid6 = "2024.7.10"
 [tool.poetry.group.dev.dependencies]
 bandit = "1.7.9"
 coverage = "7.5.4"
+django-silk = "5.3.2"
 docker = "7.1.0"
 freezegun = "1.5.1"
 mypy = "1.10.1"

@@ -48,8 +49,8 @@ pytest-env = "1.1.3"
 pytest-randomly = "3.15.0"
 pytest-xdist = "3.6.1"
 ruff = "0.5.0"
-safety = "3.2.3"
-vulture = "2.11"
+safety = "3.2.9"
+vulture = "2.14"

 [tool.poetry.scripts]
 celery = "src.backend.config.settings.celery"
*(unnamed file)*

@@ -1,20 +1,23 @@
 import uuid

-from django.db import transaction, connection
+from django.core.exceptions import ObjectDoesNotExist
+from django.db import transaction
 from rest_framework import permissions
 from rest_framework.exceptions import NotAuthenticated
 from rest_framework.filters import SearchFilter
 from rest_framework_json_api import filters
 from rest_framework_json_api.serializers import ValidationError
 from rest_framework_json_api.views import ModelViewSet
 from rest_framework_simplejwt.authentication import JWTAuthentication

+from api.db_router import MainRouter
+from api.db_utils import POSTGRES_USER_VAR, rls_transaction
 from api.filters import CustomDjangoFilterBackend
+from api.models import Role, Tenant
+from api.rbac.permissions import HasPermissions


 class BaseViewSet(ModelViewSet):
     authentication_classes = [JWTAuthentication]
-    permission_classes = [permissions.IsAuthenticated]
+    required_permissions = []
+    permission_classes = [permissions.IsAuthenticated, HasPermissions]
     filter_backends = [
         filters.QueryParameterValidationFilter,
         filters.OrderingFilter,

@@ -28,6 +31,17 @@ class BaseViewSet(ModelViewSet):
     ordering_fields = "__all__"
     ordering = ["id"]

+    def initial(self, request, *args, **kwargs):
+        """
+        Sets required_permissions before permissions are checked.
+        """
+        self.set_required_permissions()
+        super().initial(request, *args, **kwargs)
+
+    def set_required_permissions(self):
+        """This is an abstract method that must be implemented by subclasses."""
+        NotImplemented
+
     def get_queryset(self):
         raise NotImplementedError

@@ -47,13 +61,7 @@ class BaseRLSViewSet(BaseViewSet):
         if tenant_id is None:
             raise NotAuthenticated("Tenant ID is not present in token")

-        try:
-            uuid.UUID(tenant_id)
-        except ValueError:
-            raise ValidationError("Tenant ID must be a valid UUID")
-
-        with connection.cursor() as cursor:
-            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
+        with rls_transaction(tenant_id):
             self.request.tenant_id = tenant_id
             return super().initial(request, *args, **kwargs)

@@ -66,13 +74,60 @@
 class BaseTenantViewset(BaseViewSet):
     def dispatch(self, request, *args, **kwargs):
         with transaction.atomic():
-            return super().dispatch(request, *args, **kwargs)
+            tenant = super().dispatch(request, *args, **kwargs)
+
+        try:
+            # If the request is a POST, create the admin role
+            if request.method == "POST":
+                isinstance(tenant, dict) and self._create_admin_role(tenant.data["id"])
+        except Exception as e:
+            self._handle_creation_error(e, tenant)
+            raise
+
+        return tenant
+
+    def _create_admin_role(self, tenant_id):
+        Role.objects.using(MainRouter.admin_db).create(
+            name="admin",
+            tenant_id=tenant_id,
+            manage_users=True,
+            manage_account=True,
+            manage_billing=True,
+            manage_providers=True,
+            manage_integrations=True,
+            manage_scans=True,
+            unlimited_visibility=True,
+        )
+
+    def _handle_creation_error(self, error, tenant):
+        if tenant.data.get("id"):
+            try:
+                Tenant.objects.using(MainRouter.admin_db).filter(
+                    id=tenant.data["id"]
+                ).delete()
+            except ObjectDoesNotExist:
+                pass  # Tenant might not exist, handle gracefully

     def initial(self, request, *args, **kwargs):
-        user_id = str(request.user.id)
+        if (
+            request.resolver_match.url_name != "tenant-detail"
+            and request.method != "DELETE"
+        ):
+            user_id = str(request.user.id)

-        with connection.cursor() as cursor:
-            cursor.execute(f"SELECT set_config('api.user_id', '{user_id}', TRUE);")
+            with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
+                return super().initial(request, *args, **kwargs)

+        # TODO: DRY this when we have time
+        if request.auth is None:
+            raise NotAuthenticated
+
+        tenant_id = request.auth.get("tenant_id")
+        if tenant_id is None:
+            raise NotAuthenticated("Tenant ID is not present in token")
+
+        with rls_transaction(tenant_id):
+            self.request.tenant_id = tenant_id
             return super().initial(request, *args, **kwargs)

@@ -92,12 +147,6 @@ class BaseUserViewset(BaseViewSet):
         if tenant_id is None:
             raise NotAuthenticated("Tenant ID is not present in token")

-        try:
-            uuid.UUID(tenant_id)
-        except ValueError:
-            raise ValidationError("Tenant ID must be a valid UUID")
-
-        with connection.cursor() as cursor:
-            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
+        with rls_transaction(tenant_id):
             self.request.tenant_id = tenant_id
             return super().initial(request, *args, **kwargs)
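The `required_permissions` / `set_required_permissions()` hooks above run before DRF's permission check, so `HasPermissions` can compare the declared list against the caller's role. A minimal sketch of how a concrete viewset might plug into this; the class name, import path, and permission string are assumptions for illustration, not taken from the diff:

```python
# Hypothetical subclass; BaseRLSViewSet is the class defined in the diff above,
# and the module path below is assumed.
from api.v1.views import BaseRLSViewSet  # import path is an assumption


class ProviderViewSet(BaseRLSViewSet):
    def set_required_permissions(self):
        # Read by HasPermissions during initial(), before the handler runs.
        self.required_permissions = ["manage_providers"]
```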
*(unnamed file)*

@@ -4,13 +4,17 @@ class MainRouter:

     def db_for_read(self, model, **hints):  # noqa: F841
         model_table_name = model._meta.db_table
-        if model_table_name.startswith("django_"):
+        if model_table_name.startswith("django_") or model_table_name.startswith(
+            "silk_"
+        ):
             return self.admin_db
         return None

     def db_for_write(self, model, **hints):  # noqa: F841
         model_table_name = model._meta.db_table
-        if model_table_name.startswith("django_"):
+        if model_table_name.startswith("django_") or model_table_name.startswith(
+            "silk_"
+        ):
             return self.admin_db
         return None
*(unnamed file)*

@@ -1,13 +1,14 @@
 import secrets
+import uuid
 from contextlib import contextmanager
 from datetime import datetime, timedelta, timezone

 from django.conf import settings
 from django.contrib.auth.models import BaseUserManager
 from django.core.paginator import Paginator
 from django.db import connection, models, transaction
 from psycopg2 import connect as psycopg2_connect
 from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
+from rest_framework_json_api.serializers import ValidationError

 DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
 DB_PASSWORD = (

@@ -23,6 +24,8 @@ TASK_RUNNER_DB_TABLE = "django_celery_results_taskresult"
 POSTGRES_TENANT_VAR = "api.tenant_id"
 POSTGRES_USER_VAR = "api.user_id"

+SET_CONFIG_QUERY = "SELECT set_config(%s, %s::text, TRUE);"
+

 @contextmanager
 def psycopg_connection(database_alias: str):

@@ -44,10 +47,23 @@ def psycopg_connection(database_alias: str):

 @contextmanager
-def tenant_transaction(tenant_id: str):
+def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
+    """
+    Creates a new database transaction setting the given configuration value for Postgres RLS.
+    It validates that the value is a valid UUID.
+
+    Args:
+        value (str): Database configuration parameter value.
+        parameter (str): Database configuration parameter name, by default is 'api.tenant_id'.
+    """
     with transaction.atomic():
         with connection.cursor() as cursor:
-            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
+            try:
+                # just in case the value is an UUID object
+                uuid.UUID(str(value))
+            except ValueError:
+                raise ValidationError("Must be a valid UUID")
+            cursor.execute(SET_CONFIG_QUERY, [parameter, value])
             yield cursor

@@ -103,15 +119,18 @@ def batch_delete(queryset, batch_size=5000):
     total_deleted = 0
     deletion_summary = {}

-    paginator = Paginator(queryset.order_by("id").only("id"), batch_size)
-
-    for page_num in paginator.page_range:
-        batch_ids = [obj.id for obj in paginator.page(page_num).object_list]
+    while True:
+        # Get a batch of IDs to delete
+        batch_ids = set(
+            queryset.values_list("id", flat=True).order_by("id")[:batch_size]
+        )
+        if not batch_ids:
+            # No more objects to delete
+            break

         deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()

         total_deleted += deleted_count

         for model_label, count in deleted_info.items():
             deletion_summary[model_label] = deletion_summary.get(model_label, 0) + count
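The `rls_transaction` refactor closes the injection surface of the old f-string `set_config` call: the value is now validated as a UUID and bound as a query parameter. A minimal usage sketch, assuming a Django app context; the function and the model chosen for the query are illustrative, not from the diff:

```python
from api.db_utils import POSTGRES_USER_VAR, rls_transaction


def count_findings_for_tenant(tenant_id: str) -> int:
    # Everything inside the block runs in one transaction with
    # current_setting('api.tenant_id') set, so Postgres RLS policies apply.
    with rls_transaction(tenant_id):
        from api.models import Finding  # illustrative model choice
        return Finding.objects.count()


# The same helper scopes per-user settings by switching the parameter name:
# with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR): ...
```

The `batch_delete` rewrite in the same hunk follows similar defensive logic: paginating a queryset while deleting from it shifts page offsets and skips rows, so the loop now re-queries the first `batch_size` IDs until none remain.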
*(unnamed file)*

@@ -1,6 +1,10 @@
+import uuid
 from functools import wraps

 from django.db import connection, transaction
+from rest_framework_json_api.serializers import ValidationError
+
+from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY


 def set_tenant(func):

@@ -31,7 +35,7 @@ def set_tenant(func):
         pass

     # When calling the task
-    some_task.delay(arg1, tenant_id="1234-abcd-5678")
+    some_task.delay(arg1, tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5")

     # The tenant context will be set before the task logic executes.
     """

@@ -43,9 +47,12 @@
             tenant_id = kwargs.pop("tenant_id")
         except KeyError:
             raise KeyError("This task requires the tenant_id")
+
+        try:
+            uuid.UUID(tenant_id)
+        except ValueError:
+            raise ValidationError("Tenant ID must be a valid UUID")
         with connection.cursor() as cursor:
-            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
+            cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])

         return func(*args, **kwargs)
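The decorator now rejects non-UUID tenant IDs before touching the session, mirroring `rls_transaction`. A sketch of a task wired through it; the import path, task name, and body are assumptions for illustration:

```python
from celery import shared_task

# set_tenant is the decorator from the diff above; this import path is assumed.
from api.decorators import set_tenant


@shared_task
@set_tenant
def delete_provider(provider_id: str):
    # By the time the body runs, set_tenant has popped tenant_id from kwargs,
    # validated it as a UUID, and applied it via SET_CONFIG_QUERY, so any ORM
    # query here is already scoped by the tenant's RLS policies.
    ...


# The caller passes tenant_id explicitly; it never reaches the task body.
delete_provider.delay("prov-123", tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5")
```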
@@ -26,11 +26,13 @@ from api.models import (
     Finding,
     Invitation,
     Membership,
+    PermissionChoices,
     Provider,
     ProviderGroup,
     ProviderSecret,
     Resource,
     ResourceTag,
+    Role,
     Scan,
     ScanSummary,
     SeverityChoices,
@@ -317,6 +319,28 @@ class FindingFilter(FilterSet):
         field_name="resources__type", lookup_expr="icontains"
     )

+    # Temporarily disabled until we implement tag filtering in the UI
+    # resource_tag_key = CharFilter(field_name="resources__tags__key")
+    # resource_tag_key__in = CharInFilter(
+    #     field_name="resources__tags__key", lookup_expr="in"
+    # )
+    # resource_tag_key__icontains = CharFilter(
+    #     field_name="resources__tags__key", lookup_expr="icontains"
+    # )
+    # resource_tag_value = CharFilter(field_name="resources__tags__value")
+    # resource_tag_value__in = CharInFilter(
+    #     field_name="resources__tags__value", lookup_expr="in"
+    # )
+    # resource_tag_value__icontains = CharFilter(
+    #     field_name="resources__tags__value", lookup_expr="icontains"
+    # )
+    # resource_tags = CharInFilter(
+    #     method="filter_resource_tag",
+    #     lookup_expr="in",
+    #     help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
+    #     "separated by commas.",
+    # )
+
     scan = UUIDFilter(method="filter_scan_id")
     scan__in = UUIDInFilter(method="filter_scan_id_in")

@@ -424,6 +448,16 @@ class FindingFilter(FilterSet):

         return queryset.filter(id__lte=end).filter(inserted_at__lte=value)

+    def filter_resource_tag(self, queryset, name, value):
+        overall_query = Q()
+        for key_value_pair in value:
+            tag_key, tag_value = key_value_pair.split(":", 1)
+            overall_query |= Q(
+                resources__tags__key__icontains=tag_key,
+                resources__tags__value__icontains=tag_value,
+            )
+        return queryset.filter(overall_query).distinct()
+
     @staticmethod
     def maybe_date_to_datetime(value):
         dt = value
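The method ORs one Q object per key:value pair, so a finding qualifies when any pair matches one of its resource tags. A sketch of the equivalent standalone query; the pair values are illustrative:

from django.db.models import Q

from api.models import Finding

pairs = ["env:prod", "team:security"]  # e.g. parsed from filter[resource_tags]
query = Q()
for pair in pairs:
    key, value = pair.split(":", 1)
    query |= Q(
        resources__tags__key__icontains=key,
        resources__tags__value__icontains=value,
    )
# distinct() collapses duplicate rows introduced by the tag joins
matching = Finding.objects.filter(query).distinct()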
@@ -481,6 +515,26 @@ class UserFilter(FilterSet):
         }


+class RoleFilter(FilterSet):
+    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
+    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
+    permission_state = ChoiceFilter(
+        choices=PermissionChoices.choices, method="filter_permission_state"
+    )
+
+    def filter_permission_state(self, queryset, name, value):
+        return Role.filter_by_permission_state(queryset, value)
+
+    class Meta:
+        model = Role
+        fields = {
+            "id": ["exact", "in"],
+            "name": ["exact", "in"],
+            "inserted_at": ["gte", "lte"],
+            "updated_at": ["gte", "lte"],
+        }
+
+
 class ComplianceOverviewFilter(FilterSet):
     inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
     provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
@@ -521,3 +575,25 @@ class ScanSummaryFilter(FilterSet):
         "inserted_at": ["date", "gte", "lte"],
         "region": ["exact", "icontains", "in"],
     }
+
+
+class ServiceOverviewFilter(ScanSummaryFilter):
+    muted_findings = None
+
+    def is_valid(self):
+        # Check if at least one of the inserted_at filters is present
+        inserted_at_filters = [
+            self.data.get("inserted_at"),
+            self.data.get("inserted_at__gte"),
+            self.data.get("inserted_at__lte"),
+        ]
+        if not any(inserted_at_filters):
+            raise ValidationError(
+                {
+                    "inserted_at": [
+                        "At least one of filter[inserted_at], filter[inserted_at__gte], or "
+                        "filter[inserted_at__lte] is required."
+                    ]
+                }
+            )
+        return super().is_valid()
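Seen from a client, the override makes a date filter mandatory for the service overview. A hedged example; the endpoint path is assumed, not shown in this diff:

from rest_framework.test import APIClient

client = APIClient()
# No inserted_at filter: is_valid() raises ValidationError, surfacing as HTTP 400.
client.get("/api/v1/overviews/services")
# Any one of the three inserted_at filters satisfies the check.
client.get("/api/v1/overviews/services?filter[inserted_at__gte]=2024-10-18")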
@@ -6,6 +6,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.823Z",
     "updated_at": "2024-10-18T10:46:04.841Z",
+    "first_seen_at": "2024-10-18T10:46:04.823Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
    "delta": "new",
     "status": "FAIL",
@@ -61,6 +62,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.855Z",
     "updated_at": "2024-10-18T10:46:04.858Z",
+    "first_seen_at": "2024-10-18T10:46:04.855Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -116,6 +118,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.869Z",
     "updated_at": "2024-10-18T10:46:04.876Z",
+    "first_seen_at": "2024-10-18T10:46:04.869Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -171,6 +174,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.888Z",
     "updated_at": "2024-10-18T10:46:04.892Z",
+    "first_seen_at": "2024-10-18T10:46:04.888Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -226,6 +230,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.901Z",
     "updated_at": "2024-10-18T10:46:04.905Z",
+    "first_seen_at": "2024-10-18T10:46:04.901Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -281,6 +286,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.915Z",
     "updated_at": "2024-10-18T10:46:04.919Z",
+    "first_seen_at": "2024-10-18T10:46:04.915Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -336,6 +342,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.929Z",
     "updated_at": "2024-10-18T10:46:04.934Z",
+    "first_seen_at": "2024-10-18T10:46:04.929Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -391,6 +398,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.944Z",
     "updated_at": "2024-10-18T10:46:04.947Z",
+    "first_seen_at": "2024-10-18T10:46:04.944Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -446,6 +454,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.957Z",
     "updated_at": "2024-10-18T10:46:04.962Z",
+    "first_seen_at": "2024-10-18T10:46:04.957Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
     "delta": "new",
     "status": "PASS",
@@ -501,6 +510,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.971Z",
     "updated_at": "2024-10-18T10:46:04.975Z",
+    "first_seen_at": "2024-10-18T10:46:04.971Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -556,6 +566,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.984Z",
     "updated_at": "2024-10-18T10:46:04.989Z",
+    "first_seen_at": "2024-10-18T10:46:04.984Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -611,6 +622,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:04.999Z",
     "updated_at": "2024-10-18T10:46:05.003Z",
+    "first_seen_at": "2024-10-18T10:46:04.999Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -666,6 +678,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:05.013Z",
     "updated_at": "2024-10-18T10:46:05.018Z",
+    "first_seen_at": "2024-10-18T10:46:05.013Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -721,6 +734,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:05.029Z",
     "updated_at": "2024-10-18T10:46:05.033Z",
+    "first_seen_at": "2024-10-18T10:46:05.029Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -776,6 +790,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:05.045Z",
     "updated_at": "2024-10-18T10:46:05.050Z",
+    "first_seen_at": "2024-10-18T10:46:05.045Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -831,6 +846,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:05.061Z",
     "updated_at": "2024-10-18T10:46:05.065Z",
+    "first_seen_at": "2024-10-18T10:46:05.061Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -886,6 +902,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:05.080Z",
     "updated_at": "2024-10-18T10:46:05.085Z",
+    "first_seen_at": "2024-10-18T10:46:05.080Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -941,6 +958,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:05.099Z",
     "updated_at": "2024-10-18T10:46:05.104Z",
+    "first_seen_at": "2024-10-18T10:46:05.099Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -996,6 +1014,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T10:46:05.115Z",
     "updated_at": "2024-10-18T10:46:05.121Z",
+    "first_seen_at": "2024-10-18T10:46:05.115Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
     "delta": "new",
     "status": "FAIL",
@@ -1051,6 +1070,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.489Z",
     "updated_at": "2024-10-18T11:16:24.506Z",
+    "first_seen_at": "2024-10-18T10:46:04.823Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1106,6 +1126,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.518Z",
     "updated_at": "2024-10-18T11:16:24.521Z",
+    "first_seen_at": "2024-10-18T10:46:04.855Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1161,6 +1182,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.526Z",
     "updated_at": "2024-10-18T11:16:24.529Z",
+    "first_seen_at": "2024-10-18T10:46:04.869Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1216,6 +1238,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.535Z",
     "updated_at": "2024-10-18T11:16:24.538Z",
+    "first_seen_at": "2024-10-18T10:46:04.888Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1271,6 +1294,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.544Z",
     "updated_at": "2024-10-18T11:16:24.546Z",
+    "first_seen_at": "2024-10-18T10:46:04.901Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1326,6 +1350,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.551Z",
     "updated_at": "2024-10-18T11:16:24.554Z",
+    "first_seen_at": "2024-10-18T10:46:04.915Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1381,6 +1406,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.560Z",
     "updated_at": "2024-10-18T11:16:24.562Z",
+    "first_seen_at": "2024-10-18T10:46:04.929Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1436,6 +1462,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.567Z",
     "updated_at": "2024-10-18T11:16:24.569Z",
+    "first_seen_at": "2024-10-18T10:46:04.944Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1491,6 +1518,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.573Z",
     "updated_at": "2024-10-18T11:16:24.575Z",
+    "first_seen_at": "2024-10-18T10:46:04.957Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
     "delta": null,
     "status": "PASS",
@@ -1546,6 +1574,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.580Z",
     "updated_at": "2024-10-18T11:16:24.582Z",
+    "first_seen_at": "2024-10-18T10:46:04.971Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1601,6 +1630,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.587Z",
     "updated_at": "2024-10-18T11:16:24.589Z",
+    "first_seen_at": "2024-10-18T10:46:04.984Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1656,6 +1686,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.595Z",
     "updated_at": "2024-10-18T11:16:24.597Z",
+    "first_seen_at": "2024-10-18T10:46:04.999Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1711,6 +1742,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.602Z",
     "updated_at": "2024-10-18T11:16:24.604Z",
+    "first_seen_at": "2024-10-18T10:46:05.013Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1766,6 +1798,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.610Z",
     "updated_at": "2024-10-18T11:16:24.612Z",
+    "first_seen_at": "2024-10-18T10:46:05.029Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1821,6 +1854,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.617Z",
     "updated_at": "2024-10-18T11:16:24.620Z",
+    "first_seen_at": "2024-10-18T10:46:05.045Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1876,6 +1910,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.625Z",
     "updated_at": "2024-10-18T11:16:24.627Z",
+    "first_seen_at": "2024-10-18T10:46:05.061Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1931,6 +1966,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.632Z",
     "updated_at": "2024-10-18T11:16:24.634Z",
+    "first_seen_at": "2024-10-18T10:46:05.080Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -1986,6 +2022,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.639Z",
     "updated_at": "2024-10-18T11:16:24.642Z",
+    "first_seen_at": "2024-10-18T10:46:05.099Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -2041,6 +2078,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:24.646Z",
     "updated_at": "2024-10-18T11:16:24.648Z",
+    "first_seen_at": "2024-10-18T10:46:05.115Z",
     "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
     "delta": null,
     "status": "FAIL",
@@ -2096,6 +2134,7 @@
     "tenant": "12646005-9067-4d2a-a098-8bb378604362",
     "inserted_at": "2024-10-18T11:16:26.033Z",
     "updated_at": "2024-10-18T11:16:26.045Z",
+    "first_seen_at": "2024-10-18T11:16:26.033Z",
     "uid": "prowler-aws-account_security_contact_information_is_registered-112233445566-us-east-1-112233445566",
     "delta": "new",
     "status": "MANUAL",
@@ -58,5 +58,96 @@
     "provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
     "inserted_at": "2024-11-13T11:55:41.237Z"
   }
 },
+{
+  "model": "api.role",
+  "pk": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
+  "fields": {
+    "tenant": "12646005-9067-4d2a-a098-8bb378604362",
+    "name": "admin_test",
+    "manage_users": true,
+    "manage_account": true,
+    "manage_billing": true,
+    "manage_providers": true,
+    "manage_integrations": true,
+    "manage_scans": true,
+    "unlimited_visibility": true,
+    "inserted_at": "2024-11-20T15:32:42.402Z",
+    "updated_at": "2024-11-20T15:32:42.402Z"
+  }
+},
+{
+  "model": "api.role",
+  "pk": "845ff03a-87ef-42ba-9786-6577c70c4df0",
+  "fields": {
+    "tenant": "12646005-9067-4d2a-a098-8bb378604362",
+    "name": "first_role",
+    "manage_users": true,
+    "manage_account": true,
+    "manage_billing": true,
+    "manage_providers": true,
+    "manage_integrations": false,
+    "manage_scans": false,
+    "unlimited_visibility": true,
+    "inserted_at": "2024-11-20T15:31:53.239Z",
+    "updated_at": "2024-11-20T15:31:53.239Z"
+  }
+},
+{
+  "model": "api.role",
+  "pk": "902d726c-4bd5-413a-a2a4-f7b4754b6b20",
+  "fields": {
+    "tenant": "12646005-9067-4d2a-a098-8bb378604362",
+    "name": "third_role",
+    "manage_users": false,
+    "manage_account": false,
+    "manage_billing": false,
+    "manage_providers": false,
+    "manage_integrations": false,
+    "manage_scans": true,
+    "unlimited_visibility": false,
+    "inserted_at": "2024-11-20T15:34:05.440Z",
+    "updated_at": "2024-11-20T15:34:05.440Z"
+  }
+},
+{
+  "model": "api.roleprovidergrouprelationship",
+  "pk": "57fd024a-0a7f-49b4-a092-fa0979a07aaf",
+  "fields": {
+    "tenant": "12646005-9067-4d2a-a098-8bb378604362",
+    "role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
+    "provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
+    "inserted_at": "2024-11-20T15:32:42.402Z"
+  }
+},
+{
+  "model": "api.roleprovidergrouprelationship",
+  "pk": "a3cd0099-1c13-4df1-a5e5-ecdfec561b35",
+  "fields": {
+    "tenant": "12646005-9067-4d2a-a098-8bb378604362",
+    "role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
+    "provider_group": "481769f5-db2b-447b-8b00-1dee18db90ec",
+    "inserted_at": "2024-11-20T15:32:42.402Z"
+  }
+},
+{
+  "model": "api.roleprovidergrouprelationship",
+  "pk": "cfd84182-a058-40c2-af3c-0189b174940f",
+  "fields": {
+    "tenant": "12646005-9067-4d2a-a098-8bb378604362",
+    "role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
+    "provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
+    "inserted_at": "2024-11-20T15:32:42.402Z"
+  }
+},
+{
+  "model": "api.userrolerelationship",
+  "pk": "92339663-e954-4fd8-98fb-8bfe15949975",
+  "fields": {
+    "tenant": "12646005-9067-4d2a-a098-8bb378604362",
+    "role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
+    "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
+    "inserted_at": "2024-11-20T15:36:14.302Z"
+  }
+}
 ]
@@ -0,0 +1,23 @@
# Generated by Django 5.1.1 on 2024-12-20 13:16

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0002_token_migrations"),
    ]

    operations = [
        migrations.RemoveConstraint(
            model_name="provider",
            name="unique_provider_uids",
        ),
        migrations.AddConstraint(
            model_name="provider",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "provider", "uid", "is_deleted"),
                name="unique_provider_uids",
            ),
        ),
    ]
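Adding is_deleted to the unique constraint lets a tenant re-onboard a provider UID after a soft delete. A minimal sketch of the intent, assuming `tenant` is an existing Tenant instance:

from api.models import Provider

p1 = Provider.objects.create(tenant=tenant, provider="aws", uid="123456789012")
p1.is_deleted = True  # soft delete keeps the row for history
p1.save()
# No longer collides: (tenant_id, provider, uid, is_deleted) differs on is_deleted.
p2 = Provider.objects.create(tenant=tenant, provider="aws", uid="123456789012")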
api/src/backend/api/migrations/0004_rbac.py (new file, 248 lines)
@@ -0,0 +1,248 @@
# Generated by Django 5.1.1 on 2024-12-05 12:29

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models

import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0003_update_provider_unique_constraint_with_is_deleted"),
    ]

    operations = [
        migrations.CreateModel(
            name="Role",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("name", models.CharField(max_length=255)),
                ("manage_users", models.BooleanField(default=False)),
                ("manage_account", models.BooleanField(default=False)),
                ("manage_billing", models.BooleanField(default=False)),
                ("manage_providers", models.BooleanField(default=False)),
                ("manage_integrations", models.BooleanField(default=False)),
                ("manage_scans", models.BooleanField(default=False)),
                ("unlimited_visibility", models.BooleanField(default=False)),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "roles",
            },
        ),
        migrations.CreateModel(
            name="RoleProviderGroupRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_provider_group_relationship",
            },
        ),
        migrations.CreateModel(
            name="UserRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_user_relationship",
            },
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="provider_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.providergroup"
            ),
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="provider_groups",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.RoleProviderGroupRelationship",
                to="api.providergroup",
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="users",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.UserRoleRelationship",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "provider_group_id"),
                name="unique_role_provider_group_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_roleprovidergrouprelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "user_id"), name="unique_role_user_relationship"
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_userrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "name"), name="unique_role_per_tenant"
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_role",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.CreateModel(
            name="InvitationRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "invitation",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.invitation"
                    ),
                ),
                (
                    "role",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.role"
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_invitation_relationship",
            },
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "invitation_id"),
                name="unique_role_invitation_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_invitationrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="invitations",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.InvitationRoleRelationship",
                to="api.invitation",
            ),
        ),
    ]
@@ -0,0 +1,44 @@
from django.db import migrations

from api.db_router import MainRouter


def create_admin_role(apps, schema_editor):
    Tenant = apps.get_model("api", "Tenant")
    Role = apps.get_model("api", "Role")
    User = apps.get_model("api", "User")
    UserRoleRelationship = apps.get_model("api", "UserRoleRelationship")

    for tenant in Tenant.objects.using(MainRouter.admin_db).all():
        admin_role, _ = Role.objects.using(MainRouter.admin_db).get_or_create(
            name="admin",
            tenant=tenant,
            defaults={
                "manage_users": True,
                "manage_account": True,
                "manage_billing": True,
                "manage_providers": True,
                "manage_integrations": True,
                "manage_scans": True,
                "unlimited_visibility": True,
            },
        )
        users = User.objects.using(MainRouter.admin_db).filter(
            membership__tenant=tenant
        )
        for user in users:
            UserRoleRelationship.objects.using(MainRouter.admin_db).get_or_create(
                user=user,
                role=admin_role,
                tenant=tenant,
            )


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0004_rbac"),
    ]

    operations = [
        migrations.RunPython(create_admin_role),
    ]
api/src/backend/api/migrations/0006_findings_first_seen.py (new file, 15 lines)
@@ -0,0 +1,15 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0005_rbac_missing_admin_roles"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="first_seen_at",
            field=models.DateTimeField(editable=False, null=True),
        ),
    ]
@@ -0,0 +1,25 @@
# Generated by Django 5.1.5 on 2025-01-28 15:03

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0006_findings_first_seen"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="scan",
            index=models.Index(
                fields=["tenant_id", "provider_id", "state", "inserted_at"],
                name="scans_prov_state_insert_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="scansummary",
            index=models.Index(
                fields=["tenant_id", "scan_id"], name="scan_summaries_tenant_scan_idx"
            ),
        ),
    ]
@@ -69,6 +69,21 @@ class StateChoices(models.TextChoices):
     CANCELLED = "cancelled", _("Cancelled")


+class PermissionChoices(models.TextChoices):
+    """
+    Represents the different permission states that a role can have.
+
+    Attributes:
+        UNLIMITED: Indicates that the role possesses all permissions.
+        LIMITED: Indicates that the role has some permissions but not all.
+        NONE: Indicates that the role does not have any permissions.
+    """
+
+    UNLIMITED = "unlimited", _("Unlimited permissions")
+    LIMITED = "limited", _("Limited permissions")
+    NONE = "none", _("No permissions")
+
+
 class ActiveProviderManager(models.Manager):
     def get_queryset(self):
         return super().get_queryset().filter(self.active_provider_filter())
@@ -256,7 +271,7 @@ class Provider(RowLevelSecurityProtectedModel):

         constraints = [
             models.UniqueConstraint(
-                fields=("tenant_id", "provider", "uid"),
+                fields=("tenant_id", "provider", "uid", "is_deleted"),
                 name="unique_provider_uids",
             ),
             RowLevelSecurityConstraint(
@@ -298,19 +313,10 @@ class ProviderGroup(RowLevelSecurityProtectedModel):


 class ProviderGroupMembership(RowLevelSecurityProtectedModel):
-    objects = ActiveProviderManager()
-    all_objects = models.Manager()
-
     id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
-    provider = models.ForeignKey(
-        Provider,
-        on_delete=models.CASCADE,
-    )
-    provider_group = models.ForeignKey(
-        ProviderGroup,
-        on_delete=models.CASCADE,
-    )
-    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
+    provider_group = models.ForeignKey(ProviderGroup, on_delete=models.CASCADE)
+    provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
+    inserted_at = models.DateTimeField(auto_now_add=True)

     class Meta:
         db_table = "provider_group_memberships"
@@ -327,7 +333,7 @@ class ProviderGroupMembership(RowLevelSecurityProtectedModel):
         ]

     class JSONAPIMeta:
-        resource_name = "provider-group-memberships"
+        resource_name = "provider_groups-provider"


 class Task(RowLevelSecurityProtectedModel):
@@ -422,6 +428,10 @@ class Scan(RowLevelSecurityProtectedModel):
                 fields=["provider", "state", "trigger", "scheduled_at"],
                 name="scans_prov_state_trig_sche_idx",
             ),
+            models.Index(
+                fields=["tenant_id", "provider_id", "state", "inserted_at"],
+                name="scans_prov_state_insert_idx",
+            ),
         ]

     class JSONAPIMeta:
@@ -509,8 +519,8 @@ class Resource(RowLevelSecurityProtectedModel):
         through="ResourceTagMapping",
     )

-    def get_tags(self) -> dict:
-        return {tag.key: tag.value for tag in self.tags.all()}
+    def get_tags(self, tenant_id: str) -> dict:
+        return {tag.key: tag.value for tag in self.tags.filter(tenant_id=tenant_id)}

     def clear_tags(self):
         self.tags.clear()
@@ -609,6 +619,7 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
     id = models.UUIDField(primary_key=True, default=uuid7, editable=False)
     inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
     updated_at = models.DateTimeField(auto_now=True, editable=False)
+    first_seen_at = models.DateTimeField(editable=False, null=True)

     uid = models.CharField(max_length=300)
     delta = FindingDeltaEnumField(
@@ -851,6 +862,150 @@ class Invitation(RowLevelSecurityProtectedModel):
         resource_name = "invitations"


+class Role(RowLevelSecurityProtectedModel):
+    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
+    name = models.CharField(max_length=255)
+    manage_users = models.BooleanField(default=False)
+    manage_account = models.BooleanField(default=False)
+    manage_billing = models.BooleanField(default=False)
+    manage_providers = models.BooleanField(default=False)
+    manage_integrations = models.BooleanField(default=False)
+    manage_scans = models.BooleanField(default=False)
+    unlimited_visibility = models.BooleanField(default=False)
+    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
+    updated_at = models.DateTimeField(auto_now=True, editable=False)
+    provider_groups = models.ManyToManyField(
+        ProviderGroup, through="RoleProviderGroupRelationship", related_name="roles"
+    )
+    users = models.ManyToManyField(
+        User, through="UserRoleRelationship", related_name="roles"
+    )
+    invitations = models.ManyToManyField(
+        Invitation, through="InvitationRoleRelationship", related_name="roles"
+    )
+
+    # Filter permission_state
+    PERMISSION_FIELDS = [
+        "manage_users",
+        "manage_account",
+        "manage_billing",
+        "manage_providers",
+        "manage_integrations",
+        "manage_scans",
+    ]
+
+    @property
+    def permission_state(self):
+        values = [getattr(self, field) for field in self.PERMISSION_FIELDS]
+        if all(values):
+            return PermissionChoices.UNLIMITED
+        elif not any(values):
+            return PermissionChoices.NONE
+        else:
+            return PermissionChoices.LIMITED
+
+    @classmethod
+    def filter_by_permission_state(cls, queryset, value):
+        q_all_true = Q(**{field: True for field in cls.PERMISSION_FIELDS})
+        q_all_false = Q(**{field: False for field in cls.PERMISSION_FIELDS})
+
+        if value == PermissionChoices.UNLIMITED:
+            return queryset.filter(q_all_true)
+        elif value == PermissionChoices.NONE:
+            return queryset.filter(q_all_false)
+        else:
+            return queryset.exclude(q_all_true | q_all_false)
+
+    class Meta:
+        db_table = "roles"
+        constraints = [
+            models.UniqueConstraint(
+                fields=["tenant_id", "name"],
+                name="unique_role_per_tenant",
+            ),
+            RowLevelSecurityConstraint(
+                field="tenant_id",
+                name="rls_on_%(class)s",
+                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
+            ),
+        ]
+
+    class JSONAPIMeta:
+        resource_name = "roles"
+
+
+class RoleProviderGroupRelationship(RowLevelSecurityProtectedModel):
+    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
+    role = models.ForeignKey(Role, on_delete=models.CASCADE)
+    provider_group = models.ForeignKey(ProviderGroup, on_delete=models.CASCADE)
+    inserted_at = models.DateTimeField(auto_now_add=True)
+
+    class Meta:
+        db_table = "role_provider_group_relationship"
+        constraints = [
+            models.UniqueConstraint(
+                fields=["role_id", "provider_group_id"],
+                name="unique_role_provider_group_relationship",
+            ),
+            RowLevelSecurityConstraint(
+                field="tenant_id",
+                name="rls_on_%(class)s",
+                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
+            ),
+        ]
+
+    class JSONAPIMeta:
+        resource_name = "role-provider_groups"
+
+
+class UserRoleRelationship(RowLevelSecurityProtectedModel):
+    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
+    role = models.ForeignKey(Role, on_delete=models.CASCADE)
+    user = models.ForeignKey(User, on_delete=models.CASCADE)
+    inserted_at = models.DateTimeField(auto_now_add=True)
+
+    class Meta:
+        db_table = "role_user_relationship"
+        constraints = [
+            models.UniqueConstraint(
+                fields=["role_id", "user_id"],
+                name="unique_role_user_relationship",
+            ),
+            RowLevelSecurityConstraint(
+                field="tenant_id",
+                name="rls_on_%(class)s",
+                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
+            ),
+        ]
+
+    class JSONAPIMeta:
+        resource_name = "user-roles"
+
+
+class InvitationRoleRelationship(RowLevelSecurityProtectedModel):
+    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
+    role = models.ForeignKey(Role, on_delete=models.CASCADE)
+    invitation = models.ForeignKey(Invitation, on_delete=models.CASCADE)
+    inserted_at = models.DateTimeField(auto_now_add=True)
+
+    class Meta:
+        db_table = "role_invitation_relationship"
+        constraints = [
+            models.UniqueConstraint(
+                fields=["role_id", "invitation_id"],
+                name="unique_role_invitation_relationship",
+            ),
+            RowLevelSecurityConstraint(
+                field="tenant_id",
+                name="rls_on_%(class)s",
+                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
+            ),
+        ]
+
+    class JSONAPIMeta:
+        resource_name = "invitation-roles"
+
+
 class ComplianceOverview(RowLevelSecurityProtectedModel):
     objects = ActiveProviderManager()
     all_objects = models.Manager()
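A short sketch of how the derived permission_state behaves, based on the property above; the role values are illustrative, and note that unlimited_visibility is intentionally not part of PERMISSION_FIELDS:

from api.models import PermissionChoices, Role

role = Role(manage_users=True, manage_account=True, manage_billing=True,
            manage_providers=True, manage_integrations=True, manage_scans=True)
role.permission_state  # PermissionChoices.UNLIMITED: every flag is True

role.manage_scans = False
role.permission_state  # PermissionChoices.LIMITED: some but not all flags

# Queryset-side equivalent, as used by RoleFilter:
Role.filter_by_permission_state(Role.objects.all(), PermissionChoices.LIMITED)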
@@ -949,6 +1104,12 @@ class ScanSummary(RowLevelSecurityProtectedModel):
                 statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
             ),
         ]
+        indexes = [
+            models.Index(
+                fields=["tenant_id", "scan_id"],
+                name="scan_summaries_tenant_scan_idx",
+            )
+        ]

     class JSONAPIMeta:
         resource_name = "scan-summaries"
api/src/backend/api/rbac/permissions.py (new file, 75 lines)
@@ -0,0 +1,75 @@
from enum import Enum
from typing import Optional

from django.db.models import QuerySet
from rest_framework.permissions import BasePermission

from api.db_router import MainRouter
from api.models import Provider, Role, User


class Permissions(Enum):
    MANAGE_USERS = "manage_users"
    MANAGE_ACCOUNT = "manage_account"
    MANAGE_BILLING = "manage_billing"
    MANAGE_PROVIDERS = "manage_providers"
    MANAGE_INTEGRATIONS = "manage_integrations"
    MANAGE_SCANS = "manage_scans"
    UNLIMITED_VISIBILITY = "unlimited_visibility"


class HasPermissions(BasePermission):
    """
    Custom permission to check if the user's role has the required permissions.
    The required permissions should be specified in the view as a list in `required_permissions`.
    """

    def has_permission(self, request, view):
        required_permissions = getattr(view, "required_permissions", [])
        if not required_permissions:
            return True

        user_roles = (
            User.objects.using(MainRouter.admin_db).get(id=request.user.id).roles.all()
        )
        if not user_roles:
            return False

        for perm in required_permissions:
            if not getattr(user_roles[0], perm.value, False):
                return False

        return True


def get_role(user: User) -> Optional[Role]:
    """
    Retrieve the first role assigned to the given user.

    Returns:
        The user's first Role instance if the user has any roles, otherwise None.
    """
    return user.roles.first()


def get_providers(role: Role) -> QuerySet[Provider]:
    """
    Return a distinct queryset of Providers accessible by the given role.

    If the role has no associated provider groups, an empty queryset is returned.

    Args:
        role: A Role instance.

    Returns:
        A QuerySet of Provider objects filtered by the role's provider groups.
        If the role has no provider groups, returns an empty queryset.
    """
    tenant = role.tenant
    provider_groups = role.provider_groups.all()
    if not provider_groups.exists():
        return Provider.objects.none()

    return Provider.objects.filter(
        tenant=tenant, provider_groups__in=provider_groups
    ).distinct()
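A hedged sketch of wiring HasPermissions into a DRF view; the view class and queryset are illustrative, not part of this diff:

from rest_framework import viewsets

from api.models import Provider
from api.rbac.permissions import HasPermissions, Permissions

class ProviderViewSet(viewsets.ModelViewSet):
    permission_classes = [HasPermissions]
    # HasPermissions checks each entry against the caller's first role.
    required_permissions = [Permissions.MANAGE_PROVIDERS]
    queryset = Provider.objects.all()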
@@ -2,7 +2,7 @@ from contextlib import nullcontext

 from rest_framework_json_api.renderers import JSONRenderer

-from api.db_utils import tenant_transaction
+from api.db_utils import rls_transaction


 class APIJSONRenderer(JSONRenderer):
@@ -13,9 +13,9 @@ class APIJSONRenderer(JSONRenderer):
         tenant_id = getattr(request, "tenant_id", None) if request else None
         include_param_present = "include" in request.query_params if request else False

-        # Use tenant_transaction if needed for included resources, otherwise do nothing
+        # Use rls_transaction if needed for included resources, otherwise do nothing
         context_manager = (
-            tenant_transaction(tenant_id)
+            rls_transaction(tenant_id)
             if tenant_id and include_param_present
             else nullcontext()
         )

(File diff suppressed because it is too large)
@@ -1,12 +1,9 @@
 import pytest
-from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
 from django.urls import reverse
-from unittest.mock import patch
 from rest_framework.test import APIClient

+from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header


-@patch("api.v1.views.MainRouter.admin_db", new="default")
 @pytest.mark.django_db
 def test_basic_authentication():
     client = APIClient()
@@ -98,3 +95,85 @@ def test_refresh_token(create_test_user, tenants_fixture):
         format="vnd.api+json",
     )
     assert new_refresh_response.status_code == 200
+
+
+@pytest.mark.django_db
+def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fixture):
+    client = APIClient()
+
+    role = roles_fixture[0]
+
+    user1_email = "user1@testing.com"
+    user2_email = "user2@testing.com"
+
+    password = "thisisapassword123"
+
+    user1_response = client.post(
+        reverse("user-list"),
+        data={
+            "data": {
+                "type": "users",
+                "attributes": {
+                    "name": "user1",
+                    "email": user1_email,
+                    "password": password,
+                },
+            }
+        },
+        format="vnd.api+json",
+    )
+    assert user1_response.status_code == 201
+
+    user1_access_token, _ = get_api_tokens(client, user1_email, password)
+    user1_headers = get_authorization_header(user1_access_token)
+
+    user2_invitation = client.post(
+        reverse("invitation-list"),
+        data={
+            "data": {
+                "type": "invitations",
+                "attributes": {"email": user2_email},
+                "relationships": {
+                    "roles": {
+                        "data": [
+                            {
+                                "type": "roles",
+                                "id": str(role.id),
+                            }
+                        ]
+                    }
+                },
+            }
+        },
+        format="vnd.api+json",
+        headers=user1_headers,
+    )
+    assert user2_invitation.status_code == 201
+    invitation_token = user2_invitation.json()["data"]["attributes"]["token"]
+
+    user2_response = client.post(
+        reverse("user-list") + f"?invitation_token={invitation_token}",
+        data={
+            "data": {
+                "type": "users",
+                "attributes": {
+                    "name": "user2",
+                    "email": user2_email,
+                    "password": password,
+                },
+            }
+        },
+        format="vnd.api+json",
+    )
+    assert user2_response.status_code == 201
+
+    user2_access_token, _ = get_api_tokens(client, user2_email, password)
+    user2_headers = get_authorization_header(user2_access_token)
+
+    user1_me = client.get(reverse("user-me"), headers=user1_headers)
+    assert user1_me.status_code == 200
+    assert user1_me.json()["data"]["attributes"]["email"] == user1_email
+
+    user2_me = client.get(reverse("user-me"), headers=user2_headers)
+    assert user2_me.status_code == 200
+    assert user2_me.json()["data"]["attributes"]["email"] == user2_email
api/src/backend/api/tests/integration/test_providers.py (new file, 85 lines)
@@ -0,0 +1,85 @@
from unittest.mock import Mock, patch

import pytest
from conftest import get_api_tokens, get_authorization_header
from django.urls import reverse
from rest_framework.test import APIClient

from api.models import Provider


@patch("api.v1.views.Task.objects.get")
@patch("api.v1.views.delete_provider_task.delay")
@pytest.mark.django_db
def test_delete_provider_without_executing_task(
    mock_delete_task, mock_task_get, create_test_user, tenants_fixture, tasks_fixture
):
    client = APIClient()

    test_user = "test_email@prowler.com"
    test_password = "test_password"

    prowler_task = tasks_fixture[0]
    task_mock = Mock()
    task_mock.id = prowler_task.id
    mock_delete_task.return_value = task_mock
    mock_task_get.return_value = prowler_task

    user_creation_response = client.post(
        reverse("user-list"),
        data={
            "data": {
                "type": "users",
                "attributes": {
                    "name": "test",
                    "email": test_user,
                    "password": test_password,
                },
            }
        },
        format="vnd.api+json",
    )
    assert user_creation_response.status_code == 201

    access_token, _ = get_api_tokens(client, test_user, test_password)
    auth_headers = get_authorization_header(access_token)

    create_provider_response = client.post(
        reverse("provider-list"),
        data={
            "data": {
                "type": "providers",
                "attributes": {
                    "provider": Provider.ProviderChoices.AWS,
                    "uid": "123456789012",
                },
            }
        },
        format="vnd.api+json",
        headers=auth_headers,
    )
    assert create_provider_response.status_code == 201
    provider_id = create_provider_response.json()["data"]["id"]
    provider_uid = create_provider_response.json()["data"]["attributes"]["uid"]

    remove_provider = client.delete(
        reverse("provider-detail", kwargs={"pk": provider_id}),
        headers=auth_headers,
    )
    assert remove_provider.status_code == 202

    recreate_provider_response = client.post(
        reverse("provider-list"),
        data={
            "data": {
                "type": "providers",
                "attributes": {
                    "provider": Provider.ProviderChoices.AWS,
                    "uid": provider_uid,
                },
            }
        },
        format="vnd.api+json",
        headers=auth_headers,
    )
    assert recreate_provider_response.status_code == 201
@@ -13,6 +13,7 @@ def test_check_resources_between_different_tenants(
     enforce_test_user_db_connection,
     authenticated_api_client,
     tenants_fixture,
+    set_user_admin_roles_fixture,
 ):
     client = authenticated_api_client
|
||||
from api.db_router import MainRouter
|
||||
from api.rls import Tenant
|
||||
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
@patch("api.db_router.MainRouter.admin_db", new="admin")
|
||||
class TestMainDatabaseRouter:
|
||||
@pytest.fixture(scope="module")
|
||||
def router(self):
|
||||
|
||||
@@ -2,7 +2,15 @@ from datetime import datetime, timezone
 from enum import Enum
 from unittest.mock import patch

-from api.db_utils import enum_to_choices, one_week_from_now, generate_random_token
+import pytest
+
+from api.db_utils import (
+    batch_delete,
+    enum_to_choices,
+    generate_random_token,
+    one_week_from_now,
+)
+from api.models import Provider


 class TestEnumToChoices:
@@ -106,3 +114,26 @@ class TestGenerateRandomToken:
         token = generate_random_token(length=5, symbols="")
         # Default symbols
         assert len(token) == 5


+class TestBatchDelete:
+    @pytest.fixture
+    def create_test_providers(self, tenants_fixture):
+        tenant = tenants_fixture[0]
+        provider_id = 123456789012
+        provider_count = 10
+        for i in range(provider_count):
+            Provider.objects.create(
+                tenant=tenant,
+                uid=f"{provider_id + i}",
+                provider=Provider.ProviderChoices.AWS,
+            )
+        return provider_count
+
+    @pytest.mark.django_db
+    def test_batch_delete(self, create_test_providers):
+        _, summary = batch_delete(
+            Provider.objects.all(), batch_size=create_test_providers // 2
+        )
+        assert Provider.objects.all().count() == 0
+        assert summary == {"api.Provider": create_test_providers}
@@ -1,7 +1,9 @@
-from unittest.mock import patch, call
+import uuid
+from unittest.mock import call, patch

 import pytest

+from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
 from api.decorators import set_tenant


@@ -15,12 +17,12 @@ class TestSetTenantDecorator:
         def random_func(arg):
             return arg

-        tenant_id = "1234-abcd-5678"
+        tenant_id = str(uuid.uuid4())

         result = random_func("test_arg", tenant_id=tenant_id)

         assert (
-            call(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
+            call(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
             in mock_cursor.execute.mock_calls
         )
         assert result == "test_arg"
@@ -7,9 +7,10 @@ from api.models import Resource, ResourceTag
class TestResourceModel:
    def test_setting_tags(self, providers_fixture):
        provider, *_ = providers_fixture
        tenant_id = provider.tenant_id

        resource = Resource.objects.create(
            tenant_id=provider.tenant_id,
            tenant_id=tenant_id,
            provider=provider,
            uid="arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0",
            name="My Instance 1",
@@ -20,12 +21,12 @@ class TestResourceModel:

        tags = [
            ResourceTag.objects.create(
                tenant_id=provider.tenant_id,
                tenant_id=tenant_id,
                key="key",
                value="value",
            ),
            ResourceTag.objects.create(
                tenant_id=provider.tenant_id,
                tenant_id=tenant_id,
                key="key2",
                value="value2",
            ),
@@ -33,9 +34,9 @@ class TestResourceModel:

        resource.upsert_or_delete_tags(tags)

        assert len(tags) == len(resource.tags.all())
        assert len(tags) == len(resource.tags.filter(tenant_id=tenant_id))

        tags_dict = resource.get_tags()
        tags_dict = resource.get_tags(tenant_id=tenant_id)

        for tag in tags:
            assert tag.key in tags_dict
@@ -43,47 +44,51 @@ class TestResourceModel:

    def test_adding_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)

        tags = [
            ResourceTag.objects.create(
                tenant_id=resource.tenant_id,
                tenant_id=tenant_id,
                key="env",
                value="test",
            ),
        ]
        before_count = len(resource.tags.all())
        before_count = len(resource.tags.filter(tenant_id=tenant_id))

        resource.upsert_or_delete_tags(tags)

        assert before_count + 1 == len(resource.tags.all())
        assert before_count + 1 == len(resource.tags.filter(tenant_id=tenant_id))

        tags_dict = resource.get_tags()
        tags_dict = resource.get_tags(tenant_id=tenant_id)

        assert "env" in tags_dict
        assert tags_dict["env"] == "test"

    def test_adding_duplicate_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)

        tags = resource.tags.all()
        tags = resource.tags.filter(tenant_id=tenant_id)

        before_count = len(resource.tags.all())
        before_count = len(resource.tags.filter(tenant_id=tenant_id))

        resource.upsert_or_delete_tags(tags)

        # should be the same number of tags
        assert before_count == len(resource.tags.all())
        assert before_count == len(resource.tags.filter(tenant_id=tenant_id))

    def test_add_tags_none(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)
        resource.upsert_or_delete_tags(None)

        assert len(resource.tags.all()) == 0
        assert resource.get_tags() == {}
        assert len(resource.tags.filter(tenant_id=tenant_id)) == 0
        assert resource.get_tags(tenant_id=tenant_id) == {}

    def test_clear_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)
        resource.clear_tags()

        assert len(resource.tags.all()) == 0
        assert resource.get_tags() == {}
        assert len(resource.tags.filter(tenant_id=tenant_id)) == 0
        assert resource.get_tags(tenant_id=tenant_id) == {}

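The updated assertions require every tag lookup to name a tenant. A hedged sketch (an assumption for illustration, not the repository's model code) of what the tenant-scoped accessor could look like after this change:

# Hypothetical method body, showing only the new call signature in use.
def get_tags(self, tenant_id: str) -> dict:
    # Restrict the reverse relation to the caller's tenant before building
    # the {key: value} mapping the assertions above consume.
    return {tag.key: tag.value for tag in self.tags.filter(tenant_id=tenant_id)}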
api/src/backend/api/tests/test_rbac.py (new file, +306 lines)
@@ -0,0 +1,306 @@
import pytest
from django.urls import reverse
from rest_framework import status
from unittest.mock import patch, ANY, Mock


@pytest.mark.django_db
class TestUserViewSet:
    def test_list_users_with_all_permissions(self, authenticated_client_rbac):
        response = authenticated_client_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_200_OK
        assert isinstance(response.json()["data"], list)

    def test_list_users_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_retrieve_user_with_no_roles(
        self, authenticated_client_rbac_noroles, create_test_user_rbac_no_roles
    ):
        response = authenticated_client_rbac_noroles.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac_no_roles.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_create_user_with_all_permissions(self, authenticated_client_rbac):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_create_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_no_permissions_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_partial_update_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        updated_data = {
            "data": {
                "type": "users",
                "id": str(create_test_user_rbac.id),
                "attributes": {"name": "Updated Name"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id}),
            data=updated_data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["name"] == "Updated Name"

    def test_partial_update_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        updated_data = {
            "data": {
                "type": "users",
                "attributes": {"name": "Updated Name"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user.id}),
            data=updated_data,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_delete_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT

    def test_delete_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_me_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_me_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["email"] == "rbac_limited@rbac.com"


@pytest.mark.django_db
class TestProviderViewSet:
    def test_list_providers_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        response = authenticated_client_rbac.get(reverse("provider-list"))
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == len(providers_fixture)

    def test_list_providers_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-list")
        )
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

    def test_retrieve_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == provider.alias

    def test_retrieve_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_create_provider_with_all_permissions(self, authenticated_client_rbac):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["alias"] == "new_alias"

    def test_create_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_partial_update_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        payload = {
            "data": {
                "type": "providers",
                "id": provider.id,
                "attributes": {"alias": "updated_alias"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=payload,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == "updated_alias"

    def test_partial_update_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        update_payload = {
            "data": {
                "type": "providers",
                "attributes": {"alias": "updated_alias"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=update_payload,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.delete_provider_task.delay")
    def test_delete_provider_with_all_permissions(
        self,
        mock_delete_task,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        mock_delete_task.return_value = task_mock
        mock_task_get.return_value = prowler_task

        provider1, *_ = providers_fixture
        response = authenticated_client_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_delete_task.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_delete_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.check_provider_connection_task.delay")
    def test_connection_with_all_permissions(
        self,
        mock_provider_connection,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        task_mock.status = "PENDING"
        mock_provider_connection.return_value = task_mock
        mock_task_get.return_value = prowler_task

        provider1, *_ = providers_fixture
        assert provider1.connected is None
        assert provider1.connection_last_checked_at is None

        response = authenticated_client_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_provider_connection.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_connection_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN
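A pattern worth noting in the new tests: for a user whose role grants no visibility, list endpoints return 200 with an empty collection, detail endpoints return 404 (the object is filtered out of the queryset), and mutating endpoints return 403. A hedged sketch of how such a split is commonly wired in DRF; this is an assumption for illustration, not the repository's actual permission class, and the `roles` accessor is hypothetical:

from rest_framework import permissions

class AssumedProviderPermissions(permissions.BasePermission):
    """Illustrative only: gate writes on a role flag, leave reads to queryset filtering."""

    WRITE_ACTIONS = {"create", "update", "partial_update", "destroy"}

    def has_permission(self, request, view):
        roles = getattr(request.user, "roles", None)  # hypothetical related manager
        if view.action in self.WRITE_ACTIONS:
            return bool(roles and any(r.manage_providers for r in roles.all()))
        # list/retrieve rely on tenant- and role-scoped querysets instead,
        # which yields the empty-list-vs-404 behavior the tests assert.
        return True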
File diff suppressed because it is too large
@@ -17,6 +17,7 @@ from api.models import (
    ComplianceOverview,
    Finding,
    Invitation,
    InvitationRoleRelationship,
    Membership,
    Provider,
    ProviderGroup,
@@ -24,10 +25,13 @@ from api.models import (
    ProviderSecret,
    Resource,
    ResourceTag,
    Role,
    RoleProviderGroupRelationship,
    Scan,
    StateChoices,
    Task,
    User,
    UserRoleRelationship,
)
from api.rls import Tenant

@@ -176,10 +180,26 @@ class UserSerializer(BaseSerializerV1):
    """

    memberships = serializers.ResourceRelatedField(many=True, read_only=True)
    roles = serializers.ResourceRelatedField(many=True, read_only=True)

    class Meta:
        model = User
        fields = ["id", "name", "email", "company_name", "date_joined", "memberships"]
        fields = [
            "id",
            "name",
            "email",
            "company_name",
            "date_joined",
            "memberships",
            "roles",
        ]
        extra_kwargs = {
            "roles": {"read_only": True},
        }

    included_serializers = {
        "roles": "api.v1.serializers.RoleSerializer",
    }


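Because `roles` is declared in `included_serializers`, clients can sideload the related role objects through the standard JSON:API `include` query parameter (behavior provided by django-rest-framework-json-api). A hypothetical request, assuming the `/api/v1` prefix seen in the tests:

# Illustration only; "client" is an authenticated test client.
response = client.get("/api/v1/users?include=roles")
payload = response.json()
payload["data"]      # the user resources, each with a "roles" relationship
payload["included"]  # the full role resources, rendered by RoleSerializer
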
class UserCreateSerializer(BaseWriteSerializer):
@@ -235,6 +255,73 @@ class UserUpdateSerializer(BaseWriteSerializer):
        return super().update(instance, validated_data)


class RoleResourceIdentifierSerializer(serializers.Serializer):
    resource_type = serializers.CharField(source="type")
    id = serializers.UUIDField()

    class JSONAPIMeta:
        resource_name = "role-identifier"

    def to_representation(self, instance):
        """
        Ensure 'type' is used in the output instead of 'resource_type'.
        """
        representation = super().to_representation(instance)
        representation["type"] = representation.pop("resource_type", None)
        return representation

    def to_internal_value(self, data):
        """
        Map 'type' back to 'resource_type' during input.
        """
        data["resource_type"] = data.pop("type", None)
        return super().to_internal_value(data)


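The identifier serializer above exists so relationship payloads can use standard JSON:API resource identifier objects. The wire format it parses looks like this (the UUIDs are made up for illustration):

# On input, to_internal_value() maps each item's "type" key onto the
# serializer's "resource_type" field; to_representation() reverses it.
roles_payload = [
    {"type": "roles", "id": "2f9d2b1c-9c01-4b9e-8d1a-000000000001"},
    {"type": "roles", "id": "2f9d2b1c-9c01-4b9e-8d1a-000000000002"},
]
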
class UserRoleRelationshipSerializer(RLSSerializer, BaseWriteSerializer):
    """
    Serializer for modifying user memberships
    """

    roles = serializers.ListField(
        child=RoleResourceIdentifierSerializer(),
        help_text="List of resource identifier objects representing roles.",
    )

    def create(self, validated_data):
        role_ids = [item["id"] for item in validated_data["roles"]]
        roles = Role.objects.filter(id__in=role_ids)
        tenant_id = self.context.get("tenant_id")

        new_relationships = [
            UserRoleRelationship(
                user=self.context.get("user"), role=r, tenant_id=tenant_id
            )
            for r in roles
        ]
        UserRoleRelationship.objects.bulk_create(new_relationships)

        return self.context.get("user")

    def update(self, instance, validated_data):
        role_ids = [item["id"] for item in validated_data["roles"]]
        roles = Role.objects.filter(id__in=role_ids)
        tenant_id = self.context.get("tenant_id")

        instance.roles.clear()
        new_relationships = [
            UserRoleRelationship(user=instance, role=r, tenant_id=tenant_id)
            for r in roles
        ]
        UserRoleRelationship.objects.bulk_create(new_relationships)

        return instance

    class Meta:
        model = UserRoleRelationship
        fields = ["id", "roles"]


# Tasks
class TaskBase(serializers.ModelSerializer):
    state_mapping = {
@@ -358,89 +445,199 @@ class MembershipSerializer(serializers.ModelSerializer):

# Provider Groups
class ProviderGroupSerializer(RLSSerializer, BaseWriteSerializer):
    providers = serializers.ResourceRelatedField(many=True, read_only=True)
    providers = serializers.ResourceRelatedField(
        queryset=Provider.objects.all(), many=True, required=False
    )
    roles = serializers.ResourceRelatedField(
        queryset=Role.objects.all(), many=True, required=False
    )

    def validate(self, attrs):
        tenant = self.context["tenant_id"]
        name = attrs.get("name", self.instance.name if self.instance else None)

        # Exclude the current instance when checking for uniqueness during updates
        queryset = ProviderGroup.objects.filter(tenant=tenant, name=name)
        if self.instance:
            queryset = queryset.exclude(pk=self.instance.pk)

        if queryset.exists():
        if ProviderGroup.objects.filter(name=attrs.get("name")).exists():
            raise serializers.ValidationError(
                {
                    "name": "A provider group with this name already exists for this tenant."
                }
                {"name": "A provider group with this name already exists."}
            )

        return super().validate(attrs)

    class Meta:
        model = ProviderGroup
        fields = ["id", "name", "inserted_at", "updated_at", "providers", "url"]
        read_only_fields = ["id", "inserted_at", "updated_at"]
        fields = [
            "id",
            "name",
            "inserted_at",
            "updated_at",
            "providers",
            "roles",
            "url",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
            "roles": {"read_only": True},
            "url": {"read_only": True},
        }


class ProviderGroupIncludedSerializer(RLSSerializer, BaseWriteSerializer):
class ProviderGroupIncludedSerializer(ProviderGroupSerializer):
    class Meta:
        model = ProviderGroup
        fields = ["id", "name"]


class ProviderGroupUpdateSerializer(RLSSerializer, BaseWriteSerializer):
    """
    Serializer for updating the ProviderGroup model.
    Only allows "name" field to be updated.
    """

    class Meta:
        model = ProviderGroup
        fields = ["id", "name"]


class ProviderGroupMembershipUpdateSerializer(RLSSerializer, BaseWriteSerializer):
    """
    Serializer for modifying provider group memberships
    """

    provider_ids = serializers.ListField(
        child=serializers.UUIDField(),
        help_text="List of provider UUIDs to add to the group",
class ProviderGroupCreateSerializer(ProviderGroupSerializer):
    providers = serializers.ResourceRelatedField(
        queryset=Provider.objects.all(), many=True, required=False
    )
    roles = serializers.ResourceRelatedField(
        queryset=Role.objects.all(), many=True, required=False
    )

    def validate(self, attrs):
        tenant_id = self.context["tenant_id"]
        provider_ids = attrs.get("provider_ids", [])
    class Meta:
        model = ProviderGroup
        fields = [
            "id",
            "name",
            "inserted_at",
            "updated_at",
            "providers",
            "roles",
        ]

        existing_provider_ids = set(
            Provider.objects.filter(
                id__in=provider_ids, tenant_id=tenant_id
            ).values_list("id", flat=True)
    def create(self, validated_data):
        providers = validated_data.pop("providers", [])
        roles = validated_data.pop("roles", [])
        tenant_id = self.context.get("tenant_id")
        provider_group = ProviderGroup.objects.create(
            tenant_id=tenant_id, **validated_data
        )
        provided_provider_ids = set(provider_ids)

        missing_provider_ids = provided_provider_ids - existing_provider_ids

        if missing_provider_ids:
            raise serializers.ValidationError(
                {
                    "provider_ids": f"The following provider IDs do not exist: {', '.join(str(id) for id in missing_provider_ids)}"
                }
        through_model_instances = [
            ProviderGroupMembership(
                provider_group=provider_group,
                provider=provider,
                tenant_id=tenant_id,
            )
            for provider in providers
        ]
        ProviderGroupMembership.objects.bulk_create(through_model_instances)

        return super().validate(attrs)
        through_model_instances = [
            RoleProviderGroupRelationship(
                provider_group=provider_group,
                role=role,
                tenant_id=tenant_id,
            )
            for role in roles
        ]
        RoleProviderGroupRelationship.objects.bulk_create(through_model_instances)

        return provider_group


class ProviderGroupUpdateSerializer(ProviderGroupSerializer):
    def update(self, instance, validated_data):
        tenant_id = self.context.get("tenant_id")

        if "providers" in validated_data:
            providers = validated_data.pop("providers")
            instance.providers.clear()
            through_model_instances = [
                ProviderGroupMembership(
                    provider_group=instance,
                    provider=provider,
                    tenant_id=tenant_id,
                )
                for provider in providers
            ]
            ProviderGroupMembership.objects.bulk_create(through_model_instances)

        if "roles" in validated_data:
            roles = validated_data.pop("roles")
            instance.roles.clear()
            through_model_instances = [
                RoleProviderGroupRelationship(
                    provider_group=instance,
                    role=role,
                    tenant_id=tenant_id,
                )
                for role in roles
            ]
            RoleProviderGroupRelationship.objects.bulk_create(through_model_instances)

        return super().update(instance, validated_data)


class ProviderResourceIdentifierSerializer(serializers.Serializer):
    resource_type = serializers.CharField(source="type")
    id = serializers.UUIDField()

    class JSONAPIMeta:
        resource_name = "provider-identifier"

    def to_representation(self, instance):
        """
        Ensure 'type' is used in the output instead of 'resource_type'.
        """
        representation = super().to_representation(instance)
        representation["type"] = representation.pop("resource_type", None)
        return representation

    def to_internal_value(self, data):
        """
        Map 'type' back to 'resource_type' during input.
        """
        data["resource_type"] = data.pop("type", None)
        return super().to_internal_value(data)


class ProviderGroupMembershipSerializer(RLSSerializer, BaseWriteSerializer):
    """
    Serializer for modifying provider_group memberships
    """

    providers = serializers.ListField(
        child=ProviderResourceIdentifierSerializer(),
        help_text="List of resource identifier objects representing providers.",
    )

    def create(self, validated_data):
        provider_ids = [item["id"] for item in validated_data["providers"]]
        providers = Provider.objects.filter(id__in=provider_ids)
        tenant_id = self.context.get("tenant_id")

        new_relationships = [
            ProviderGroupMembership(
                provider_group=self.context.get("provider_group"),
                provider=p,
                tenant_id=tenant_id,
            )
            for p in providers
        ]
        ProviderGroupMembership.objects.bulk_create(new_relationships)

        return self.context.get("provider_group")

    def update(self, instance, validated_data):
        provider_ids = [item["id"] for item in validated_data["providers"]]
        providers = Provider.objects.filter(id__in=provider_ids)
        tenant_id = self.context.get("tenant_id")

        instance.providers.clear()
        new_relationships = [
            ProviderGroupMembership(
                provider_group=instance, provider=p, tenant_id=tenant_id
            )
            for p in providers
        ]
        ProviderGroupMembership.objects.bulk_create(new_relationships)

        return instance

    class Meta:
        model = ProviderGroupMembership
        fields = ["id", "provider_ids"]
        fields = ["id", "providers"]


# Providers
@@ -677,7 +874,7 @@ class ResourceSerializer(RLSSerializer):
        }
    )
    def get_tags(self, obj):
        return obj.get_tags()
        return obj.get_tags(self.context.get("tenant_id"))

    def get_fields(self):
        """`type` is a Python reserved keyword."""
@@ -708,6 +905,7 @@ class FindingSerializer(RLSSerializer):
            "raw_result",
            "inserted_at",
            "updated_at",
            "first_seen_at",
            "url",
            # Relationships
            "scan",
@@ -720,6 +918,7 @@ class FindingSerializer(RLSSerializer):
        }


# To be removed when the related endpoint is removed as well
class FindingDynamicFilterSerializer(serializers.Serializer):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
@@ -728,6 +927,19 @@ class FindingDynamicFilterSerializer(serializers.Serializer):
        resource_name = "finding-dynamic-filters"


class FindingMetadataSerializer(serializers.Serializer):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    resource_types = serializers.ListField(
        child=serializers.CharField(), allow_empty=True
    )
    # Temporarily disabled until we implement tag filtering in the UI
    # tags = serializers.JSONField(help_text="Tags are described as key-value pairs.")

    class Meta:
        resource_name = "findings-metadata"


# Provider secrets
class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
    @staticmethod
@@ -800,7 +1012,7 @@ class KubernetesProviderSecret(serializers.Serializer):

class AWSRoleAssumptionProviderSecret(serializers.Serializer):
    role_arn = serializers.CharField()
    external_id = serializers.CharField(required=False)
    external_id = serializers.CharField()
    role_session_name = serializers.CharField(required=False)
    session_duration = serializers.IntegerField(
        required=False, min_value=900, max_value=43200
@@ -847,6 +1059,10 @@ class AWSRoleAssumptionProviderSecret(serializers.Serializer):
                    "description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
                    "assumption.",
                },
                "external_id": {
                    "type": "string",
                    "description": "An identifier to enhance security for role assumption.",
                },
                "aws_access_key_id": {
                    "type": "string",
                    "description": "The AWS access key ID. Only required if the environment lacks pre-configured "
@@ -868,11 +1084,6 @@ class AWSRoleAssumptionProviderSecret(serializers.Serializer):
                    "default": 3600,
                    "description": "The duration (in seconds) for the role session.",
                },
                "external_id": {
                    "type": "string",
                    "description": "An optional identifier to enhance security for role assumption; may be "
                    "required by the role administrator.",
                },
                "role_session_name": {
                    "type": "string",
                    "description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
@@ -886,7 +1097,7 @@ class AWSRoleAssumptionProviderSecret(serializers.Serializer):
                    "pattern": "^[a-zA-Z0-9=,.@_-]+$",
                },
            },
            "required": ["role_arn"],
            "required": ["role_arn", "external_id"],
        },
        {
            "type": "object",
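With `external_id` now declared without `required=False` and added to the schema's `required` list, a minimal role-assumption secret must carry both values. A hypothetical payload (the ARN and external ID are made up for illustration):

aws_role_assumption_secret = {
    "role_arn": "arn:aws:iam::123456789012:role/ProwlerScanRole",  # required
    "external_id": "7a3f1c9e-made-up-external-id",  # now required as well
    "role_session_name": "prowler-scan",  # optional, pattern ^[a-zA-Z0-9=,.@_-]+$
    "session_duration": 3600,  # optional, 900..43200 seconds
}
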
@@ -1034,6 +1245,14 @@ class InvitationSerializer(RLSSerializer):
    Serializer for the Invitation model.
    """

    roles = serializers.ResourceRelatedField(many=True, queryset=Role.objects.all())

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        tenant_id = self.context.get("tenant_id")
        if tenant_id is not None:
            self.fields["roles"].queryset = Role.objects.filter(tenant_id=tenant_id)

    class Meta:
        model = Invitation
        fields = [
@@ -1043,6 +1262,7 @@ class InvitationSerializer(RLSSerializer):
            "email",
            "state",
            "token",
            "roles",
            "expires_at",
            "inviter",
            "url",
@@ -1050,6 +1270,14 @@ class InvitationSerializer(RLSSerializer):


class InvitationBaseWriteSerializer(BaseWriteSerializer):
    roles = serializers.ResourceRelatedField(many=True, queryset=Role.objects.all())

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        tenant_id = self.context.get("tenant_id")
        if tenant_id is not None:
            self.fields["roles"].queryset = Role.objects.filter(tenant_id=tenant_id)

    def validate_email(self, value):
        user = User.objects.filter(email=value).first()
        tenant_id = self.context["tenant_id"]
@@ -1086,31 +1314,62 @@ class InvitationCreateSerializer(InvitationBaseWriteSerializer, RLSSerializer):

    class Meta:
        model = Invitation
        fields = ["email", "expires_at", "state", "token", "inviter"]
        fields = ["email", "expires_at", "state", "token", "inviter", "roles"]
        extra_kwargs = {
            "token": {"read_only": True},
            "state": {"read_only": True},
            "inviter": {"read_only": True},
            "expires_at": {"required": False},
            "roles": {"required": False},
        }

    def create(self, validated_data):
        inviter = self.context.get("request").user
        tenant_id = self.context.get("tenant_id")
        validated_data["inviter"] = inviter
        return super().create(validated_data)
        roles = validated_data.pop("roles", [])
        invitation = super().create(validated_data)
        for role in roles:
            InvitationRoleRelationship.objects.create(
                role=role, invitation=invitation, tenant_id=tenant_id
            )

        return invitation

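A hedged illustration of the create flow above: the invitation row is created first, then one `InvitationRoleRelationship` row per requested role. A hypothetical JSON:API request body (the resource names mirror the serializer fields; the UUID is made up):

invitation_request = {
    "data": {
        "type": "invitations",
        "attributes": {"email": "new_member@prowler.com"},
        "relationships": {
            "roles": {
                "data": [{"type": "roles", "id": "c0ffee00-1111-2222-3333-444455556666"}]
            }
        },
    }
}
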
class InvitationUpdateSerializer(InvitationBaseWriteSerializer):
    roles = serializers.ResourceRelatedField(
        required=False, many=True, queryset=Role.objects.all()
    )

    class Meta:
        model = Invitation
        fields = ["id", "email", "expires_at", "state", "token"]
        fields = ["id", "email", "expires_at", "state", "token", "roles"]
        extra_kwargs = {
            "token": {"read_only": True},
            "state": {"read_only": True},
            "expires_at": {"required": False},
            "email": {"required": False},
            "roles": {"required": False},
        }

    def update(self, instance, validated_data):
        tenant_id = self.context.get("tenant_id")
        if "roles" in validated_data:
            roles = validated_data.pop("roles")
            instance.roles.clear()
            new_relationships = [
                InvitationRoleRelationship(
                    role=r, invitation=instance, tenant_id=tenant_id
                )
                for r in roles
            ]
            InvitationRoleRelationship.objects.bulk_create(new_relationships)

        invitation = super().update(instance, validated_data)

        return invitation


class InvitationAcceptSerializer(RLSSerializer):
    """Serializer for accepting an invitation."""
@@ -1122,6 +1381,218 @@ class InvitationAcceptSerializer(RLSSerializer):
        fields = ["invitation_token"]


# Roles


class RoleSerializer(RLSSerializer, BaseWriteSerializer):
    permission_state = serializers.SerializerMethodField()
    users = serializers.ResourceRelatedField(
        queryset=User.objects.all(), many=True, required=False
    )
    provider_groups = serializers.ResourceRelatedField(
        queryset=ProviderGroup.objects.all(), many=True, required=False
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        tenant_id = self.context.get("tenant_id")
        if tenant_id is not None:
            self.fields["users"].queryset = User.objects.filter(
                membership__tenant__id=tenant_id
            )
            self.fields["provider_groups"].queryset = ProviderGroup.objects.filter(
                tenant_id=self.context.get("tenant_id")
            )

    def get_permission_state(self, obj) -> str:
        return obj.permission_state

    def validate(self, attrs):
        if Role.objects.filter(name=attrs.get("name")).exists():
            raise serializers.ValidationError(
                {"name": "A role with this name already exists."}
            )

        if attrs.get("manage_providers"):
            attrs["unlimited_visibility"] = True

        # Prevent updates to the admin role
        if getattr(self.instance, "name", None) == "admin":
            raise serializers.ValidationError(
                {"name": "The admin role cannot be updated."}
            )

        return super().validate(attrs)

    class Meta:
        model = Role
        fields = [
            "id",
            "name",
            "manage_users",
            "manage_account",
            # Disable for the first release
            # "manage_billing",
            # "manage_integrations",
            # /Disable for the first release
            "manage_providers",
            "manage_scans",
            "permission_state",
            "unlimited_visibility",
            "inserted_at",
            "updated_at",
            "provider_groups",
            "users",
            "invitations",
            "url",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
            "url": {"read_only": True},
        }


class RoleCreateSerializer(RoleSerializer):
    provider_groups = serializers.ResourceRelatedField(
        many=True, queryset=ProviderGroup.objects.all(), required=False
    )
    users = serializers.ResourceRelatedField(
        many=True, queryset=User.objects.all(), required=False
    )

    def create(self, validated_data):
        provider_groups = validated_data.pop("provider_groups", [])
        users = validated_data.pop("users", [])
        tenant_id = self.context.get("tenant_id")
        role = Role.objects.create(tenant_id=tenant_id, **validated_data)

        through_model_instances = [
            RoleProviderGroupRelationship(
                role=role,
                provider_group=provider_group,
                tenant_id=tenant_id,
            )
            for provider_group in provider_groups
        ]
        RoleProviderGroupRelationship.objects.bulk_create(through_model_instances)

        through_model_instances = [
            UserRoleRelationship(
                role=role,
                user=user,
                tenant_id=tenant_id,
            )
            for user in users
        ]
        UserRoleRelationship.objects.bulk_create(through_model_instances)

        return role


class RoleUpdateSerializer(RoleSerializer):
    def update(self, instance, validated_data):
        tenant_id = self.context.get("tenant_id")

        if "provider_groups" in validated_data:
            provider_groups = validated_data.pop("provider_groups")
            instance.provider_groups.clear()
            through_model_instances = [
                RoleProviderGroupRelationship(
                    role=instance,
                    provider_group=provider_group,
                    tenant_id=tenant_id,
                )
                for provider_group in provider_groups
            ]
            RoleProviderGroupRelationship.objects.bulk_create(through_model_instances)

        if "users" in validated_data:
            users = validated_data.pop("users")
            instance.users.clear()
            through_model_instances = [
                UserRoleRelationship(
                    role=instance,
                    user=user,
                    tenant_id=tenant_id,
                )
                for user in users
            ]
            UserRoleRelationship.objects.bulk_create(through_model_instances)

        return super().update(instance, validated_data)


class ProviderGroupResourceIdentifierSerializer(serializers.Serializer):
    resource_type = serializers.CharField(source="type")
    id = serializers.UUIDField()

    class JSONAPIMeta:
        resource_name = "provider-group-identifier"

    def to_representation(self, instance):
        """
        Ensure 'type' is used in the output instead of 'resource_type'.
        """
        representation = super().to_representation(instance)
        representation["type"] = representation.pop("resource_type", None)
        return representation

    def to_internal_value(self, data):
        """
        Map 'type' back to 'resource_type' during input.
        """
        data["resource_type"] = data.pop("type", None)
        return super().to_internal_value(data)


class RoleProviderGroupRelationshipSerializer(RLSSerializer, BaseWriteSerializer):
    """
    Serializer for modifying role memberships
    """

    provider_groups = serializers.ListField(
        child=ProviderGroupResourceIdentifierSerializer(),
        help_text="List of resource identifier objects representing provider groups.",
    )

    def create(self, validated_data):
        provider_group_ids = [item["id"] for item in validated_data["provider_groups"]]
        provider_groups = ProviderGroup.objects.filter(id__in=provider_group_ids)
        tenant_id = self.context.get("tenant_id")

        new_relationships = [
            RoleProviderGroupRelationship(
                role=self.context.get("role"), provider_group=pg, tenant_id=tenant_id
            )
            for pg in provider_groups
        ]
        RoleProviderGroupRelationship.objects.bulk_create(new_relationships)

        return self.context.get("role")

    def update(self, instance, validated_data):
        provider_group_ids = [item["id"] for item in validated_data["provider_groups"]]
        provider_groups = ProviderGroup.objects.filter(id__in=provider_group_ids)
        tenant_id = self.context.get("tenant_id")

        instance.provider_groups.clear()
        new_relationships = [
            RoleProviderGroupRelationship(
                role=instance, provider_group=pg, tenant_id=tenant_id
            )
            for pg in provider_groups
        ]
        RoleProviderGroupRelationship.objects.bulk_create(new_relationships)

        return instance

    class Meta:
        model = RoleProviderGroupRelationship
        fields = ["id", "provider_groups"]


# Compliance overview


@@ -1265,7 +1736,7 @@ class OverviewProviderSerializer(serializers.Serializer):
            "properties": {
                "pass": {"type": "integer"},
                "fail": {"type": "integer"},
                "manual": {"type": "integer"},
                "muted": {"type": "integer"},
                "total": {"type": "integer"},
            },
        }
@@ -1274,7 +1745,7 @@ class OverviewProviderSerializer(serializers.Serializer):
        return {
            "pass": obj["findings_passed"],
            "fail": obj["findings_failed"],
            "manual": obj["findings_manual"],
            "muted": obj["findings_muted"],
            "total": obj["total_findings"],
        }

@@ -1334,6 +1805,24 @@ class OverviewSeveritySerializer(serializers.Serializer):
        return {"version": "v1"}


class OverviewServiceSerializer(serializers.Serializer):
    id = serializers.CharField(source="service")
    total = serializers.IntegerField()
    _pass = serializers.IntegerField()
    fail = serializers.IntegerField()
    muted = serializers.IntegerField()

    class JSONAPIMeta:
        resource_name = "services-overview"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields["pass"] = self.fields.pop("_pass")

    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}

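The `_pass` field above works around the fact that `pass` is a Python keyword and therefore cannot be written as a class attribute; the field is declared under a safe name and re-keyed to "pass" when the serializer is instantiated. A minimal, self-contained repro of the technique:

from rest_framework import serializers

class ServicesOverview(serializers.Serializer):
    _pass = serializers.IntegerField()  # "pass = ..." would be a SyntaxError
    fail = serializers.IntegerField()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Re-key the field so the serialized output uses "pass"; the field's
        # source stays "_pass", matching the aggregation alias on the data.
        self.fields["pass"] = self.fields.pop("_pass")

print(ServicesOverview({"_pass": 10, "fail": 2}).data)  # {'fail': 2, 'pass': 10}
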
# Schedules



@@ -1,3 +1,4 @@
from django.conf import settings
from django.urls import include, path
from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers
@@ -11,16 +12,20 @@ from api.v1.views import (
    InvitationViewSet,
    MembershipViewSet,
    OverviewViewSet,
    ProviderGroupProvidersRelationshipView,
    ProviderGroupViewSet,
    ProviderSecretViewSet,
    ProviderViewSet,
    ResourceViewSet,
    RoleProviderGroupRelationshipView,
    RoleViewSet,
    ScanViewSet,
    ScheduleViewSet,
    SchemaView,
    TaskViewSet,
    TenantMembersViewSet,
    TenantViewSet,
    UserRoleRelationshipView,
    UserViewSet,
)

@@ -29,11 +34,12 @@ router = routers.DefaultRouter(trailing_slash=False)
router.register(r"users", UserViewSet, basename="user")
router.register(r"tenants", TenantViewSet, basename="tenant")
router.register(r"providers", ProviderViewSet, basename="provider")
router.register(r"provider_groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"provider-groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"scans", ScanViewSet, basename="scan")
router.register(r"tasks", TaskViewSet, basename="task")
router.register(r"resources", ResourceViewSet, basename="resource")
router.register(r"findings", FindingViewSet, basename="finding")
router.register(r"roles", RoleViewSet, basename="role")
router.register(
    r"compliance-overviews", ComplianceOverviewViewSet, basename="complianceoverview"
)
@@ -80,9 +86,33 @@ urlpatterns = [
        InvitationAcceptViewSet.as_view({"post": "accept"}),
        name="invitation-accept",
    ),
    path(
        "roles/<uuid:pk>/relationships/provider_groups",
        RoleProviderGroupRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="role-provider-groups-relationship",
    ),
    path(
        "users/<uuid:pk>/relationships/roles",
        UserRoleRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="user-roles-relationship",
    ),
    path(
        "provider-groups/<uuid:pk>/relationships/providers",
        ProviderGroupProvidersRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="provider_group-providers-relationship",
    ),
    path("", include(router.urls)),
    path("", include(tenants_router.urls)),
    path("", include(users_router.urls)),
    path("schema", SchemaView.as_view(), name="schema"),
    path("docs", SpectacularRedocView.as_view(url_name="schema"), name="docs"),
]

if settings.DEBUG:
    urlpatterns += [path("silk/", include("silk.urls", namespace="silk"))]

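The three new `relationships` routes follow the JSON:API convention of addressing a to-many relationship directly. A hedged usage sketch with the DRF test client; the token and UUIDs are made up, the `/api/v1` prefix matches the tests, and the body shape is an assumption based on the relationship serializers above:

from rest_framework.test import APIClient

client = APIClient()
client.credentials(HTTP_AUTHORIZATION="Bearer test-access-token")  # hypothetical token
client.patch(
    "/api/v1/users/b5f7c9a0-1111-2222-3333-444455556666/relationships/roles",
    data={"data": [{"type": "roles", "id": "0a1b2c3d-1111-2222-3333-444455556666"}]},
    format="vnd.api+json",
)
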
File diff suppressed because it is too large
@@ -1,10 +1,21 @@
from celery import Celery, Task
from config.env import env

BROKER_VISIBILITY_TIMEOUT = env.int("DJANGO_BROKER_VISIBILITY_TIMEOUT", default=86400)

celery_app = Celery("tasks")

celery_app.config_from_object("django.conf:settings", namespace="CELERY")
celery_app.conf.update(result_extended=True, result_expires=None)

celery_app.conf.broker_transport_options = {
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
celery_app.conf.result_backend_transport_options = {
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
celery_app.conf.visibility_timeout = BROKER_VISIBILITY_TIMEOUT

celery_app.autodiscover_tasks(["api"])


@@ -35,10 +46,10 @@ class RLSTask(Task):
            **options,
        )
        task_result_instance = TaskResult.objects.get(task_id=result.task_id)
        from api.db_utils import tenant_transaction
        from api.db_utils import rls_transaction

        tenant_id = kwargs.get("tenant_id")
        with tenant_transaction(tenant_id):
        with rls_transaction(tenant_id):
            APITask.objects.create(
                id=task_result_instance.task_id,
                tenant_id=tenant_id,

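A hedged sketch (not the repository's actual task code) of how a task built on `RLSTask` can scope its queries: the `tenant_id` kwarg that `apply_async` records above is the same one the task body uses to open an RLS transaction. Names such as `delete_provider_task` follow the tests; the body is an assumption:

from api.db_utils import rls_transaction
from api.models import Provider  # assumed import path

@celery_app.task(base=RLSTask, name="provider-delete")
def delete_provider_task(tenant_id: str, provider_id: str):
    with rls_transaction(tenant_id):
        # Every query in this block runs with api.tenant_id set, so row-level
        # security only exposes rows belonging to this tenant.
        Provider.objects.filter(id=provider_id).delete()
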
@@ -1,7 +1,6 @@
from config.django.base import *  # noqa
from config.env import env


DEBUG = env.bool("DJANGO_DEBUG", default=True)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["*"])

@@ -38,3 +37,9 @@ REST_FRAMEWORK["DEFAULT_FILTER_BACKENDS"] = tuple(  # noqa: F405
) + ("api.filters.CustomDjangoFilterBackend",)

SECRETS_ENCRYPTION_KEY = "ZMiYVo7m4Fbe2eXXPyrwxdJss2WSalXSv3xHBcJkPl0="

MIDDLEWARE += [  # noqa: F405
    "silk.middleware.SilkyMiddleware",
]

INSTALLED_APPS += ["silk"]  # noqa: F405

@@ -1,7 +1,6 @@
from config.django.base import *  # noqa
from config.env import env


DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.1"])


@@ -1,7 +1,6 @@
from config.django.base import *  # noqa
from config.env import env


DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.1"])

@@ -10,8 +9,8 @@ DATABASES = {
    "default": {
        "ENGINE": "psqlextra.backend",
        "NAME": "prowler_db_test",
        "USER": env("POSTGRES_USER", default="prowler"),
        "PASSWORD": env("POSTGRES_PASSWORD", default="S3cret"),
        "USER": env("POSTGRES_USER", default="prowler_admin"),
        "PASSWORD": env("POSTGRES_PASSWORD", default="postgres"),
        "HOST": env("POSTGRES_HOST", default="localhost"),
        "PORT": env("POSTGRES_PORT", default="5432"),
    },

@@ -1,35 +1,39 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from django.conf import settings
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from django.db import connections as django_connections, connection as django_connection
|
||||
from django.db import connection as django_connection
|
||||
from django.db import connections as django_connections
|
||||
from django.urls import reverse
|
||||
from django_celery_results.models import TaskResult
|
||||
from prowler.lib.check.models import Severity
|
||||
from prowler.lib.outputs.finding import Status
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APIClient
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
Finding,
|
||||
)
|
||||
from api.models import (
|
||||
User,
|
||||
Invitation,
|
||||
Membership,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
ProviderSecret,
|
||||
Resource,
|
||||
ResourceTag,
|
||||
Role,
|
||||
Scan,
|
||||
ScanSummary,
|
||||
StateChoices,
|
||||
Task,
|
||||
Membership,
|
||||
ProviderSecret,
|
||||
Invitation,
|
||||
ComplianceOverview,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
from api.rls import Tenant
|
||||
from api.v1.serializers import TokenSerializer
|
||||
from prowler.lib.check.models import Severity
|
||||
from prowler.lib.outputs.finding import Status
|
||||
|
||||
API_JSON_CONTENT_TYPE = "application/vnd.api+json"
|
||||
NO_TENANT_HTTP_STATUS = status.HTTP_401_UNAUTHORIZED
|
||||
@@ -83,8 +87,150 @@ def create_test_user(django_db_setup, django_db_blocker):
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def create_test_user_rbac(django_db_setup, django_db_blocker, tenants_fixture):
|
||||
with django_db_blocker.unblock():
|
||||
user = User.objects.create_user(
|
||||
name="testing",
|
||||
email="rbac@rbac.com",
|
||||
password=TEST_PASSWORD,
|
||||
)
|
||||
tenant = tenants_fixture[0]
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
Role.objects.create(
|
||||
name="admin",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=True,
|
||||
manage_account=True,
|
||||
manage_billing=True,
|
||||
manage_providers=True,
|
||||
manage_integrations=True,
|
||||
manage_scans=True,
|
||||
unlimited_visibility=True,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=user,
|
||||
role=Role.objects.get(name="admin"),
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def create_test_user_rbac_no_roles(django_db_setup, django_db_blocker, tenants_fixture):
|
||||
with django_db_blocker.unblock():
|
||||
user = User.objects.create_user(
|
||||
name="testing",
|
||||
email="rbac_noroles@rbac.com",
|
||||
password=TEST_PASSWORD,
|
||||
)
|
||||
tenant = tenants_fixture[0]
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def create_test_user_rbac_limited(django_db_setup, django_db_blocker):
|
||||
with django_db_blocker.unblock():
|
||||
user = User.objects.create_user(
|
||||
name="testing_limited",
|
||||
email="rbac_limited@rbac.com",
|
||||
password=TEST_PASSWORD,
|
||||
)
|
||||
tenant = Tenant.objects.create(
|
||||
name="Tenant Test",
|
||||
)
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
Role.objects.create(
|
||||
name="limited",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=False,
|
||||
manage_account=False,
|
||||
manage_billing=False,
|
||||
manage_providers=False,
|
||||
manage_integrations=False,
|
||||
manage_scans=False,
|
||||
unlimited_visibility=False,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=user,
|
||||
role=Role.objects.get(name="limited"),
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client(create_test_user, tenants_fixture, client):
|
||||
def authenticated_client_rbac(create_test_user_rbac, tenants_fixture, client):
|
||||
client.user = create_test_user_rbac
|
||||
tenant_id = tenants_fixture[0].id
|
||||
serializer = TokenSerializer(
|
||||
data={
|
||||
"type": "tokens",
|
||||
"email": "rbac@rbac.com",
|
||||
"password": TEST_PASSWORD,
|
||||
"tenant_id": tenant_id,
|
||||
}
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client_rbac_noroles(
|
||||
create_test_user_rbac_no_roles, tenants_fixture, client
|
||||
):
|
||||
client.user = create_test_user_rbac_no_roles
|
||||
serializer = TokenSerializer(
|
||||
data={
|
||||
"type": "tokens",
|
||||
"email": "rbac_noroles@rbac.com",
|
||||
"password": TEST_PASSWORD,
|
||||
}
|
||||
)
|
||||
serializer.is_valid()
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client_no_permissions_rbac(
|
||||
create_test_user_rbac_limited, tenants_fixture, client
|
||||
):
|
||||
client.user = create_test_user_rbac_limited
|
||||
serializer = TokenSerializer(
|
||||
data={
|
||||
"type": "tokens",
|
||||
"email": "rbac_limited@rbac.com",
|
||||
"password": TEST_PASSWORD,
|
||||
}
|
||||
)
|
||||
serializer.is_valid()
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client(
|
||||
create_test_user, tenants_fixture, set_user_admin_roles_fixture, client
|
||||
):
|
||||
client.user = create_test_user
|
||||
serializer = TokenSerializer(
|
||||
data={"type": "tokens", "email": TEST_USER, "password": TEST_PASSWORD}
|
||||
@@ -104,6 +250,7 @@ def authenticated_api_client(create_test_user, tenants_fixture):
|
||||
serializer.is_valid()
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
|
||||
return client
|
||||
|
||||
|
||||
@@ -128,13 +275,37 @@ def tenants_fixture(create_test_user):
|
||||
tenant3 = Tenant.objects.create(
|
||||
name="Tenant Three",
|
||||
)
|
||||
|
||||
return tenant1, tenant2, tenant3
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def set_user_admin_roles_fixture(create_test_user, tenants_fixture):
    user = create_test_user
    for tenant in tenants_fixture[:2]:
        with rls_transaction(str(tenant.id)):
            role = Role.objects.create(
                name="admin",
                tenant_id=tenant.id,
                manage_users=True,
                manage_account=True,
                manage_billing=True,
                manage_providers=True,
                manage_integrations=True,
                manage_scans=True,
                unlimited_visibility=True,
            )
            UserRoleRelationship.objects.create(
                user=user,
                role=role,
                tenant_id=tenant.id,
            )


@pytest.fixture
def invitations_fixture(create_test_user, tenants_fixture):
    user = create_test_user
    *_, tenant = tenants_fixture
    tenant = tenants_fixture[0]
    valid_invitation = Invitation.objects.create(
        email="testing@prowler.com",
        state=Invitation.State.PENDING,
@@ -153,6 +324,20 @@ def invitations_fixture(create_test_user, tenants_fixture):
    return valid_invitation, expired_invitation


@pytest.fixture
def users_fixture(django_user_model):
    user1 = User.objects.create_user(
        name="user1", email="test_unit0@prowler.com", password="S3cret"
    )
    user2 = User.objects.create_user(
        name="user2", email="test_unit1@prowler.com", password="S3cret"
    )
    user3 = User.objects.create_user(
        name="user3", email="test_unit2@prowler.com", password="S3cret"
    )
    return user1, user2, user3


@pytest.fixture
def providers_fixture(tenants_fixture):
    tenant, *_ = tenants_fixture
@@ -210,6 +395,74 @@ def provider_groups_fixture(tenants_fixture):
    return pgroup1, pgroup2, pgroup3


@pytest.fixture
def admin_role_fixture(tenants_fixture):
    tenant, *_ = tenants_fixture

    return Role.objects.get_or_create(
        name="admin",
        tenant_id=tenant.id,
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )[0]


@pytest.fixture
def roles_fixture(tenants_fixture):
    tenant, *_ = tenants_fixture
    role1 = Role.objects.create(
        name="Role One",
        tenant_id=tenant.id,
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=False,
        manage_scans=True,
        unlimited_visibility=False,
    )
    role2 = Role.objects.create(
        name="Role Two",
        tenant_id=tenant.id,
        manage_users=False,
        manage_account=False,
        manage_billing=False,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )
    role3 = Role.objects.create(
        name="Role Three",
        tenant_id=tenant.id,
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )
    role4 = Role.objects.create(
        name="Role Four",
        tenant_id=tenant.id,
        manage_users=False,
        manage_account=False,
        manage_billing=False,
        manage_providers=False,
        manage_integrations=False,
        manage_scans=False,
        unlimited_visibility=False,
    )

    return role1, role2, role3, role4


@pytest.fixture
def provider_secret_fixture(providers_fixture):
    return tuple(
@@ -373,6 +626,7 @@ def findings_fixture(scans_fixture, resources_fixture):
            "CheckId": "test_check_id",
            "Description": "test description apple sauce",
        },
        first_seen_at="2024-01-02T00:00:00Z",
    )

    finding1.add_resources([resource1])
@@ -398,6 +652,7 @@ def findings_fixture(scans_fixture, resources_fixture):
            "CheckId": "test_check_id",
            "Description": "test description orange juice",
        },
        first_seen_at="2024-01-02T00:00:00Z",
    )

    finding2.add_resources([resource2])
@@ -537,10 +792,107 @@ def get_api_tokens(
        data=json_body,
        format="vnd.api+json",
    )
    return response.json()["data"]["attributes"]["access"], response.json()["data"][
        "attributes"
    ]["refresh"]
    return (
        response.json()["data"]["attributes"]["access"],
        response.json()["data"]["attributes"]["refresh"],
    )


@pytest.fixture
def scan_summaries_fixture(tenants_fixture, providers_fixture):
    tenant = tenants_fixture[0]
    provider = providers_fixture[0]
    scan = Scan.objects.create(
        name="overview scan",
        provider=provider,
        trigger=Scan.TriggerChoices.MANUAL,
        state=StateChoices.COMPLETED,
        tenant=tenant,
    )

    ScanSummary.objects.create(
        tenant=tenant,
        check_id="check1",
        service="service1",
        severity="high",
        region="region1",
        _pass=1,
        fail=0,
        muted=0,
        total=1,
        new=1,
        changed=0,
        unchanged=0,
        fail_new=0,
        fail_changed=0,
        pass_new=1,
        pass_changed=0,
        muted_new=0,
        muted_changed=0,
        scan=scan,
    )

    ScanSummary.objects.create(
        tenant=tenant,
        check_id="check1",
        service="service1",
        severity="high",
        region="region2",
        _pass=0,
        fail=1,
        muted=1,
        total=2,
        new=2,
        changed=0,
        unchanged=0,
        fail_new=1,
        fail_changed=0,
        pass_new=0,
        pass_changed=0,
        muted_new=1,
        muted_changed=0,
        scan=scan,
    )

    ScanSummary.objects.create(
        tenant=tenant,
        check_id="check2",
        service="service2",
        severity="critical",
        region="region1",
        _pass=1,
        fail=0,
        muted=0,
        total=1,
        new=1,
        changed=0,
        unchanged=0,
        fail_new=0,
        fail_changed=0,
        pass_new=1,
        pass_changed=0,
        muted_new=0,
        muted_changed=0,
        scan=scan,
    )


def get_authorization_header(access_token: str) -> dict:
    return {"Authorization": f"Bearer {access_token}"}


def pytest_collection_modifyitems(items):
    """Ensure test_rbac.py is executed first."""
    items.sort(key=lambda item: 0 if "test_rbac.py" in item.nodeid else 1)


def pytest_configure(config):
    # Apply the mock before the test session starts. This is necessary to avoid an
    # admin error when running the 0004_rbac_missing_admin_roles migration.
    patch("api.db_router.MainRouter.admin_db", new="default").start()


def pytest_unconfigure(config):
    # Stop all patches after the test session ends. This is necessary to avoid an
    # admin error when running the 0004_rbac_missing_admin_roles migration.
    patch.stopall()

@@ -1,8 +1,9 @@
from celery.utils.log import get_task_logger
from django.db import transaction

from api.db_utils import batch_delete
from api.models import Finding, Provider, Resource, Scan, ScanSummary
from api.db_router import MainRouter
from api.db_utils import batch_delete, rls_transaction
from api.models import Finding, Provider, Resource, Scan, ScanSummary, Tenant

logger = get_task_logger(__name__)

@@ -49,3 +50,26 @@ def delete_provider(pk: str):
    deletion_summary.update(provider_summary)

    return deletion_summary


def delete_tenant(pk: str):
    """
    Gracefully deletes an instance of a tenant along with its related data.

    Args:
        pk (str): The primary key of the Tenant instance to delete.

    Returns:
        dict: A dictionary with the count of deleted objects per model,
            including related models.
    """
    deletion_summary = {}

    for provider in Provider.objects.using(MainRouter.admin_db).filter(tenant_id=pk):
        with rls_transaction(pk):
            summary = delete_provider(provider.id)
            deletion_summary.update(summary)

    Tenant.objects.using(MainRouter.admin_db).filter(id=pk).delete()

    return deletion_summary

@@ -11,7 +11,7 @@ from api.compliance import (
    PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
    generate_scan_compliance,
)
from api.db_utils import tenant_transaction
from api.db_utils import rls_transaction
from api.models import (
    ComplianceOverview,
    Finding,
@@ -69,7 +69,7 @@ def _store_resources(
        - tuple[str, str]: A tuple containing the resource UID and region.

    """
    with tenant_transaction(tenant_id):
    with rls_transaction(tenant_id):
        resource_instance, created = Resource.objects.get_or_create(
            tenant_id=tenant_id,
            provider=provider_instance,
@@ -86,7 +86,7 @@ def _store_resources(
        resource_instance.service = finding.service_name
        resource_instance.type = finding.resource_type
        resource_instance.save()
    with tenant_transaction(tenant_id):
    with rls_transaction(tenant_id):
        tags = [
            ResourceTag.objects.get_or_create(
                tenant_id=tenant_id, key=key, value=value
@@ -116,13 +116,12 @@ def perform_prowler_scan(
        ValueError: If the provider cannot be connected.

    """
    generate_compliance = False
    check_status_by_region = {}
    exception = None
    unique_resources = set()
    start_time = time.time()

    with tenant_transaction(tenant_id):
    with rls_transaction(tenant_id):
        provider_instance = Provider.objects.get(pk=provider_id)
        scan_instance = Scan.objects.get(pk=scan_id)
        scan_instance.state = StateChoices.EXECUTING
@@ -130,7 +129,7 @@ def perform_prowler_scan(
        scan_instance.save()

    try:
        with tenant_transaction(tenant_id):
        with rls_transaction(tenant_id):
            try:
                prowler_provider = initialize_prowler_provider(provider_instance)
                provider_instance.connected = True
@@ -145,7 +144,6 @@ def perform_prowler_scan(
            )
        provider_instance.save()

        generate_compliance = provider_instance.provider != Provider.ProviderChoices.GCP
        prowler_scan = ProwlerScan(provider=prowler_provider, checks=checks_to_execute)

        resource_cache = {}
@@ -154,9 +152,12 @@ def perform_prowler_scan(

        for progress, findings in prowler_scan.scan():
            for finding in findings:
                if finding is None:
                    logger.error(f"None finding detected on scan {scan_id}.")
                    continue
                for attempt in range(CELERY_DEADLOCK_ATTEMPTS):
                    try:
                        with tenant_transaction(tenant_id):
                        with rls_transaction(tenant_id):
                            # Process resource
                            resource_uid = finding.resource_uid
                            if resource_uid not in resource_cache:
@@ -178,7 +179,10 @@ def perform_prowler_scan(

                            # Update resource fields if necessary
                            updated_fields = []
                            if resource_instance.region != finding.region:
                            if (
                                finding.region
                                and resource_instance.region != finding.region
                            ):
                                resource_instance.region = finding.region
                                updated_fields.append("region")
                            if resource_instance.service != finding.service_name:
@@ -188,7 +192,7 @@ def perform_prowler_scan(
                                resource_instance.type = finding.resource_type
                                updated_fields.append("type")
                            if updated_fields:
                                with tenant_transaction(tenant_id):
                                with rls_transaction(tenant_id):
                                    resource_instance.save(update_fields=updated_fields)
                    except (OperationalError, IntegrityError) as db_err:
                        if attempt < CELERY_DEADLOCK_ATTEMPTS - 1:
@@ -203,7 +207,7 @@ def perform_prowler_scan(

                # Update tags
                tags = []
                with tenant_transaction(tenant_id):
                with rls_transaction(tenant_id):
                    for key, value in finding.resource_tags.items():
                        tag_key = (key, value)
                        if tag_key not in tag_cache:
@@ -219,26 +223,32 @@ def perform_prowler_scan(
                unique_resources.add((resource_instance.uid, resource_instance.region))

                # Process finding
                with tenant_transaction(tenant_id):
                with rls_transaction(tenant_id):
                    finding_uid = finding.uid
                    last_first_seen_at = None
                    if finding_uid not in last_status_cache:
                        most_recent_finding = (
                            Finding.objects.filter(uid=finding_uid)
                            .order_by("-id")
                            .values("status")
                            Finding.all_objects.filter(
                                tenant_id=tenant_id, uid=finding_uid
                            )
                            .order_by("-inserted_at")
                            .values("status", "first_seen_at")
                            .first()
                        )
                        last_status = (
                            most_recent_finding["status"]
                            if most_recent_finding
                            else None
                        )
                        last_status_cache[finding_uid] = last_status
                        last_status = None
                        if most_recent_finding:
                            last_status = most_recent_finding["status"]
                            last_first_seen_at = most_recent_finding["first_seen_at"]
                        last_status_cache[finding_uid] = last_status, last_first_seen_at
                    else:
                        last_status = last_status_cache[finding_uid]
                        last_status, last_first_seen_at = last_status_cache[finding_uid]

                    status = FindingStatus[finding.status]
                    delta = _create_finding_delta(last_status, status)
                    # For findings that predate this change, the first finding seen with delta != "new" is assigned the current date as first_seen_at; successive findings with the same UID then inherit the date of the previous finding.
                    # For new findings (delta == "new"), first_seen_at is set to the current date the first time the finding is seen, and the following findings inherit that date.
                    if not last_first_seen_at:
                        last_first_seen_at = datetime.now(tz=timezone.utc)

                    # Create the finding
                    finding_instance = Finding.objects.create(
@@ -253,11 +263,12 @@ def perform_prowler_scan(
                        raw_result=finding.raw,
                        check_id=finding.check_id,
                        scan=scan_instance,
                        first_seen_at=last_first_seen_at,
                    )
                    finding_instance.add_resources([resource_instance])

                # Update compliance data if applicable
                if not generate_compliance or finding.status.value == "MUTED":
                if finding.status.value == "MUTED":
                    continue

                region_dict = check_status_by_region.setdefault(finding.region, {})
@@ -267,7 +278,7 @@ def perform_prowler_scan(
                region_dict[finding.check_id] = finding.status.value

            # Update scan progress
            with tenant_transaction(tenant_id):
            with rls_transaction(tenant_id):
                scan_instance.progress = progress
                scan_instance.save()

@@ -279,13 +290,13 @@ def perform_prowler_scan(
        scan_instance.state = StateChoices.FAILED

    finally:
        with tenant_transaction(tenant_id):
        with rls_transaction(tenant_id):
            scan_instance.duration = time.time() - start_time
            scan_instance.completed_at = datetime.now(tz=timezone.utc)
            scan_instance.unique_resource_count = len(unique_resources)
            scan_instance.save()

    if exception is None and generate_compliance:
    if exception is None:
        try:
            regions = prowler_provider.get_regions()
        except AttributeError:
@@ -330,7 +341,7 @@ def perform_prowler_scan(
                        total_requirements=compliance["total_requirements"],
                    )
                )
        with tenant_transaction(tenant_id):
        with rls_transaction(tenant_id):
            ComplianceOverview.objects.bulk_create(compliance_overview_objects)

    if exception is not None:
@@ -368,8 +379,8 @@ def aggregate_findings(tenant_id: str, scan_id: str):
        - muted_new: Muted findings with a delta of 'new'.
        - muted_changed: Muted findings with a delta of 'changed'.
    """
    with tenant_transaction(tenant_id):
        findings = Finding.objects.filter(scan_id=scan_id)
    with rls_transaction(tenant_id):
        findings = Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)

    aggregation = findings.values(
        "check_id",
@@ -464,7 +475,7 @@ def aggregate_findings(tenant_id: str, scan_id: str):
        ),
    )

    with tenant_transaction(tenant_id):
    with rls_transaction(tenant_id):
        scan_aggregations = {
            ScanSummary(
                tenant_id=tenant_id,

@@ -4,10 +4,10 @@ from celery import shared_task
from config.celery import RLSTask
from django_celery_beat.models import PeriodicTask
from tasks.jobs.connection import check_provider_connection
from tasks.jobs.deletion import delete_provider
from tasks.jobs.deletion import delete_provider, delete_tenant
from tasks.jobs.scan import aggregate_findings, perform_prowler_scan

from api.db_utils import tenant_transaction
from api.db_utils import rls_transaction
from api.decorators import set_tenant
from api.models import Provider, Scan

@@ -99,7 +99,7 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
    """
    task_id = self.request.id

    with tenant_transaction(tenant_id):
    with rls_transaction(tenant_id):
        provider_instance = Provider.objects.get(pk=provider_id)
        periodic_task_instance = PeriodicTask.objects.get(
            name=f"scan-perform-scheduled-{provider_id}"
@@ -134,3 +134,8 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
@shared_task(name="scan-summary")
def perform_scan_summary_task(tenant_id: str, scan_id: str):
    return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(name="tenant-deletion")
def delete_tenant_task(tenant_id: str):
    return delete_tenant(pk=tenant_id)

@@ -1,13 +1,13 @@
import pytest
from django.core.exceptions import ObjectDoesNotExist
from tasks.jobs.deletion import delete_provider
from tasks.jobs.deletion import delete_provider, delete_tenant

from api.models import Provider
from api.models import Provider, Tenant


@pytest.mark.django_db
class TestDeleteInstance:
    def test_delete_instance_success(self, providers_fixture):
class TestDeleteProvider:
    def test_delete_provider_success(self, providers_fixture):
        instance = providers_fixture[0]
        result = delete_provider(instance.id)

@@ -15,8 +15,46 @@ class TestDeleteInstance:
        with pytest.raises(ObjectDoesNotExist):
            Provider.objects.get(pk=instance.id)

    def test_delete_instance_does_not_exist(self):
    def test_delete_provider_does_not_exist(self):
        non_existent_pk = "babf6796-cfcc-4fd3-9dcf-88d012247645"

        with pytest.raises(ObjectDoesNotExist):
            delete_provider(non_existent_pk)


@pytest.mark.django_db
class TestDeleteTenant:
    def test_delete_tenant_success(self, tenants_fixture, providers_fixture):
        """
        Test successful deletion of a tenant and its related data.
        """
        tenant = tenants_fixture[0]
        providers = Provider.objects.filter(tenant_id=tenant.id)

        # Ensure the tenant and related providers exist before deletion
        assert Tenant.objects.filter(id=tenant.id).exists()
        assert providers.exists()

        # Call the function and validate the result
        deletion_summary = delete_tenant(tenant.id)

        assert deletion_summary is not None
        assert not Tenant.objects.filter(id=tenant.id).exists()
        assert not Provider.objects.filter(tenant_id=tenant.id).exists()

    def test_delete_tenant_with_no_providers(self, tenants_fixture):
        """
        Test deletion of a tenant with no related providers.
        """
        tenant = tenants_fixture[1]  # Assume this tenant has no providers
        providers = Provider.objects.filter(tenant_id=tenant.id)

        # Ensure the tenant exists but has no related providers
        assert Tenant.objects.filter(id=tenant.id).exists()
        assert not providers.exists()

        # Call the function and validate the result
        deletion_summary = delete_tenant(tenant.id)

        assert deletion_summary == {}  # No providers, so empty summary
        assert not Tenant.objects.filter(id=tenant.id).exists()

@@ -1,3 +1,4 @@
import uuid
from unittest.mock import MagicMock, patch

import pytest
@@ -26,7 +27,7 @@ class TestPerformScan:
        providers_fixture,
    ):
        with (
            patch("api.db_utils.tenant_transaction"),
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
@@ -165,10 +166,10 @@ class TestPerformScan:
        "tasks.jobs.scan.initialize_prowler_provider",
        side_effect=Exception("Connection error"),
    )
    @patch("api.db_utils.tenant_transaction")
    @patch("api.db_utils.rls_transaction")
    def test_perform_prowler_scan_no_connection(
        self,
        mock_tenant_transaction,
        mock_rls_transaction,
        mock_initialize_prowler_provider,
        mock_prowler_scan_class,
        tenants_fixture,
@@ -205,14 +206,14 @@ class TestPerformScan:

    @patch("api.models.ResourceTag.objects.get_or_create")
    @patch("api.models.Resource.objects.get_or_create")
    @patch("api.db_utils.tenant_transaction")
    @patch("api.db_utils.rls_transaction")
    def test_store_resources_new_resource(
        self,
        mock_tenant_transaction,
        mock_rls_transaction,
        mock_get_or_create_resource,
        mock_get_or_create_tag,
    ):
        tenant_id = "tenant123"
        tenant_id = uuid.uuid4()
        provider_instance = MagicMock()
        provider_instance.id = "provider456"

@@ -253,14 +254,14 @@ class TestPerformScan:

    @patch("api.models.ResourceTag.objects.get_or_create")
    @patch("api.models.Resource.objects.get_or_create")
    @patch("api.db_utils.tenant_transaction")
    @patch("api.db_utils.rls_transaction")
    def test_store_resources_existing_resource(
        self,
        mock_tenant_transaction,
        mock_rls_transaction,
        mock_get_or_create_resource,
        mock_get_or_create_tag,
    ):
        tenant_id = "tenant123"
        tenant_id = uuid.uuid4()
        provider_instance = MagicMock()
        provider_instance.id = "provider456"

@@ -310,14 +311,14 @@ class TestPerformScan:

    @patch("api.models.ResourceTag.objects.get_or_create")
    @patch("api.models.Resource.objects.get_or_create")
    @patch("api.db_utils.tenant_transaction")
    @patch("api.db_utils.rls_transaction")
    def test_store_resources_with_tags(
        self,
        mock_tenant_transaction,
        mock_rls_transaction,
        mock_get_or_create_resource,
        mock_get_or_create_tag,
    ):
        tenant_id = "tenant123"
        tenant_id = uuid.uuid4()
        provider_instance = MagicMock()
        provider_instance.id = "provider456"

codecov.yml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
component_management:
  individual_components:
    - component_id: "prowler"
      paths:
        - "prowler/**"
    - component_id: "api"
      paths:
        - "api/**"

comment:
  layout: "header, diff, flags, components"

contrib/aws/aws-sso-docker/readme.md (Normal file, 301 lines)
@@ -0,0 +1,301 @@
# AWS SSO to Prowler Automation Script

## Table of Contents
- [Introduction](#introduction)
- [Prerequisites](#prerequisites)
- [Setup](#setup)
- [Script Overview](#script-overview)
- [Usage](#usage)
- [Troubleshooting](#troubleshooting)
- [Customization](#customization)
- [Security Considerations](#security-considerations)
- [License](#license)

## Introduction

This repository provides a Bash script that automates the process of logging into AWS Single Sign-On (SSO), extracting temporary AWS credentials, and running **Prowler**—a security tool that performs AWS security best practices assessments—inside a Docker container using those credentials.

By following this guide, you can streamline your AWS security assessments, ensuring that you consistently apply best practices across your AWS accounts.

## Prerequisites

Before you begin, ensure that you have the following tools installed and properly configured on your system:

1. **AWS CLI v2**
   - AWS SSO support is available from AWS CLI version 2 onwards.
   - [Installation Guide](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html)

2. **jq**
   - A lightweight and flexible command-line JSON processor.
   - **macOS (Homebrew):**
     ```bash
     brew install jq
     ```
   - **Ubuntu/Debian:**
     ```bash
     sudo apt-get update
     sudo apt-get install -y jq
     ```
   - **Windows:**
     - [Download jq](https://stedolan.github.io/jq/download/)

3. **Docker**
   - Ensure Docker is installed and running on your system.
   - [Docker Installation Guide](https://docs.docker.com/get-docker/)

4. **AWS SSO Profile Configuration**
   - Ensure that you have configured an AWS CLI profile with SSO.
   - [Configuring AWS CLI with SSO](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html)

## Setup

1. **Clone the Repository**
   ```bash
   git clone https://github.com/your-username/aws-sso-prowler-automation.git
   cd aws-sso-prowler-automation
   ```

2. **Create the Automation Script**

   Create a new Bash script named `run_prowler_sso.sh` and make it executable.

   ```bash
   nano run_prowler_sso.sh
   chmod +x run_prowler_sso.sh
   ```

3. **Add the Script Content**

   Paste the script content into `run_prowler_sso.sh`. The complete, runnable script ships alongside this readme as `contrib/aws/aws-sso-docker/run_prowler_sso.sh`; a condensed outline is shown below.
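
   The following is only a condensed sketch of what the full script does, to give you a feel for its flow; it omits the error handling, report hosting, and output options of the real script and should not be used as a drop-in replacement:

   ```bash
   #!/bin/bash
   set -e

   PROFILE="twodragon"

   # 1. Log in to AWS SSO (opens the browser for authentication)
   aws sso login --profile "$PROFILE"

   # 2. Parse the accessToken from the most recent SSO cache file
   CACHE_FILE=$(ls -t ~/.aws/sso/cache/*.json | head -n 1)
   ACCESS_TOKEN=$(jq -r '.accessToken' "$CACHE_FILE")

   # 3. Exchange the token for temporary role credentials
   TEMP_CREDS=$(aws sso get-role-credentials \
     --role-name "$(aws configure get sso_role_name --profile "$PROFILE")" \
     --account-id "$(aws configure get sso_account_id --profile "$PROFILE")" \
     --access-token "$ACCESS_TOKEN" --profile "$PROFILE")

   # 4. Export the credentials and run Prowler in Docker
   export AWS_ACCESS_KEY_ID=$(echo "$TEMP_CREDS" | jq -r '.roleCredentials.accessKeyId')
   export AWS_SECRET_ACCESS_KEY=$(echo "$TEMP_CREDS" | jq -r '.roleCredentials.secretAccessKey')
   export AWS_SESSION_TOKEN=$(echo "$TEMP_CREDS" | jq -r '.roleCredentials.sessionToken')
   docker run --platform linux/amd64 \
     -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_SESSION_TOKEN \
     toniblyx/prowler
   ```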

4. **Configure AWS SSO Profile**

   Ensure that your AWS CLI profile (`twodragon` in this case) is correctly configured for SSO.

   ```bash
   aws configure sso --profile twodragon
   ```

   **Example Configuration Prompts:**
   ```
   SSO session name (Recommended): [twodragon]
   SSO start URL [None]: https://twodragon.awsapps.com/start
   SSO region [None]: ap-northeast-2
   SSO account ID [None]: 123456789012
   SSO role name [None]: ReadOnlyAccess
   CLI default client region [None]: ap-northeast-2
   CLI default output format [None]: json
   CLI profile name [twodragon]: twodragon
   ```

## Script Overview

The `run_prowler_sso.sh` script performs the following actions (steps 2 and 3 are the credential plumbing sketched in the outline above):

1. **AWS SSO Login:**
   - Initiates AWS SSO login for the specified profile.
   - Opens the SSO authorization page in the default browser for user authentication.

2. **Extract Temporary Credentials:**
   - Locates the most recent SSO cache file containing the `accessToken`.
   - Uses `jq` to parse and extract the `accessToken` from the cache file.
   - Retrieves the `sso_role_name` and `sso_account_id` from the AWS CLI configuration.
   - Obtains temporary AWS credentials (`AccessKeyId`, `SecretAccessKey`, `SessionToken`) using the extracted `accessToken`.

3. **Set Environment Variables:**
   - Exports the extracted AWS credentials as environment variables to be used by the Docker container.

4. **Run Prowler:**
   - Executes the **Prowler** Docker container, passing the AWS credentials as environment variables for security assessments.

## Usage

1. **Make the Script Executable**

   Ensure the script has execute permissions.

   ```bash
   chmod +x run_prowler_sso.sh
   ```

2. **Run the Script**

   Execute the script to start the AWS SSO login process and run Prowler.

   ```bash
   ./run_prowler_sso.sh
   ```

3. **Follow the Prompts**
   - A browser window will open prompting you to authenticate via AWS SSO.
   - Complete the authentication process in the browser.
   - Upon successful login, the script will extract temporary credentials and run Prowler.

4. **Review Prowler Output**
   - Prowler will analyze your AWS environment based on the specified checks and output the results directly in the terminal.

## Troubleshooting

If you encounter issues during the script execution, follow these steps to diagnose and resolve them.

### 1. Verify AWS CLI Version

Ensure you are using AWS CLI version 2 or later.

```bash
aws --version
```

**Expected Output:**
```
aws-cli/2.11.10 Python/3.9.12 Darwin/20.3.0 exe/x86_64 prompt/off
```

If you are not using version 2, [install or update AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).

### 2. Confirm AWS SSO Profile Configuration

Check that the `twodragon` profile is correctly configured.

```bash
aws configure list-profiles
```

**Expected Output:**
```
default
twodragon
```

Review the profile details:

```bash
aws configure get sso_start_url --profile twodragon
aws configure get sso_region --profile twodragon
aws configure get sso_account_id --profile twodragon
aws configure get sso_role_name --profile twodragon
```

Ensure all fields return the correct values.

### 3. Check SSO Cache File

Ensure that the SSO cache file contains a valid `accessToken`.

```bash
cat ~/.aws/sso/cache/*.json
```

**Example Content:**
```json
{
  "accessToken": "eyJz93a...k4laUWw",
  "expiresAt": "2024-12-22T14:07:55Z",
  "clientId": "example-client-id",
  "clientSecret": "example-client-secret",
  "startUrl": "https://twodragon.awsapps.com/start#"
}
```

If `accessToken` is `null` or missing, retry the AWS SSO login:

```bash
aws sso login --profile twodragon
```
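
A stale token is another common cause. As a quick check (an added tip, not part of the script itself), you can compare the `expiresAt` field shown above against the current UTC time:

```bash
# Print the expiry of the most recently cached token
jq -r '.expiresAt' "$(ls -t ~/.aws/sso/cache/*.json | head -n 1)"
# Print the current UTC time in the same format for comparison
date -u +%Y-%m-%dT%H:%M:%SZ
```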

### 4. Validate `jq` Installation

Ensure that `jq` is installed and functioning correctly.

```bash
jq --version
```

**Expected Output:**
```
jq-1.6
```

If `jq` is not installed, install it using the instructions in the [Prerequisites](#prerequisites) section.

### 5. Test Docker Environment Variables

Verify that the Docker container receives the AWS credentials correctly.

```bash
docker run --platform linux/amd64 \
  -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
  -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
  -e AWS_SESSION_TOKEN=$AWS_SESSION_TOKEN \
  toniblyx/prowler /bin/bash -c 'echo $AWS_ACCESS_KEY_ID; echo $AWS_SECRET_ACCESS_KEY; echo $AWS_SESSION_TOKEN'
```

**Expected Output:**
```
ASIA...
wJalrFEMI/K7MDENG/bPxRfiCY...
IQoJb3JpZ2luX2VjEHwaCXVz...
```

Ensure that none of the environment variables are empty.

### 6. Review Script Output

Run the script with debugging enabled to get detailed output.

1. **Enable Debugging in Script**

   Add `set -x` for verbose output.

   ```bash
   #!/bin/bash
   set -e
   set -x
   # ... rest of the script ...
   ```

2. **Run the Script**

   ```bash
   ./run_prowler_sso.sh
   ```

3. **Analyze Output**

   Look for any errors or unexpected values in the output to identify where the script is failing.

## Customization

You can modify the script to suit your specific needs, such as:

- **Changing the AWS Profile Name:**

  Update the `PROFILE` variable at the top of the script.

  ```bash
  PROFILE="your-profile-name"
  ```

- **Adding Prowler Options:**

  Pass additional options to Prowler for customized checks or output formats.

  ```bash
  docker run --platform linux/amd64 \
    -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
    -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
    -e AWS_SESSION_TOKEN=$AWS_SESSION_TOKEN \
    toniblyx/prowler -c check123 -M json
  ```

## Security Considerations

- **Handle Credentials Securely:**
  - Avoid sharing or exposing your AWS credentials.
  - Do not include sensitive information in logs or version control.

- **Script Permissions:**
  - Ensure the script file has appropriate permissions to prevent unauthorized access.

  ```bash
  chmod 700 run_prowler_sso.sh
  ```

- **Environment Variables:**
  - Be cautious when exporting credentials as environment variables.
  - Consider using more secure methods for credential management if necessary; at a minimum, clear the variables once you are done, as shown below.
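
  A minimal option, assuming the credentials are no longer needed after Prowler finishes, is to unset them in the same shell session:

  ```bash
  # Remove the temporary credentials from the current shell environment
  unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN
  ```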

## License

This project is licensed under the [MIT License](LICENSE).

contrib/aws/aws-sso-docker/run_prowler_sso.sh (Executable file, 136 lines)
@@ -0,0 +1,136 @@
#!/bin/bash
set -e

# Set the profile name
PROFILE="twodragon"

# Set the Prowler output directory
OUTPUT_DIR=~/prowler-output
mkdir -p "$OUTPUT_DIR"

# Set the port for the local web server
WEB_SERVER_PORT=8000

# ----------------------------------------------
# Functions
# ----------------------------------------------

# Function to open the HTML report in the default browser
open_report() {
    local report_path="$1"

    if [[ "$OSTYPE" == "darwin"* ]]; then
        open "$report_path"
    elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
        xdg-open "$report_path"
    elif [[ "$OSTYPE" == "msys" ]]; then
        start "" "$report_path"
    else
        echo "Automatic method to open Prowler HTML report is not supported on this OS."
        echo "Please open the report manually at: $report_path"
    fi
}

# Function to start a simple HTTP server to host the Prowler reports
start_web_server() {
    local directory="$1"
    local port="$2"

    echo "Starting local web server to host Prowler reports at http://localhost:$port"
    echo "Press Ctrl+C to stop the web server."

    # Change to the output directory
    cd "$directory"

    # Start the HTTP server in the foreground
    # Python 3 is required
    python3 -m http.server "$port"
}

# ----------------------------------------------
# Main Script
# ----------------------------------------------

# AWS SSO Login
echo "Logging into AWS SSO..."
aws sso login --profile "$PROFILE"

# Extract temporary credentials
echo "Extracting temporary credentials..."

# Find the most recently modified SSO cache file
CACHE_FILE=$(ls -t ~/.aws/sso/cache/*.json 2>/dev/null | head -n 1)
echo "Cache File: $CACHE_FILE"

if [ -z "$CACHE_FILE" ]; then
    echo "SSO cache file not found. Please ensure AWS SSO login was successful."
    exit 1
fi

# Extract accessToken using jq
ACCESS_TOKEN=$(jq -r '.accessToken' "$CACHE_FILE")
echo "Access Token: $ACCESS_TOKEN"

if [ -z "$ACCESS_TOKEN" ] || [ "$ACCESS_TOKEN" == "null" ]; then
    echo "Unable to extract accessToken. Please check your SSO login and cache file."
    exit 1
fi

# Extract role name and account ID from AWS CLI configuration
ROLE_NAME=$(aws configure get sso_role_name --profile "$PROFILE")
ACCOUNT_ID=$(aws configure get sso_account_id --profile "$PROFILE")
echo "Role Name: $ROLE_NAME"
echo "Account ID: $ACCOUNT_ID"

if [ -z "$ROLE_NAME" ] || [ -z "$ACCOUNT_ID" ]; then
    echo "Unable to extract sso_role_name or sso_account_id. Please check your profile configuration."
    exit 1
fi

# Obtain temporary credentials using AWS SSO
TEMP_CREDS=$(aws sso get-role-credentials \
    --role-name "$ROLE_NAME" \
    --account-id "$ACCOUNT_ID" \
    --access-token "$ACCESS_TOKEN" \
    --profile "$PROFILE")

echo "TEMP_CREDS: $TEMP_CREDS"

# Extract credentials from the JSON response
AWS_ACCESS_KEY_ID=$(echo "$TEMP_CREDS" | jq -r '.roleCredentials.accessKeyId')
AWS_SECRET_ACCESS_KEY=$(echo "$TEMP_CREDS" | jq -r '.roleCredentials.secretAccessKey')
AWS_SESSION_TOKEN=$(echo "$TEMP_CREDS" | jq -r '.roleCredentials.sessionToken')

# Verify that all credentials were extracted successfully
if [ -z "$AWS_ACCESS_KEY_ID" ] || [ -z "$AWS_SECRET_ACCESS_KEY" ] || [ -z "$AWS_SESSION_TOKEN" ]; then
    echo "Unable to extract temporary credentials."
    exit 1
fi

# Export AWS credentials as environment variables
export AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY
export AWS_SESSION_TOKEN

echo "AWS credentials have been set."

# Run Prowler in Docker container
echo "Running Prowler Docker container..."

docker run --platform linux/amd64 \
    -e AWS_ACCESS_KEY_ID="$AWS_ACCESS_KEY_ID" \
    -e AWS_SECRET_ACCESS_KEY="$AWS_SECRET_ACCESS_KEY" \
    -e AWS_SESSION_TOKEN="$AWS_SESSION_TOKEN" \
    -v "$OUTPUT_DIR":/home/prowler/output \
    toniblyx/prowler -M html -M csv -M json-ocsf --output-directory /home/prowler/output --output-filename prowler-output

echo "Prowler has finished running. Reports are saved in $OUTPUT_DIR."

# Open the HTML report in the default browser
REPORT_PATH="$OUTPUT_DIR/prowler-output.html"
echo "Opening Prowler HTML report..."
open_report "$REPORT_PATH" &

# Start the local web server to host the Prowler dashboard
# This will run in the foreground. To run it in the background, append an ampersand (&) at the end of the command.
start_web_server "$OUTPUT_DIR" "$WEB_SERVER_PORT"

contrib/k8s/helm/prowler-api/Chart.yaml (Normal file, 24 lines)
@@ -0,0 +1,24 @@
apiVersion: v2
name: prowler-api
description: A Helm chart for Kubernetes

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "5.1.1"

contrib/k8s/helm/prowler-api/templates/NOTES.txt (Normal file, 22 lines)
@@ -0,0 +1,22 @@
1. Get the application URL by running these commands:
{{- if .Values.ingress.enabled }}
{{- range $host := .Values.ingress.hosts }}
  {{- range .paths }}
  http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
  {{- end }}
{{- end }}
{{- else if contains "NodePort" .Values.service.type }}
  export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "prowler-api.fullname" . }})
  export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
  echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
  NOTE: It may take a few minutes for the LoadBalancer IP to be available.
  You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "prowler-api.fullname" . }}'
  export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "prowler-api.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
  echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
  export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "prowler-api.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
  export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
  echo "Visit http://127.0.0.1:8080 to use your application"
  kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

contrib/k8s/helm/prowler-api/templates/_helpers.tpl (Normal file, 62 lines)
@@ -0,0 +1,62 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "prowler-api.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "prowler-api.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}

{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "prowler-api.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Common labels
*/}}
{{- define "prowler-api.labels" -}}
helm.sh/chart: {{ include "prowler-api.chart" . }}
{{ include "prowler-api.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*
Selector labels
*/}}
{{- define "prowler-api.selectorLabels" -}}
app.kubernetes.io/name: {{ include "prowler-api.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*
Create the name of the service account to use
*/}}
{{- define "prowler-api.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "prowler-api.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

contrib/k8s/helm/prowler-api/templates/configmap.yaml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ include "prowler-api.fullname" . }}-config
  labels:
    {{- include "prowler-api.labels" . | nindent 4 }}
data:
  config.yaml: |-
    {{- toYaml .Values.mainConfig | nindent 4 }}

contrib/k8s/helm/prowler-api/templates/deployment.yaml (Normal file, 85 lines)
@@ -0,0 +1,85 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "prowler-api.fullname" . }}
  labels:
    {{- include "prowler-api.labels" . | nindent 4 }}
spec:
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "prowler-api.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      annotations:
        checksum/secrets: {{ include (print $.Template.BasePath "/secrets.yaml") . | sha256sum }}
        checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }}
        {{- with .Values.podAnnotations }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
      labels:
        {{- include "prowler-api.labels" . | nindent 8 }}
        {{- with .Values.podLabels }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      serviceAccountName: {{ include "prowler-api.serviceAccountName" . }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        {{- range $name,$config := .Values.containers }}
        {{- if $config.enabled }}
        - name: {{ $name }}
          securityContext:
            {{- toYaml $config.securityContext | nindent 12 }}
          image: "{{ $config.image.repository }}:{{ $config.image.tag | default $.Chart.AppVersion }}"
          imagePullPolicy: {{ $config.image.pullPolicy }}
          envFrom:
            - secretRef:
                name: {{ include "prowler-api.fullname" $ }}
          command:
            {{- toYaml $config.command | nindent 12 }}
          {{- if $config.ports }}
          ports:
            {{- toYaml $config.ports | nindent 12 }}
          {{- end }}
          livenessProbe:
            {{- toYaml $config.livenessProbe | nindent 12 }}
          readinessProbe:
            {{- toYaml $config.readinessProbe | nindent 12 }}
          resources:
            {{- toYaml $config.resources | nindent 12 }}
          volumeMounts:
            - name: {{ include "prowler-api.fullname" $ }}-config
              mountPath: {{ $.Values.releaseConfigRoot }}{{ $.Values.releaseConfigPath }}
              subPath: config.yaml
            {{- with .volumeMounts }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
        {{- end }}
        {{- end }}
      volumes:
        - name: {{ include "prowler-api.fullname" . }}-config
          configMap:
            name: {{ include "prowler-api.fullname" . }}-config
        {{- with .Values.volumes }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}

contrib/k8s/helm/prowler-api/templates/ingress.yaml (Normal file, 43 lines)
@@ -0,0 +1,43 @@
{{- if .Values.ingress.enabled -}}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: {{ include "prowler-api.fullname" . }}
  labels:
    {{- include "prowler-api.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- with .Values.ingress.className }}
  ingressClassName: {{ . }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    {{- range .Values.ingress.hosts }}
    - host: {{ .host | quote }}
      http:
        paths:
          {{- range .paths }}
          - path: {{ .path }}
            {{- with .pathType }}
            pathType: {{ . }}
            {{- end }}
            backend:
              service:
                name: {{ include "prowler-api.fullname" $ }}
                port:
                  number: {{ $.Values.service.port }}
          {{- end }}
    {{- end }}
{{- end }}

contrib/k8s/helm/prowler-api/templates/secrets.yaml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
apiVersion: v1
kind: Secret
metadata:
  name: {{ include "prowler-api.fullname" . }}
  labels:
    {{- include "prowler-api.labels" . | nindent 4 }}
type: Opaque
data:
  {{- range $k, $v := .Values.secrets }}
  {{ $k }}: {{ $v | toString | b64enc | quote }}
  {{- end }}

contrib/k8s/helm/prowler-api/templates/service.yaml (Normal file, 21 lines)
@@ -0,0 +1,21 @@
apiVersion: v1
kind: Service
metadata:
  name: {{ include "prowler-api.fullname" . }}
  labels:
    {{- include "prowler-api.labels" . | nindent 4 }}
spec:
  type: {{ .Values.service.type }}
  ports:
    {{- range $name,$config := .Values.containers }}
    {{- if $config.ports }}
    {{- range $p := $config.ports }}
    - port: {{ $p.containerPort }}
      targetPort: {{ $p.containerPort }}
      protocol: TCP
      name: {{ $config.name }}
    {{- end }}
    {{- end }}
    {{- end }}
  selector:
    {{- include "prowler-api.selectorLabels" . | nindent 4 }}

contrib/k8s/helm/prowler-api/templates/serviceaccount.yaml (Normal file, 13 lines)
@@ -0,0 +1,13 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
  name: {{ include "prowler-api.serviceAccountName" . }}
  labels:
    {{- include "prowler-api.labels" . | nindent 4 }}
  {{- with .Values.serviceAccount.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
automountServiceAccountToken: {{ .Values.serviceAccount.automount }}
{{- end }}

contrib/k8s/helm/prowler-api/values.yaml (Normal file, 625 lines)
@@ -0,0 +1,625 @@
|
||||
# Default values for prowler-api.
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
# This will set the replicaset count more information can be found here: https://kubernetes.io/docs/concepts/workloads/controllers/replicaset/
|
||||
replicaCount: 1
|
||||
|
||||
# This sets the container image more information can be found here: https://kubernetes.io/docs/concepts/containers/images/
|
||||
containers:
|
||||
prowler-api:
|
||||
enabled: true
|
||||
image:
|
||||
repository: prowlercloud/prowler-api
|
||||
pullPolicy: IfNotPresent
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8080
|
||||
protocol: TCP
|
||||
command: ["/home/prowler/docker-entrypoint.sh", "prod"]
|
||||
worker:
|
||||
enabled: true
|
||||
image:
|
||||
repository: prowlercloud/prowler-api
|
||||
pullPolicy: IfNotPresent
|
||||
command: ["/home/prowler/docker-entrypoint.sh", "worker"]
|
||||
worker-beat:
|
||||
enabled: true
|
||||
image:
|
||||
repository: prowlercloud/prowler-api
|
||||
pullPolicy: IfNotPresent
|
||||
command: ["../docker-entrypoint.sh", "beat"]
|
||||
|
||||
secrets:
|
||||
POSTGRES_HOST:
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_ADMIN_USER:
|
||||
POSTGRES_ADMIN_PASSWORD:
|
||||
POSTGRES_USER:
|
||||
POSTGRES_PASSWORD:
|
||||
POSTGRES_DB:
|
||||
# Valkey settings
|
||||
VALKEY_HOST: valkey-headless
|
||||
VALKEY_PORT: "6379"
|
||||
VALKEY_DB: "0"
|
||||
# Django settings
|
||||
DJANGO_ALLOWED_HOSTS: localhost,127.0.0.1,prowler-api
|
||||
DJANGO_BIND_ADDRESS: 0.0.0.0
|
||||
DJANGO_PORT: "8080"
|
||||
DJANGO_DEBUG: False
|
||||
DJANGO_SETTINGS_MODULE: config.django.production
|
||||
# Select one of [ndjson|human_readable]
|
||||
DJANGO_LOGGING_FORMATTER: human_readable
|
||||
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
|
||||
# Applies to both Django and Celery Workers
|
||||
DJANGO_LOGGING_LEVEL: INFO
|
||||
# Defaults to the maximum available based on CPU cores if not set.
|
||||
DJANGO_WORKERS: 2
|
||||
# Token lifetime is in minutes
|
||||
DJANGO_ACCESS_TOKEN_LIFETIME: "30"
|
||||
# Token lifetime is in minutes
|
||||
DJANGO_REFRESH_TOKEN_LIFETIME: "1440"
|
||||
DJANGO_CACHE_MAX_AGE: "3600"
|
||||
DJANGO_STALE_WHILE_REVALIDATE: "60"
|
||||
DJANGO_MANAGE_DB_PARTITIONS: "False"
|
||||
# openssl genrsa -out private.pem 2048
|
||||
DJANGO_TOKEN_SIGNING_KEY:
|
||||
# openssl rsa -in private.pem -pubout -out public.pem
|
||||
DJANGO_TOKEN_VERIFYING_KEY:
|
||||
# openssl rand -base64 32
|
||||
DJANGO_SECRETS_ENCRYPTION_KEY:
|
||||
DJANGO_BROKER_VISIBILITY_TIMEOUT: 86400
|
||||
|
||||
releaseConfigRoot: /home/prowler/.cache/pypoetry/virtualenvs/prowler-api-NnJNioq7-py3.12/lib/python3.12/site-packages/
|
||||
releaseConfigPath: prowler/config/config.yaml
|
||||
|
||||
mainConfig:
|
||||
# AWS Configuration
|
||||
aws:
|
||||
# AWS Global Configuration
|
||||
# aws.mute_non_default_regions --> Set to True to muted failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
|
||||
mute_non_default_regions: False
|
||||
# If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`:
|
||||
# Mutelist:
|
||||
# Accounts:
|
||||
# "*":
|
||||
# Checks:
|
||||
# "*":
|
||||
# Regions:
|
||||
# - "ap-southeast-1"
|
||||
# - "ap-southeast-2"
|
||||
# Resources:
|
||||
# - "*"
|
||||
|
||||
# AWS IAM Configuration
|
||||
# aws.iam_user_accesskey_unused --> CIS recommends 45 days
|
||||
max_unused_access_keys_days: 45
|
||||
# aws.iam_user_console_access_unused --> CIS recommends 45 days
|
||||
max_console_access_days: 45
|
||||
|
||||
# AWS EC2 Configuration
|
||||
# aws.ec2_elastic_ip_shodan
|
||||
# TODO: create common config
|
||||
shodan_api_key: null
|
||||
# aws.ec2_securitygroup_with_many_ingress_egress_rules --> by default is 50 rules
|
||||
max_security_group_rules: 50
|
||||
# aws.ec2_instance_older_than_specific_days --> by default is 6 months (180 days)
|
||||
max_ec2_instance_age_in_days: 180
|
||||
# aws.ec2_securitygroup_allow_ingress_from_internet_to_any_port
|
||||
# allowed network interface types for security groups open to the Internet
|
||||
ec2_allowed_interface_types:
|
||||
[
|
||||
"api_gateway_managed",
|
||||
"vpc_endpoint",
|
||||
]
|
||||
# allowed network interface owners for security groups open to the Internet
|
||||
ec2_allowed_instance_owners:
|
||||
[
|
||||
"amazon-elb"
|
||||
]
|
||||
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
|
||||
ec2_high_risk_ports:
|
||||
[
|
||||
25,
|
||||
110,
|
||||
135,
|
||||
143,
|
||||
445,
|
||||
3000,
|
||||
4333,
|
||||
5000,
|
||||
5500,
|
||||
8080,
|
||||
8088,
|
||||
]
|
||||
|
||||
# AWS ECS Configuration
|
||||
# aws.ecs_service_fargate_latest_platform_version
|
||||
fargate_linux_latest_version: "1.4.0"
|
||||
fargate_windows_latest_version: "1.0.0"
|
||||
|
||||
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
|
||||
# AWS SSM Configuration (aws.ssm_documents_set_as_public)
|
||||
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
|
||||
# Multi account environment: Any additional trusted account number should be added as a space separated list, e.g.
|
||||
# trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
|
||||
trusted_account_ids: []
|
||||
|
||||
# AWS Cloudwatch Configuration
|
||||
# aws.cloudwatch_log_group_retention_policy_specific_days_enabled --> by default is 365 days
|
||||
log_group_retention_days: 365
|
||||
|
||||
# AWS CloudFormation Configuration
|
||||
# cloudformation_stack_cdktoolkit_bootstrap_version --> by default is 21
|
||||
recommended_cdk_bootstrap_version: 21
|
||||
|
||||
# AWS AppStream Session Configuration
|
||||
# aws.appstream_fleet_session_idle_disconnect_timeout
|
||||
max_idle_disconnect_timeout_in_seconds: 600 # 10 Minutes
|
||||
# aws.appstream_fleet_session_disconnect_timeout
|
||||
max_disconnect_timeout_in_seconds: 300 # 5 Minutes
|
||||
# aws.appstream_fleet_maximum_session_duration
|
||||
max_session_duration_seconds: 36000 # 10 Hours
|
||||
|
||||
# AWS Lambda Configuration
|
||||
# aws.awslambda_function_using_supported_runtimes
|
||||
obsolete_lambda_runtimes:
|
||||
[
|
||||
"java8",
|
||||
"go1.x",
|
||||
"provided",
|
||||
"python3.6",
|
||||
"python2.7",
|
||||
"python3.7",
|
||||
"nodejs4.3",
|
||||
"nodejs4.3-edge",
|
||||
"nodejs6.10",
|
||||
"nodejs",
|
||||
"nodejs8.10",
|
||||
"nodejs10.x",
|
||||
"nodejs12.x",
|
||||
"nodejs14.x",
|
||||
"nodejs16.x",
|
||||
"dotnet5.0",
|
||||
"dotnet7",
|
||||
"dotnetcore1.0",
|
||||
"dotnetcore2.0",
|
||||
"dotnetcore2.1",
|
||||
"dotnetcore3.1",
|
||||
"ruby2.5",
|
||||
"ruby2.7",
|
||||
]
|
||||
# aws.awslambda_function_vpc_is_in_multi_azs
|
||||
lambda_min_azs: 2
|
||||
|
||||
# AWS Organizations
|
||||
# aws.organizations_scp_check_deny_regions
|
||||
# aws.organizations_enabled_regions: [
|
||||
# "eu-central-1",
|
||||
# "eu-west-1",
|
||||
# "us-east-1"
|
||||
# ]
|
||||
organizations_enabled_regions: []
|
||||
organizations_trusted_delegated_administrators: []
|
||||
|
||||
# AWS ECR
|
||||
# aws.ecr_repositories_scan_vulnerabilities_in_latest_image
|
||||
# CRITICAL
|
||||
# HIGH
|
||||
# MEDIUM
|
||||
ecr_repository_vulnerability_minimum_severity: "MEDIUM"
|
||||
|
||||
# AWS Trusted Advisor
|
||||
# aws.trustedadvisor_premium_support_plan_subscribed
|
||||
verify_premium_support_plans: True
|
||||
|
||||
# AWS CloudTrail Configuration
|
||||
# aws.cloudtrail_threat_detection_privilege_escalation
|
||||
threat_detection_privilege_escalation_threshold: 0.2 # Percentage of actions found to decide if it is an privilege_escalation attack event, by default is 0.2 (20%)
|
||||
threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
  threat_detection_privilege_escalation_actions:
    [
      "AddPermission",
      "AddRoleToInstanceProfile",
      "AddUserToGroup",
      "AssociateAccessPolicy",
      "AssumeRole",
      "AttachGroupPolicy",
      "AttachRolePolicy",
      "AttachUserPolicy",
      "ChangePassword",
      "CreateAccessEntry",
      "CreateAccessKey",
      "CreateDevEndpoint",
      "CreateEventSourceMapping",
      "CreateFunction",
      "CreateGroup",
      "CreateJob",
      "CreateKeyPair",
      "CreateLoginProfile",
      "CreatePipeline",
      "CreatePolicyVersion",
      "CreateRole",
      "CreateStack",
      "DeleteRolePermissionsBoundary",
      "DeleteRolePolicy",
      "DeleteUserPermissionsBoundary",
      "DeleteUserPolicy",
      "DetachRolePolicy",
      "DetachUserPolicy",
      "GetCredentialsForIdentity",
      "GetId",
      "GetPolicyVersion",
      "GetUserPolicy",
      "Invoke",
      "ModifyInstanceAttribute",
      "PassRole",
      "PutGroupPolicy",
      "PutPipelineDefinition",
      "PutRolePermissionsBoundary",
      "PutRolePolicy",
      "PutUserPermissionsBoundary",
      "PutUserPolicy",
      "ReplaceIamInstanceProfileAssociation",
      "RunInstances",
      "SetDefaultPolicyVersion",
      "UpdateAccessKey",
      "UpdateAssumeRolePolicy",
      "UpdateDevEndpoint",
      "UpdateEventSourceMapping",
      "UpdateFunctionCode",
      "UpdateJob",
      "UpdateLoginProfile",
    ]
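  # Illustrative threshold math (assuming the check compares the share of the
  # distinct actions above seen in the time window against the threshold):
  # 51 actions are listed; if 11 of them appear in the last 1440 minutes,
  # 11 / 51 ≈ 0.22 >= 0.2, so the activity is flagged as possible privilege escalation.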
  # aws.cloudtrail_threat_detection_enumeration
  threat_detection_enumeration_threshold: 0.3 # Percentage of actions found to decide if it is an enumeration attack event, by default 0.3 (30%)
  threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default 1440 minutes (24 hours)
  threat_detection_enumeration_actions:
    [
      "DescribeAccessEntry",
      "DescribeAccountAttributes",
      "DescribeAvailabilityZones",
      "DescribeBundleTasks",
      "DescribeCarrierGateways",
      "DescribeClientVpnRoutes",
      "DescribeCluster",
      "DescribeDhcpOptions",
      "DescribeFlowLogs",
      "DescribeImages",
      "DescribeInstanceAttribute",
      "DescribeInstanceInformation",
      "DescribeInstanceTypes",
      "DescribeInstances",
      "DescribeKeyPairs",
      "DescribeLogGroups",
      "DescribeLogStreams",
      "DescribeOrganization",
      "DescribeRegions",
      "DescribeSecurityGroups",
      "DescribeSnapshotAttribute",
      "DescribeSnapshotTierStatus",
      "DescribeSubscriptionFilters",
      "DescribeTransitGatewayMulticastDomains",
      "DescribeVolumes",
      "DescribeVolumesModifications",
      "DescribeVpcEndpointConnectionNotifications",
      "DescribeVpcs",
      "GetAccount",
      "GetAccountAuthorizationDetails",
      "GetAccountSendingEnabled",
      "GetBucketAcl",
      "GetBucketLogging",
      "GetBucketPolicy",
      "GetBucketReplication",
      "GetBucketVersioning",
      "GetCallerIdentity",
      "GetCertificate",
      "GetConsoleScreenshot",
      "GetCostAndUsage",
      "GetDetector",
      "GetEbsDefaultKmsKeyId",
      "GetEbsEncryptionByDefault",
      "GetFindings",
      "GetFlowLogsIntegrationTemplate",
      "GetIdentityVerificationAttributes",
      "GetInstances",
      "GetIntrospectionSchema",
      "GetLaunchTemplateData",
      "GetLogRecord",
      "GetParameters",
      "GetPolicyVersion",
      "GetPublicAccessBlock",
      "GetQueryResults",
      "GetRegions",
      "GetSMSAttributes",
      "GetSMSSandboxAccountStatus",
      "GetSendQuota",
      "GetTransitGatewayRouteTableAssociations",
      "GetUserPolicy",
      "HeadObject",
      "ListAccessKeys",
      "ListAccounts",
      "ListAllMyBuckets",
      "ListAssociatedAccessPolicies",
      "ListAttachedUserPolicies",
      "ListClusters",
      "ListDetectors",
      "ListDomains",
      "ListFindings",
      "ListHostedZones",
      "ListIPSets",
      "ListIdentities",
      "ListInstanceProfiles",
      "ListObjects",
      "ListOrganizationalUnitsForParent",
      "ListOriginationNumbers",
      "ListPolicyVersions",
      "ListRoles",
      "ListRules",
      "ListServiceQuotas",
      "ListSubscriptions",
      "ListTargetsByRule",
      "ListTopics",
      "ListUsers",
      "LookupEvents",
      "Search",
    ]
  # aws.cloudtrail_threat_detection_llm_jacking
  threat_detection_llm_jacking_threshold: 0.4 # Percentage of actions found to decide if it is an LLM Jacking attack event, by default 0.4 (40%)
  threat_detection_llm_jacking_minutes: 1440 # Past minutes to search from now for LLM Jacking attacks, by default 1440 minutes (24 hours)
  threat_detection_llm_jacking_actions:
    [
      "PutUseCaseForModelAccess", # Submits a use case for model access, providing justification (Write).
      "PutFoundationModelEntitlement", # Grants entitlement for accessing a foundation model (Write).
      "PutModelInvocationLoggingConfiguration", # Configures logging for model invocations (Write).
      "CreateFoundationModelAgreement", # Creates a new agreement to use a foundation model (Write).
      "InvokeModel", # Invokes a specified Bedrock model for inference using the provided prompt and parameters (Read).
      "InvokeModelWithResponseStream", # Invokes a Bedrock model for inference with real-time token streaming (Read).
      "GetUseCaseForModelAccess", # Retrieves an existing use case for model access (Read).
      "GetModelInvocationLoggingConfiguration", # Fetches the logging configuration for model invocations (Read).
      "GetFoundationModelAvailability", # Checks the availability of a foundation model for use (Read).
      "ListFoundationModelAgreementOffers", # Lists available agreement offers for accessing foundation models (List).
      "ListFoundationModels", # Lists the available foundation models in Bedrock (List).
      "ListProvisionedModelThroughputs", # Lists the provisioned throughput for previously created models (List).
    ]

  # AWS RDS Configuration
  # aws.rds_instance_backup_enabled
  # Whether to check RDS instance replicas or not
  check_rds_instance_replicas: False

  # AWS ACM Configuration
  # aws.acm_certificates_expiration_check
  days_to_expire_threshold: 7
  # aws.acm_certificates_with_secure_key_algorithms
  insecure_key_algorithms:
    [
      "RSA-1024",
      "P-192",
      "SHA-1",
    ]

  # AWS EKS Configuration
  # aws.eks_control_plane_logging_all_types_enabled
  # EKS control plane logging types that must be enabled
  eks_required_log_types:
    [
      "api",
      "audit",
      "authenticator",
      "controllerManager",
      "scheduler",
    ]

  # aws.eks_cluster_uses_a_supported_version
  # EKS clusters must be version 1.28 or higher
  eks_cluster_oldest_version_supported: "1.28"

  # AWS CodeBuild Configuration
  # aws.codebuild_project_no_secrets_in_variables
  # CodeBuild sensitive variables that are excluded from the check
  excluded_sensitive_environment_variables: []

  # AWS ELB Configuration
  # aws.elb_is_in_multiple_az
  # Minimum number of Availability Zones that a CLB must be in
  elb_min_azs: 2

  # AWS ELBv2 Configuration
  # aws.elbv2_is_in_multiple_az
  # Minimum number of Availability Zones that an ELBv2 must be in
  elbv2_min_azs: 2

  # AWS Secrets Configuration
  # Patterns to ignore in the secrets checks
  secrets_ignore_patterns: []
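  # For example, to skip known dummy values by regex (illustrative patterns):
  # secrets_ignore_patterns: ["EXAMPLE_.*", ".*_PLACEHOLDER"]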

  # AWS Secrets Manager Configuration
  # aws.secretsmanager_secret_unused
  # Maximum number of days a secret can be unused
  max_days_secret_unused: 90

  # aws.secretsmanager_secret_rotated_periodically
  # Maximum number of days a secret can go without rotation
  max_days_secret_unrotated: 90

  # AWS Kinesis Configuration
  # Minimum retention period in hours for Kinesis streams
  min_kinesis_stream_retention_hours: 168 # 7 days
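  # For example, to require 30 days of retention instead (illustrative value):
  # min_kinesis_stream_retention_hours: 720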

# Azure Configuration
azure:
  # Azure Network Configuration
  # azure.network_public_ip_shodan
  # TODO: create common config
  shodan_api_key: null

  # Azure App Service
  # azure.app_ensure_php_version_is_latest
  php_latest_version: "8.2"
  # azure.app_ensure_python_version_is_latest
  python_latest_version: "3.12"
  # azure.app_ensure_java_version_is_latest
  java_latest_version: "17"

  # Azure SQL Server
  # azure.sqlserver_minimal_tls_version
  recommended_minimal_tls_versions:
    [
      "1.2",
      "1.3",
    ]

# GCP Configuration
gcp:
  # GCP Compute Configuration
  # gcp.compute_public_address_shodan
  shodan_api_key: null

# Kubernetes Configuration
kubernetes:
  # Kubernetes API Server
  # kubernetes.apiserver_audit_log_maxbackup_set
  audit_log_maxbackup: 10
  # kubernetes.apiserver_audit_log_maxsize_set
  audit_log_maxsize: 100
  # kubernetes.apiserver_audit_log_maxage_set
  audit_log_maxage: 30
  # kubernetes.apiserver_strong_ciphers_only
  apiserver_strong_ciphers:
    [
      "TLS_AES_128_GCM_SHA256",
      "TLS_AES_256_GCM_SHA384",
      "TLS_CHACHA20_POLY1305_SHA256",
    ]
  # Kubelet
  # kubernetes.kubelet_strong_ciphers_only
  kubelet_strong_ciphers:
    [
      "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
      "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
      "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305",
      "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
      "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
      "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
      "TLS_RSA_WITH_AES_256_GCM_SHA384",
      "TLS_RSA_WITH_AES_128_GCM_SHA256",
    ]

# This is for the secrets for pulling an image from a private repository. More information can be found here: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
imagePullSecrets: []
# This is to override the chart name.
nameOverride: ""
fullnameOverride: ""

# This section builds out the service account. More information can be found here: https://kubernetes.io/docs/concepts/security/service-accounts/
serviceAccount:
  # Specifies whether a service account should be created
  create: true
  # Automatically mount a ServiceAccount's API credentials?
  automount: true
  # Annotations to add to the service account
  annotations: {}
  # The name of the service account to use.
  # If not set and create is true, a name is generated using the fullname template
  name: ""

# This is for setting Kubernetes Annotations to a Pod.
# For more information check out: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/
podAnnotations: {}
# This is for setting Kubernetes Labels to a Pod.
# For more information check out: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/
podLabels: {}

podSecurityContext: {}
  # fsGroup: 2000

securityContext: {}
  # capabilities:
  #   drop:
  #   - ALL
  # readOnlyRootFilesystem: true
  # runAsNonRoot: true
  # runAsUser: 1000

# This is for setting up a service. More information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/
service:
  # This sets the service type. More information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types
  type: ClusterIP
  # This sets the service port. More information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/#field-spec-ports
  port: 80

# This block is for setting up the ingress. More information can be found here: https://kubernetes.io/docs/concepts/services-networking/ingress/
ingress:
  enabled: false
  className: ""
  annotations: {}
    # kubernetes.io/ingress.class: nginx
    # kubernetes.io/tls-acme: "true"
  hosts:
    - host: chart-example.local
      paths:
        - path: /
          pathType: ImplementationSpecific
  tls: []
  #  - secretName: chart-example-tls
  #    hosts:
  #      - chart-example.local
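  # For example, to expose the UI at a hostname of your own (illustrative host;
  # assumes an nginx ingress controller and a pre-created TLS secret):
  # enabled: true
  # className: "nginx"
  # hosts:
  #   - host: prowler.example.com
  #     paths:
  #       - path: /
  #         pathType: ImplementationSpecific
  # tls:
  #   - secretName: prowler-example-tls
  #     hosts:
  #       - prowler.example.com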

resources: {}
  # We usually recommend not to specify default resources and to leave this as a conscious
  # choice for the user. This also increases chances charts run on environments with little
  # resources, such as Minikube. If you do want to specify resources, uncomment the following
  # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
  # limits:
  #   cpu: 100m
  #   memory: 128Mi
  # requests:
  #   cpu: 100m
  #   memory: 128Mi

# This is to set up the liveness and readiness probes. More information can be found here: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/
livenessProbe:
  httpGet:
    path: /
    port: http
readinessProbe:
  httpGet:
    path: /
    port: http

# This section is for setting up autoscaling. More information can be found here: https://kubernetes.io/docs/concepts/workloads/autoscaling/
autoscaling:
  enabled: false
  minReplicas: 1
  maxReplicas: 100
  targetCPUUtilizationPercentage: 80
  # targetMemoryUtilizationPercentage: 80

# Additional volumes on the output Deployment definition.
volumes: []
# - name: foo
#   secret:
#     secretName: mysecret
#     optional: false

# Additional volumeMounts on the output Deployment definition.
volumeMounts: []
# - name: foo
#   mountPath: "/etc/foo"
#   readOnly: true

nodeSelector: {}

tolerations: []

affinity: {}

23
contrib/k8s/helm/prowler-cli/.helmignore
Normal file
@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

@@ -39,4 +39,3 @@ spec:
path: {{ $value }}
{{- end }}
{{- end }}

23
contrib/k8s/helm/prowler-ui/.helmignore
Normal file
@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

6
contrib/k8s/helm/prowler-ui/Chart.yaml
Normal file
@@ -0,0 +1,6 @@
apiVersion: v2
name: prowler-ui
description: A Helm chart for Kubernetes
type: application
version: 0.1.0
appVersion: "5.1.1"

22
contrib/k8s/helm/prowler-ui/templates/NOTES.txt
Normal file
@@ -0,0 +1,22 @@
1. Get the application URL by running these commands:
{{- if .Values.ingress.enabled }}
{{- range $host := .Values.ingress.hosts }}
  {{- range .paths }}
  http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
  {{- end }}
{{- end }}
{{- else if contains "NodePort" .Values.service.type }}
  export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "prowler-ui.fullname" . }})
  export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
  echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
  NOTE: It may take a few minutes for the LoadBalancer IP to be available.
        You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "prowler-ui.fullname" . }}'
  export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "prowler-ui.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
  echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
  export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "prowler-ui.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
  export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
  echo "Visit http://127.0.0.1:8080 to use your application"
  kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

62
contrib/k8s/helm/prowler-ui/templates/_helpers.tpl
Normal file
@@ -0,0 +1,62 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "prowler-ui.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "prowler-ui.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
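{{/*
Illustrative results (not part of the upstream chart): with release name
"prod" the fullname resolves to "prod-prowler-ui"; with release name
"prowler-ui-prod", which already contains the chart name, it stays
"prowler-ui-prod"; a fullnameOverride wins over both.
*/}}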

{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "prowler-ui.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Common labels
*/}}
{{- define "prowler-ui.labels" -}}
helm.sh/chart: {{ include "prowler-ui.chart" . }}
{{ include "prowler-ui.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*
Selector labels
*/}}
{{- define "prowler-ui.selectorLabels" -}}
app.kubernetes.io/name: {{ include "prowler-ui.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*
Create the name of the service account to use
*/}}
{{- define "prowler-ui.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "prowler-ui.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

72
contrib/k8s/helm/prowler-ui/templates/deployment.yaml
Normal file
@@ -0,0 +1,72 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "prowler-ui.fullname" . }}
  labels:
    {{- include "prowler-ui.labels" . | nindent 4 }}
spec:
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "prowler-ui.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      annotations:
        checksum/config: {{ include (print $.Template.BasePath "/secrets.yaml") . | sha256sum }}
      {{- with .Values.podAnnotations }}
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "prowler-ui.labels" . | nindent 8 }}
        {{- with .Values.podLabels }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      serviceAccountName: {{ include "prowler-ui.serviceAccountName" . }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          envFrom:
            - secretRef:
                name: {{ include "prowler-ui.fullname" $ }}
          ports:
            - name: http
              containerPort: {{ .Values.service.port }}
              protocol: TCP
          livenessProbe:
            {{- toYaml .Values.livenessProbe | nindent 12 }}
          readinessProbe:
            {{- toYaml .Values.readinessProbe | nindent 12 }}
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
          {{- with .Values.volumeMounts }}
          volumeMounts:
            {{- toYaml . | nindent 12 }}
          {{- end }}
      {{- with .Values.volumes }}
      volumes:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}

43
contrib/k8s/helm/prowler-ui/templates/ingress.yaml
Normal file
@@ -0,0 +1,43 @@
{{- if .Values.ingress.enabled -}}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: {{ include "prowler-ui.fullname" . }}
  labels:
    {{- include "prowler-ui.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- with .Values.ingress.className }}
  ingressClassName: {{ . }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    {{- range .Values.ingress.hosts }}
    - host: {{ .host | quote }}
      http:
        paths:
          {{- range .paths }}
          - path: {{ .path }}
            {{- with .pathType }}
            pathType: {{ . }}
            {{- end }}
            backend:
              service:
                name: {{ include "prowler-ui.fullname" $ }}
                port:
                  number: {{ $.Values.service.port }}
          {{- end }}
    {{- end }}
{{- end }}

Some files were not shown because too many files have changed in this diff.