Mirror of https://github.com/prowler-cloud/prowler.git, synced 2026-03-31 05:06:57 +00:00
Compare commits
343 Commits
Only the abbreviated commit SHAs survived the export of the commit table (343 commits, ranging from 5be859d86c to fef332a591); the author, date, and message columns were not captured.

.env (34 changed lines)
@@ -4,17 +4,13 @@

#### Prowler UI Configuration ####
PROWLER_UI_VERSION="stable"
AUTH_URL=http://localhost:3000
SITE_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_BASE_URL=${API_BASE_URL}
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
AUTH_TRUST_HOST=true
UI_PORT=3000
# openssl rand -base64 32
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
# Google Tag Manager ID
NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=""

#### Prowler API Configuration ####
PROWLER_API_VERSION="stable"
@@ -28,10 +24,6 @@ POSTGRES_USER=prowler
POSTGRES_PASSWORD=postgres
POSTGRES_DB=prowler_db

# Celery-Prowler task settings
TASK_RETRY_DELAY_SECONDS=0.1
TASK_RETRY_ATTEMPTS=5

# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=valkey
@@ -41,10 +33,10 @@ VALKEY_DB=0
# API scan settings

# The path to the directory where scan output should be stored
DJANGO_TMP_OUTPUT_DIRECTORY="/tmp/prowler_api_output"
DJANGO_TMP_OUTPUT_DIRECTORY = "/tmp/prowler_api_output"

# The maximum number of findings to process in a single batch
DJANGO_FINDINGS_BATCH_SIZE=1000
DJANGO_FINDINGS_BATCH_SIZE = 1000

# The AWS access key to be used when uploading scan output to an S3 bucket
# If left empty, default AWS credentials resolution behavior will be used
@@ -131,22 +123,4 @@ SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.7.5

# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""

SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""

# Single Sign-On (SSO)
SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"

# Lighthouse tracing
LANGSMITH_TRACING=false
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_API_KEY=""
LANGCHAIN_PROJECT=""
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.4.3

.github/dependabot.yml (vendored, 156 changed lines)
@@ -9,112 +9,108 @@ updates:
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 25
|
||||
interval: "daily"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# - package-ecosystem: "pip"
|
||||
# directory: "/api"
|
||||
# schedule:
|
||||
# interval: "daily"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: master
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "pip"
|
||||
# - "component/api"
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/api"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
- "component/api"
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 25
|
||||
interval: "daily"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "github_actions"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# - package-ecosystem: "npm"
|
||||
# directory: "/ui"
|
||||
# schedule:
|
||||
# interval: "daily"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: master
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "npm"
|
||||
# - "component/ui"
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/ui"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "npm"
|
||||
- "component/ui"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 25
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "docker"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/04/15
|
||||
# v4.6
|
||||
# - package-ecosystem: "pip"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "weekly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v4.6
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "pip"
|
||||
# - "v4"
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: v4.6
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
- "v4"
|
||||
|
||||
# - package-ecosystem: "github-actions"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "weekly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v4.6
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "github_actions"
|
||||
# - "v4"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: v4.6
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "github_actions"
|
||||
- "v4"
|
||||
|
||||
# - package-ecosystem: "docker"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "weekly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v4.6
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "docker"
|
||||
# - "v4"
|
||||
- package-ecosystem: "docker"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: v4.6
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "docker"
|
||||
- "v4"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# v3
|
||||
# - package-ecosystem: "pip"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "monthly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v3
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "pip"
|
||||
# - "v3"
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: v3
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
- "v3"
|
||||
|
||||
# - package-ecosystem: "github-actions"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "monthly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v3
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "github_actions"
|
||||
# - "v3"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: v3
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "github_actions"
|
||||
- "v3"
|
||||
|
||||

.github/labeler.yml (vendored, 5 changed lines)
@@ -27,11 +27,6 @@ provider/github:
- any-glob-to-any-file: "prowler/providers/github/**"
- any-glob-to-any-file: "tests/providers/github/**"

provider/iac:
- changed-files:
- any-glob-to-any-file: "prowler/providers/iac/**"
- any-glob-to-any-file: "tests/providers/iac/**"

github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"

.github/pull_request_template.md (vendored, 1 changed line)
@@ -16,7 +16,6 @@ Please include a summary of the change and which issue is fixed. List any depend
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
- [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/prowler/CHANGELOG.md), if applicable.

#### API
- [ ] Verify if API specs need to be regenerated.
@@ -61,7 +61,7 @@ jobs:

steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4

- name: Set short git commit SHA
id: vars
@@ -70,18 +70,18 @@ jobs:
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV

- name: Login to DockerHub
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3

- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
@@ -94,7 +94,7 @@ jobs:

- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
@@ -106,7 +106,7 @@ jobs:

- name: Trigger deployment
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}

.github/workflows/api-codeql.yml (vendored, 6 changed lines)
@@ -44,16 +44,16 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

.github/workflows/api-pull-request.yml (vendored, 67 changed lines)
@@ -28,10 +28,6 @@ env:
|
||||
VALKEY_DB: 0
|
||||
API_WORKING_DIR: ./api
|
||||
IMAGE_NAME: prowler-api
|
||||
IGNORE_FILES: |
|
||||
api/docs/**
|
||||
api/README.md
|
||||
api/CHANGELOG.md
|
||||
|
||||
jobs:
|
||||
test:
|
||||
@@ -75,54 +71,39 @@ jobs:
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@v45
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: ${{ env.IGNORE_FILES }}
|
||||
|
||||
- name: Replace @master with current branch in pyproject.toml
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
|
||||
echo "Using branch: $BRANCH_NAME"
|
||||
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml
|
||||
files_ignore: |
|
||||
api/.github/**
|
||||
api/docs/**
|
||||
api/permissions/**
|
||||
api/README.md
|
||||
api/mkdocs.yml
|
||||
|
||||
- name: Install poetry
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Update poetry.lock after the branch name change
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry lock
|
||||
pipx install poetry==1.8.5
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
|
||||
- name: Install system dependencies for xmlsec
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxml2-dev libxmlsec1-dev libxmlsec1-openssl pkg-config
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry install
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
@@ -136,12 +117,6 @@ jobs:
|
||||
run: |
|
||||
poetry check --lock
|
||||
|
||||
- name: Prevents known compatibility error between lxml and libxml2/libxmlsec versions - https://github.com/xmlsec/python-xmlsec/issues/320
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pip install --force-reinstall --no-binary lxml lxml
|
||||
|
||||
- name: Lint with ruff
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
@@ -169,9 +144,8 @@ jobs:
|
||||
- name: Safety
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
|
||||
run: |
|
||||
poetry run safety check --ignore 70612,66963,74429,76352,76353,77323
|
||||
poetry run safety check --ignore 70612,66963
|
||||
|
||||
- name: Vulture
|
||||
working-directory: ./api
|
||||
@@ -193,7 +167,7 @@ jobs:
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
uses: codecov/codecov-action@v5
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -201,20 +175,11 @@ jobs:
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: ${{ env.IGNORE_FILES }}
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Docker Buildx
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Build Container
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.API_WORKING_DIR }}
|
||||
push: false
|
||||
|
||||

.github/workflows/backport.yml (vendored, 4 changed lines)
@@ -23,7 +23,7 @@ jobs:
steps:
- name: Check labels
id: preview_label_check
uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
uses: docker://agilepathway/pull-request-label-checker:v1.6.55
with:
allow_failure: true
prefix_mode: true
@@ -33,7 +33,7 @@ jobs:

- name: Backport Action
if: steps.preview_label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Leave PR comment with the Prowler Documentation URI
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_NUMBER }}
body: |

.github/workflows/conventional-commit.yml (vendored, 2 changed lines)
@@ -18,6 +18,6 @@ jobs:
steps:
- name: conventional-commit-check
id: conventional-commit-check
uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
uses: agenthunt/conventional-commit-checker-action@v2.0.0
with:
pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'

.github/workflows/create-backport-label.yml (vendored, 67 changed lines)
@@ -1,67 +0,0 @@
|
||||
name: Create Backport Label
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
create_label:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
steps:
|
||||
- name: Create backport label
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
OWNER_REPO: ${{ github.repository }}
|
||||
run: |
|
||||
VERSION_ONLY=${RELEASE_TAG#v} # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
|
||||
|
||||
# Check if it's a minor version (X.Y.0)
|
||||
if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
|
||||
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is a minor version. Proceeding to create backport label."
|
||||
|
||||
TWO_DIGIT_VERSION=${VERSION_ONLY%.0} # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
|
||||
|
||||
FINAL_LABEL_NAME="backport-to-v${TWO_DIGIT_VERSION}"
|
||||
FINAL_DESCRIPTION="Backport PR to the v${TWO_DIGIT_VERSION} branch"
|
||||
|
||||
echo "Effective label name will be: ${FINAL_LABEL_NAME}"
|
||||
echo "Effective description will be: ${FINAL_DESCRIPTION}"
|
||||
|
||||
# Check if the label already exists
|
||||
STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" "https://api.github.com/repos/${OWNER_REPO}/labels/${FINAL_LABEL_NAME}")
|
||||
|
||||
if [ "${STATUS_CODE}" -eq 200 ]; then
|
||||
echo "Label '${FINAL_LABEL_NAME}' already exists."
|
||||
elif [ "${STATUS_CODE}" -eq 404 ]; then
|
||||
echo "Label '${FINAL_LABEL_NAME}' does not exist. Creating it..."
|
||||
# Prepare JSON data payload
|
||||
JSON_DATA=$(printf '{"name":"%s","description":"%s","color":"B60205"}' "${FINAL_LABEL_NAME}" "${FINAL_DESCRIPTION}")
|
||||
|
||||
CREATE_STATUS_CODE=$(curl -s -o /tmp/curl_create_response.json -w "%{http_code}" -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GITHUB_TOKEN}" \
|
||||
--data "${JSON_DATA}" \
|
||||
"https://api.github.com/repos/${OWNER_REPO}/labels")
|
||||
|
||||
CREATE_RESPONSE_BODY=$(cat /tmp/curl_create_response.json)
|
||||
rm -f /tmp/curl_create_response.json
|
||||
|
||||
if [ "$CREATE_STATUS_CODE" -eq 201 ]; then
|
||||
echo "Label '${FINAL_LABEL_NAME}' created successfully."
|
||||
else
|
||||
echo "Error creating label '${FINAL_LABEL_NAME}'. Status: $CREATE_STATUS_CODE"
|
||||
echo "Response: $CREATE_RESPONSE_BODY"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Error checking for label '${FINAL_LABEL_NAME}'. HTTP Status: ${STATUS_CODE}"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is not a minor version. Skipping backport label creation."
|
||||
exit 0
|
||||
fi
|
||||

.github/workflows/find-secrets.yml (vendored, 4 changed lines)
@@ -7,11 +7,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@6641d4ba5b684fffe195b9820345de1bf19f3181 # v3.89.2
uses: trufflesecurity/trufflehog@v3.88.14
with:
path: ./
base: ${{ github.event.repository.default_branch }}

.github/workflows/labeler.yml (vendored, 2 changed lines)
@@ -14,4 +14,4 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
- uses: actions/labeler@v5
@@ -1,86 +0,0 @@
|
||||
name: Check Changelog
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, labeled, unlabeled]
|
||||
|
||||
jobs:
|
||||
check-changelog:
|
||||
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
pull-requests: write
|
||||
env:
|
||||
MONITORED_FOLDERS: "api ui prowler"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get list of changed files
|
||||
id: changed_files
|
||||
run: |
|
||||
git fetch origin ${{ github.base_ref }}
|
||||
git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
|
||||
cat changed_files.txt
|
||||
|
||||
- name: Check for folder changes and changelog presence
|
||||
id: check_folders
|
||||
run: |
|
||||
missing_changelogs=""
|
||||
|
||||
for folder in $MONITORED_FOLDERS; do
|
||||
if grep -q "^${folder}/" changed_files.txt; then
|
||||
echo "Detected changes in ${folder}/"
|
||||
if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
|
||||
echo "No changelog update found for ${folder}/"
|
||||
missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
|
||||
echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find existing changelog comment
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository
|
||||
id: find_comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: '<!-- changelog-check -->'
|
||||
|
||||
- name: Comment on PR if changelog is missing
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs != ''
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.find_comment.outputs.comment-id }}
|
||||
body: |
|
||||
<!-- changelog-check -->
|
||||
⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
|
||||
|
||||
${{ steps.check_folders.outputs.missing_changelogs }}
|
||||
|
||||
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.
|
||||
|
||||
- name: Comment on PR if all changelogs are present
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs == ''
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.find_comment.outputs.comment-id }}
|
||||
body: |
|
||||
<!-- changelog-check -->
|
||||
✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉
|
||||
|
||||
- name: Fail if changelog is missing
|
||||
if: steps.check_folders.outputs.missing_changelogs != ''
|
||||
run: |
|
||||
echo "ERROR: Missing changelog updates in some folders."
|
||||
exit 1
|
||||

.github/workflows/pull-request-merged.yml (vendored, 37 changed lines)
@@ -1,37 +0,0 @@
|
||||
name: Prowler - Merged Pull Request
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
branches: ['master']
|
||||
types: ['closed']
|
||||
|
||||
jobs:
|
||||
trigger-cloud-pull-request:
|
||||
name: Trigger Cloud Pull Request
|
||||
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.merge_commit_sha }}
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
run: |
|
||||
shortSha=$(git rev-parse --short ${{ github.event.pull_request.merge_commit_sha }})
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Trigger pull request
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
event-type: prowler-pull-request-merged
|
||||
client-payload: '{
|
||||
"PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
|
||||
"PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
|
||||
"PROWLER_PR_TITLE": "${{ github.event.pull_request.title }}",
|
||||
"PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
|
||||
"PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
|
||||
"PROWLER_PR_URL":${{ toJson(github.event.pull_request.html_url) }}
|
||||
}'
|
||||
@@ -59,16 +59,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
pipx install poetry==2.*
|
||||
pipx install poetry==1.8.5
|
||||
pipx inject poetry poetry-bumpversion
|
||||
|
||||
- name: Get Prowler version
|
||||
@@ -108,13 +108,13 @@ jobs:
|
||||
esac
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
@@ -123,11 +123,11 @@ jobs:
|
||||
AWS_REGION: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
push: true
|
||||
tags: |
|
||||
@@ -140,7 +140,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
# Use local context to get changes
|
||||
# https://github.com/docker/build-push-action#path-context
|
||||
|
||||

.github/workflows/sdk-bump-version.yml (vendored, 145 changed lines)
@@ -1,145 +0,0 @@
|
||||
name: SDK - Bump Version
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
|
||||
env:
|
||||
PROWLER_VERSION: ${{ github.event.release.tag_name }}
|
||||
BASE_BRANCH: master
|
||||
|
||||
jobs:
|
||||
bump-version:
|
||||
name: Bump Version
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Get Prowler version
|
||||
shell: bash
|
||||
run: |
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
MAJOR_VERSION=${BASH_REMATCH[1]}
|
||||
MINOR_VERSION=${BASH_REMATCH[2]}
|
||||
FIX_VERSION=${BASH_REMATCH[3]}
|
||||
|
||||
# Export version components to GitHub environment
|
||||
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "FIX_VERSION=${FIX_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
if (( MAJOR_VERSION == 5 )); then
|
||||
if (( FIX_VERSION == 0 )); then
|
||||
echo "Minor Release: $PROWLER_VERSION"
|
||||
|
||||
# Set up next minor version for master
|
||||
BUMP_VERSION_TO=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).${FIX_VERSION}
|
||||
echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
TARGET_BRANCH=${BASE_BRANCH}
|
||||
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Set up patch version for version branch
|
||||
PATCH_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.1
|
||||
echo "PATCH_VERSION_TO=${PATCH_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Bumping to next minor version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
|
||||
echo "Bumping to next patch version: ${PATCH_VERSION_TO} in branch ${VERSION_BRANCH}"
|
||||
else
|
||||
echo "Patch Release: $PROWLER_VERSION"
|
||||
|
||||
BUMP_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.$((FIX_VERSION + 1))
|
||||
echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
TARGET_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Bumping to next patch version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
|
||||
fi
|
||||
else
|
||||
echo "Releasing another Prowler major version, aborting..."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Bump versions in files
|
||||
run: |
|
||||
echo "Using PROWLER_VERSION=$PROWLER_VERSION"
|
||||
echo "Using BUMP_VERSION_TO=$BUMP_VERSION_TO"
|
||||
|
||||
set -e
|
||||
|
||||
echo "Bumping version in pyproject.toml ..."
|
||||
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${BUMP_VERSION_TO}\"|" pyproject.toml
|
||||
|
||||
echo "Bumping version in prowler/config/config.py ..."
|
||||
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${BUMP_VERSION_TO}\"|" prowler/config/config.py
|
||||
|
||||
echo "Bumping version in .env ..."
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${BUMP_VERSION_TO}|" .env
|
||||
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.TARGET_BRANCH }}
|
||||
commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
|
||||
branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
|
||||
title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler version to v${{ env.BUMP_VERSION_TO }}
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
- name: Handle patch version for minor release
|
||||
if: env.FIX_VERSION == '0'
|
||||
run: |
|
||||
echo "Using PROWLER_VERSION=$PROWLER_VERSION"
|
||||
echo "Using PATCH_VERSION_TO=$PATCH_VERSION_TO"
|
||||
|
||||
set -e
|
||||
|
||||
echo "Bumping version in pyproject.toml ..."
|
||||
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${PATCH_VERSION_TO}\"|" pyproject.toml
|
||||
|
||||
echo "Bumping version in prowler/config/config.py ..."
|
||||
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${PATCH_VERSION_TO}\"|" prowler/config/config.py
|
||||
|
||||
echo "Bumping version in .env ..."
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PATCH_VERSION_TO}|" .env
|
||||
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create Pull Request for patch version
|
||||
if: env.FIX_VERSION == '0'
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
|
||||
branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
|
||||
title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler version to v${{ env.PATCH_VERSION_TO }}
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||

.github/workflows/sdk-codeql.yml (vendored, 8 changed lines)
@@ -21,7 +21,6 @@ on:
paths-ignore:
- 'ui/**'
- 'api/**'
- '.github/**'
pull_request:
branches:
- "master"
@@ -31,7 +30,6 @@ on:
paths-ignore:
- 'ui/**'
- 'api/**'
- '.github/**'
schedule:
- cron: '00 12 * * *'

@@ -52,16 +50,16 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/sdk-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

.github/workflows/sdk-pull-request.yml (vendored, 145 changed lines)
@@ -21,11 +21,11 @@ jobs:
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@v45
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
@@ -34,7 +34,6 @@ jobs:
|
||||
permissions/**
|
||||
api/**
|
||||
ui/**
|
||||
prowler/CHANGELOG.md
|
||||
README.md
|
||||
mkdocs.yml
|
||||
.backportrc.json
|
||||
@@ -47,11 +46,11 @@ jobs:
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==2.1.1
|
||||
pipx install poetry==1.8.5
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
@@ -59,7 +58,7 @@ jobs:
|
||||
- name: Install dependencies
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry install
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
@@ -107,143 +106,15 @@ jobs:
|
||||
run: |
|
||||
/tmp/hadolint Dockerfile --ignore=DL3013
|
||||
|
||||
# Test AWS
|
||||
- name: AWS - Check if any file has changed
|
||||
id: aws-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/aws/**
|
||||
./tests/providers/aws/**
|
||||
.poetry.lock
|
||||
|
||||
- name: AWS - Test
|
||||
if: steps.aws-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
|
||||
|
||||
# Test Azure
|
||||
- name: Azure - Check if any file has changed
|
||||
id: azure-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/azure/**
|
||||
./tests/providers/azure/**
|
||||
.poetry.lock
|
||||
|
||||
- name: Azure - Test
|
||||
if: steps.azure-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
|
||||
|
||||
# Test GCP
|
||||
- name: GCP - Check if any file has changed
|
||||
id: gcp-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/gcp/**
|
||||
./tests/providers/gcp/**
|
||||
.poetry.lock
|
||||
|
||||
- name: GCP - Test
|
||||
if: steps.gcp-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
|
||||
|
||||
# Test Kubernetes
|
||||
- name: Kubernetes - Check if any file has changed
|
||||
id: kubernetes-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/kubernetes/**
|
||||
./tests/providers/kubernetes/**
|
||||
.poetry.lock
|
||||
|
||||
- name: Kubernetes - Test
|
||||
if: steps.kubernetes-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
|
||||
|
||||
# Test GitHub
|
||||
- name: GitHub - Check if any file has changed
|
||||
id: github-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/github/**
|
||||
./tests/providers/github/**
|
||||
.poetry.lock
|
||||
|
||||
- name: GitHub - Test
|
||||
if: steps.github-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
|
||||
|
||||
# Test NHN
|
||||
- name: NHN - Check if any file has changed
|
||||
id: nhn-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/nhn/**
|
||||
./tests/providers/nhn/**
|
||||
.poetry.lock
|
||||
|
||||
- name: NHN - Test
|
||||
if: steps.nhn-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
|
||||
|
||||
# Test M365
|
||||
- name: M365 - Check if any file has changed
|
||||
id: m365-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/m365/**
|
||||
./tests/providers/m365/**
|
||||
.poetry.lock
|
||||
|
||||
- name: M365 - Test
|
||||
if: steps.m365-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
|
||||
|
||||
# Test IaC
|
||||
- name: IaC - Check if any file has changed
|
||||
id: iac-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/iac/**
|
||||
./tests/providers/iac/**
|
||||
.poetry.lock
|
||||
|
||||
- name: IaC - Test
|
||||
if: steps.iac-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
|
||||
|
||||
# Common Tests
|
||||
- name: Lib - Test
|
||||
- name: Test with pytest
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
|
||||
poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
|
||||
|
||||
- name: Config - Test
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
|
||||
|
||||
# Codecov
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
uses: codecov/codecov-action@v5
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: prowler
|
||||
files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./github_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./lib_coverage.xml,./config_coverage.xml
|
||||
|
||||

.github/workflows/sdk-pypi-release.yml (vendored, 10 changed lines)
@@ -7,7 +7,7 @@ on:
env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: 3.11
# CACHE: "poetry"
CACHE: "poetry"

jobs:
repository-check:
@@ -64,17 +64,17 @@ jobs:
;;
esac

- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4

- name: Install dependencies
run: |
pipx install poetry==2.1.1
pipx install poetry==1.8.5

- name: Setup Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
# cache: ${{ env.CACHE }}
cache: ${{ env.CACHE }}

- name: Build Prowler package
run: |
@@ -4,7 +4,7 @@ name: SDK - Refresh AWS services' regions
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 9 * * 1" # runs at 09:00 UTC every Monday
|
||||
- cron: "0 9 * * *" #runs at 09:00 UTC everyday
|
||||
|
||||
env:
|
||||
GITHUB_BRANCH: "master"
|
||||
@@ -23,12 +23,12 @@ jobs:
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ env.GITHUB_BRANCH }}
|
||||
|
||||
- name: setup python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.9 #install the python needed
|
||||
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
pip install boto3
|
||||
|
||||
- name: Configure AWS Credentials -- DEV
|
||||
uses: aws-actions/configure-aws-credentials@b47578312673ae6fa5b5096b330d9fbac3d116df # v4.2.1
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_DEV }}
|
||||
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
|
||||
@@ -50,13 +50,12 @@ jobs:
|
||||
|
||||
# Create pull request
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
commit-message: "feat(regions_update): Update regions for AWS services"
|
||||
branch: "aws-services-regions-updated-${{ github.sha }}"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
|
||||
title: "chore(regions_update): Changes in regions for AWS services"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
@@ -30,7 +30,6 @@ env:
|
||||
# Container Registries
|
||||
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
|
||||
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
|
||||
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1
|
||||
|
||||
jobs:
|
||||
repository-check:
|
||||
@@ -62,7 +61,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
@@ -71,23 +70,22 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
# Comment the following line for testing
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
|
||||
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
|
||||
# Set push: false for testing
|
||||
push: true
|
||||
tags: |
|
||||
@@ -98,12 +96,11 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
|
||||
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
|
||||
@@ -113,7 +110,7 @@ jobs:
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||

.github/workflows/ui-codeql.yml (vendored, 6 changed lines)
@@ -44,16 +44,16 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/ui-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"
61
.github/workflows/ui-pull-request.yml
vendored
@@ -27,79 +27,30 @@ jobs:
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Setup Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Install dependencies
|
||||
working-directory: ./ui
|
||||
run: npm ci
|
||||
run: npm install
|
||||
- name: Run Healthcheck
|
||||
working-directory: ./ui
|
||||
run: npm run healthcheck
|
||||
- name: Build the application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
|
||||
e2e-tests:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
AUTH_SECRET: 'fallback-ci-secret-for-testing'
|
||||
AUTH_TRUST_HOST: true
|
||||
NEXTAUTH_URL: http://localhost:3000
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Install dependencies
|
||||
working-directory: ./ui
|
||||
run: npm ci
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-playwright-
|
||||
- name: Install Playwright browsers
|
||||
working-directory: ./ui
|
||||
if: steps.playwright-cache.outputs.cache-hit != 'true'
|
||||
run: npm run test:e2e:install
|
||||
- name: Build the application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
- name: Run Playwright tests
|
||||
working-directory: ./ui
|
||||
run: npm run test:e2e
|
||||
- name: Upload Playwright report
|
||||
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
|
||||
if: failure()
|
||||
with:
|
||||
name: playwright-report
|
||||
path: ui/playwright-report/
|
||||
retention-days: 30
|
||||
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Build Container
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.UI_WORKING_DIR }}
|
||||
# Always build using `prod` target
|
||||
|
||||
4
.gitignore
vendored
@@ -42,9 +42,6 @@ junit-reports/
|
||||
# VSCode files
|
||||
.vscode/
|
||||
|
||||
# Cursor files
|
||||
.cursorignore
|
||||
|
||||
# Terraform
|
||||
.terraform*
|
||||
*.tfstate
|
||||
@@ -53,7 +50,6 @@ junit-reports/
|
||||
# .env
|
||||
ui/.env*
|
||||
api/.env*
|
||||
.env.local
|
||||
|
||||
# Coverage
|
||||
.coverage*
|
||||
|
||||
@@ -59,7 +59,7 @@ repos:
|
||||
args: ["--ignore=E266,W503,E203,E501,W605"]
|
||||
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 2.1.1
|
||||
rev: 1.8.0
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
name: API - poetry-check
|
||||
@@ -68,7 +68,7 @@ repos:
|
||||
|
||||
- id: poetry-lock
|
||||
name: API - poetry-lock
|
||||
args: ["--directory=./api"]
|
||||
args: ["--no-update", "--directory=./api"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-check
|
||||
@@ -78,7 +78,7 @@ repos:
|
||||
|
||||
- id: poetry-lock
|
||||
name: SDK - poetry-lock
|
||||
args: ["--directory=./"]
|
||||
args: ["--no-update", "--directory=./"]
|
||||
pass_filenames: false
|
||||
|
||||
|
||||
@@ -115,7 +115,7 @@ repos:
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353'
|
||||
entry: bash -c 'safety check --ignore 70612,66963'
|
||||
language: system
|
||||
|
||||
- id: vulture
|
||||
|
||||
61
Dockerfile
@@ -1,61 +1,38 @@
|
||||
FROM python:3.12.10-slim-bookworm AS build
|
||||
FROM python:3.12.9-alpine3.20
|
||||
|
||||
LABEL maintainer="https://github.com/prowler-cloud/prowler"
|
||||
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
|
||||
|
||||
ARG POWERSHELL_VERSION=7.5.0
|
||||
|
||||
# hadolint ignore=DL3008
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install PowerShell
|
||||
RUN ARCH=$(uname -m) && \
|
||||
if [ "$ARCH" = "x86_64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
elif [ "$ARCH" = "aarch64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
else \
|
||||
echo "Unsupported architecture: $ARCH" && exit 1 ; \
|
||||
fi && \
|
||||
mkdir -p /opt/microsoft/powershell/7 && \
|
||||
tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
|
||||
chmod +x /opt/microsoft/powershell/7/pwsh && \
|
||||
ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
|
||||
rm /tmp/powershell.tar.gz
|
||||
|
||||
# Add prowler user
|
||||
RUN addgroup --gid 1000 prowler && \
|
||||
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
|
||||
# Update system dependencies and install essential tools
|
||||
#hadolint ignore=DL3018
|
||||
RUN apk --no-cache upgrade && apk --no-cache add curl git
|
||||
|
||||
# Create non-root user
|
||||
RUN mkdir -p /home/prowler && \
|
||||
echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
|
||||
echo 'prowler:x:1000:' > /etc/group && \
|
||||
chown -R prowler:prowler /home/prowler
|
||||
USER prowler
|
||||
|
||||
WORKDIR /home/prowler
|
||||
|
||||
# Copy necessary files
|
||||
WORKDIR /home/prowler
|
||||
COPY prowler/ /home/prowler/prowler/
|
||||
COPY dashboard/ /home/prowler/dashboard/
|
||||
COPY pyproject.toml /home/prowler
|
||||
COPY README.md /home/prowler/
|
||||
COPY prowler/providers/m365/lib/powershell/m365_powershell.py /home/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
COPY README.md /home/prowler
|
||||
|
||||
# Install Python dependencies
|
||||
ENV HOME='/home/prowler'
|
||||
ENV PATH="${HOME}/.local/bin:${PATH}"
|
||||
#hadolint ignore=DL3013
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
|
||||
RUN poetry install --compile && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
# Install PowerShell modules
|
||||
RUN poetry run python prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
ENV PATH="$HOME/.local/bin:$PATH"
|
||||
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
|
||||
pip install --no-cache-dir .
|
||||
|
||||
# Remove deprecated dash dependencies
|
||||
RUN pip uninstall dash-html-components -y && \
|
||||
pip uninstall dash-core-components -y
|
||||
|
||||
# Remove Prowler directory and build files
|
||||
USER 0
|
||||
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
|
||||
|
||||
USER prowler
|
||||
ENTRYPOINT ["poetry", "run", "prowler"]
|
||||
ENTRYPOINT ["prowler"]
|
||||
|
||||
174
README.md
@@ -3,7 +3,7 @@
|
||||
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
|
||||
</p>
|
||||
<p align="center">
|
||||
<b><i>Prowler</b> is the Open Cloud Security platform trusted by thousands to automate security and compliance in any cloud environment. With hundreds of ready-to-use checks and compliance frameworks, Prowler delivers real-time, customizable monitoring and seamless integrations, making cloud security simple, scalable, and cost-effective for organizations of any size.
|
||||
<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
|
||||
</p>
|
||||
<p align="center">
|
||||
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
|
||||
@@ -43,29 +43,15 @@
|
||||
|
||||
# Description
|
||||
|
||||
**Prowler** is an open-source security tool designed to assess and enforce security best practices across AWS, Azure, Google Cloud, and Kubernetes. It supports tasks such as security audits, incident response, continuous monitoring, system hardening, forensic readiness, and remediation processes.
|
||||
|
||||
Prowler includes hundreds of built-in controls to ensure compliance with standards and frameworks, including:
|
||||
|
||||
- **Industry Standards:** CIS, NIST 800, NIST CSF, and CISA
|
||||
- **Regulatory Compliance and Governance:** RBI, FedRAMP, and PCI-DSS
|
||||
- **Frameworks for Sensitive Data and Privacy:** GDPR, HIPAA, and FFIEC
|
||||
- **Frameworks for Organizational Governance and Quality Control:** SOC2 and GXP
|
||||
- **AWS-Specific Frameworks:** AWS Foundational Technical Review (FTR) and AWS Well-Architected Framework (Security Pillar)
|
||||
- **National Security Standards:** ENS (Spanish National Security Scheme)
|
||||
- **Custom Security Frameworks:** Tailored to your needs
|
||||
|
||||
## Prowler CLI and Prowler Cloud
|
||||
|
||||
Prowler offers a Command Line Interface (CLI), known as Prowler Open Source, and an additional service built on top of it, called <a href="https://prowler.com">Prowler Cloud</a>.
|
||||
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
|
||||
|
||||
## Prowler App
|
||||
|
||||
Prowler App is a web-based application that simplifies running Prowler across your cloud provider accounts. It provides a user-friendly interface to visualize the results and streamline your security assessments.
|
||||
Prowler App is a web application that allows you to run Prowler in your cloud provider accounts and visualize the results in a user-friendly interface.
|
||||
|
||||

|
||||
|
||||
>For more details, refer to the [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
|
||||
>More details at [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
@@ -74,7 +60,6 @@ prowler <provider>
|
||||
```
|
||||

|
||||
|
||||
|
||||
## Prowler Dashboard
|
||||
|
||||
```console
|
||||
@@ -82,34 +67,25 @@ prowler dashboard
|
||||
```
|
||||

|
||||
|
||||
# Prowler at a Glance
|
||||
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.
|
||||
|
||||
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|
||||
|---|---|---|---|---|
|
||||
| AWS | 567 | 82 | 36 | 10 |
|
||||
| GCP | 79 | 13 | 10 | 3 |
|
||||
| Azure | 142 | 18 | 10 | 3 |
|
||||
| Kubernetes | 83 | 7 | 5 | 7 |
|
||||
| GitHub | 16 | 2 | 1 | 0 |
|
||||
| M365 | 69 | 7 | 3 | 2 |
|
||||
| NHN (Unofficial) | 6 | 2 | 1 | 0 |
|
||||
| AWS | 564 | 82 | 33 | 10 |
|
||||
| GCP | 77 | 13 | 5 | 3 |
|
||||
| Azure | 140 | 18 | 6 | 3 |
|
||||
| Kubernetes | 83 | 7 | 2 | 7 |
|
||||
| Microsoft365 | 5 | 2 | 1 | 0 |
|
||||
|
||||
> [!Note]
|
||||
> The numbers in the table are updated periodically.
|
||||
|
||||
> [!Tip]
|
||||
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).
|
||||
|
||||
> [!Note]
|
||||
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories: `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
|
||||
> You can list the checks, services, compliance frameworks and categories with `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
|
||||
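For instance, a minimal sketch of those list commands with `aws` chosen as an example provider (any supported provider works the same way):

```console
prowler aws --list-checks
prowler aws --list-services
prowler aws --list-compliance
prowler aws --list-categories
```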
|
||||
# 💻 Installation
|
||||
|
||||
## Prowler App
|
||||
|
||||
Prowler App offers flexible installation methods tailored to various environments:
|
||||
Prowler App can be installed in different ways, depending on your environment:
|
||||
|
||||
> For detailed instructions on using Prowler App, refer to the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
|
||||
> See how to use Prowler App in the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
|
||||
|
||||
### Docker Compose
|
||||
|
||||
@@ -125,23 +101,15 @@ curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/mast
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
> Containers are built for `linux/amd64`.
|
||||
|
||||
### Configuring Your Workstation for Prowler App
|
||||
|
||||
If your workstation's architecture is incompatible, you can resolve this by:
|
||||
|
||||
- **Setting the environment variable**: `DOCKER_DEFAULT_PLATFORM=linux/amd64`
|
||||
- **Using the following flag in your Docker command**: `--platform linux/amd64`
|
||||
|
||||
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
|
||||
> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
|
||||
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
|
||||
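As a hedged illustration of the two platform workarounds described above (the image name in the second command is a placeholder, not taken from this document):

```console
# Option 1: set the environment variable so Docker Compose pulls amd64 images
DOCKER_DEFAULT_PLATFORM=linux/amd64 docker compose up -d

# Option 2: pass the platform flag explicitly on a single docker command
docker run --platform linux/amd64 <image>
```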
|
||||
### From GitHub
|
||||
|
||||
**Requirements**
|
||||
|
||||
* `git` installed.
|
||||
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
|
||||
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
|
||||
|
||||
@@ -160,12 +128,12 @@ python manage.py migrate --database admin
|
||||
gunicorn -c config/guniconf.py config.wsgi:application
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
|
||||
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
|
||||
>
|
||||
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
|
||||
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
|
||||
> If your poetry version is below 2.0.0 you must keep using `poetry shell` to activate your environment.
|
||||
> In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment
|
||||
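A minimal sketch of both activation paths, depending on the Poetry version installed:

```console
# Poetry >= 2.0.0
eval $(poetry env activate)

# Poetry < 2.0.0
poetry shell
```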
|
||||
> After completing the setup, access the API documentation at http://localhost:8080/api/v1/docs.
|
||||
> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.
|
||||
|
||||
**Commands to run the API Worker**
|
||||
|
||||
@@ -203,31 +171,29 @@ npm run build
|
||||
npm start
|
||||
```
|
||||
|
||||
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
|
||||
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
|
||||
|
||||
## Prowler CLI
|
||||
### Pip package
|
||||
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/). Consequently, it can be installed using pip with Python >3.9.1, <3.13:
|
||||
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python > 3.9.1, < 3.13:
|
||||
|
||||
```console
|
||||
pip install prowler
|
||||
prowler -v
|
||||
```
|
||||
>For further guidance, refer to [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
|
||||
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
|
||||
|
||||
### Containers
|
||||
|
||||
**Available Versions of Prowler CLI**
|
||||
The available versions of Prowler CLI are the following:
|
||||
|
||||
The following versions of Prowler CLI are available, depending on your requirements:
|
||||
|
||||
- `latest`: Synchronizes with the `master` branch. Note that this version is not stable.
|
||||
- `v4-latest`: Synchronizes with the `v4` branch. Note that this version is not stable.
|
||||
- `v3-latest`: Synchronizes with the `v3` branch. Note that this version is not stable.
|
||||
- `<x.y.z>` (release): Stable releases corresponding to specific versions. You can find the complete list of releases [here](https://github.com/prowler-cloud/prowler/releases).
|
||||
- `stable`: Always points to the latest release.
|
||||
- `v4-stable`: Always points to the latest release for v4.
|
||||
- `v3-stable`: Always points to the latest release for v3.
|
||||
- `latest`: in sync with `master` branch (bear in mind that it is not a stable version)
|
||||
- `v4-latest`: in sync with `v4` branch (bear in mind that it is not a stable version)
|
||||
- `v3-latest`: in sync with `v3` branch (bear in mind that it is not a stable version)
|
||||
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
|
||||
- `stable`: this tag always point to the latest release.
|
||||
- `v4-stable`: this tag always point to the latest release for v4.
|
||||
- `v3-stable`: this tag always point to the latest release for v3.
|
||||
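As a purely hypothetical sketch (registry and image name are placeholders, since the registry list follows below), pulling one of the tags above would look like:

```console
docker pull <registry>/<image>:stable
docker pull <registry>/<image>:v4-latest
```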
|
||||
The container images are available here:
|
||||
- Prowler CLI:
|
||||
@@ -239,56 +205,35 @@ The container images are available here:
|
||||
|
||||
### From GitHub
|
||||
|
||||
Python >3.9.1, <3.13 is required with pip and Poetry:
|
||||
Python > 3.9.1, < 3.13 is required with pip and poetry:
|
||||
|
||||
``` console
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
eval $(poetry env activate)
|
||||
poetry install
|
||||
python prowler-cli.py -v
|
||||
python prowler.py -v
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> To clone Prowler on Windows, configure Git to support long file paths by running the following command: `git config core.longpaths true`.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
|
||||
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
|
||||
>
|
||||
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
|
||||
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
|
||||
> If your poetry version is below 2.0.0 you must keep using `poetry shell` to activate your environment.
|
||||
> In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment
|
||||
|
||||
# ✏️ High level architecture
|
||||
> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
|
||||
# 📐✏️ High level architecture
|
||||
|
||||
## Prowler App
|
||||
**Prowler App** is composed of three key components:
|
||||
The **Prowler App** consists of three main components:
|
||||
|
||||
- **Prowler UI**: A web-based interface, built with Next.js, providing a user-friendly experience for executing Prowler scans and visualizing results.
|
||||
- **Prowler API**: A backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
|
||||
- **Prowler SDK**: A Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
|
||||
- **Prowler UI**: A user-friendly web interface for running Prowler and viewing results, powered by Next.js.
|
||||
- **Prowler API**: The backend API that executes Prowler scans and stores the results, built with Django REST Framework.
|
||||
- **Prowler SDK**: A Python SDK that integrates with the Prowler CLI for advanced functionality.
|
||||
|
||||

|
||||
|
||||
## Prowler CLI
|
||||
|
||||
**Running Prowler**
|
||||
|
||||
Prowler can be executed across various environments, offering flexibility to meet your needs. It can be run from:
|
||||
|
||||
- Your own workstation
|
||||
|
||||
- A Kubernetes Job
|
||||
|
||||
- Google Compute Engine
|
||||
|
||||
- Azure Virtual Machines (VMs)
|
||||
|
||||
- Amazon EC2 instances
|
||||
|
||||
- AWS Fargate or other container platforms
|
||||
|
||||
- CloudShell
|
||||
|
||||
And many more environments.
|
||||
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
|
||||
|
||||

|
||||
|
||||
@@ -296,36 +241,23 @@ And many more environments.
|
||||
|
||||
## General
|
||||
- `Allowlist` now is called `Mutelist`.
|
||||
- The `--quiet` option has been deprecated. Use the `--status` flag to filter findings based on their status: PASS, FAIL, or MANUAL.
|
||||
- All findings with an `INFO` status have been reclassified as `MANUAL`.
|
||||
- The CSV output format is standardized across all providers.
|
||||
- The `--quiet` option has been deprecated, now use the `--status` flag to select the finding's status you want to get from PASS, FAIL or MANUAL.
|
||||
- All `INFO` finding's status has changed to `MANUAL`.
|
||||
- The CSV output format is common for all the providers.
|
||||
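For example, a minimal sketch of the `--status` replacement for the removed `--quiet` behaviour, with `aws` assumed as the provider:

```console
# Show only failed findings (roughly what --quiet used to do)
prowler aws --status FAIL
```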
|
||||
**Deprecated Output Formats**
|
||||
|
||||
The following formats are now deprecated:
|
||||
- Native JSON has been replaced with JSON in [OCSF] v1.1.0 format, which is standardized across all providers (https://schema.ocsf.io/).
|
||||
We have deprecated some of our outputs formats:
|
||||
- The native JSON is replaced for the JSON [OCSF](https://schema.ocsf.io/) v1.1.0, common for all the providers.
|
||||
|
||||
## AWS
|
||||
|
||||
**AWS Flag Deprecation**
|
||||
|
||||
The flag --sts-endpoint-region has been deprecated due to the adoption of AWS STS regional tokens.
|
||||
|
||||
**Sending FAIL Results to AWS Security Hub**
|
||||
|
||||
- To send only FAILS to AWS Security Hub, use one of the following options: `--send-sh-only-fails` or `--security-hub --status FAIL`.
|
||||
- Deprecate the AWS flag --sts-endpoint-region since we use AWS STS regional tokens.
|
||||
- To send only FAILS to AWS Security Hub, now use either `--send-sh-only-fails` or `--security-hub --status FAIL`.
|
||||
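A short sketch of the two equivalent invocations mentioned above, assuming the `aws` provider (Security Hub is AWS-specific) and that the Security Hub integration flag is enabled in both cases:

```console
prowler aws --security-hub --send-sh-only-fails
# equivalent:
prowler aws --security-hub --status FAIL
```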
|
||||
|
||||
# 📖 Documentation
|
||||
|
||||
**Documentation Resources**
|
||||
|
||||
For installation instructions, usage details, tutorials, and the Developer Guide, visit https://docs.prowler.com/
|
||||
Install, Usage, Tutorials and Developer Guide is at https://docs.prowler.com/
|
||||
|
||||
# 📃 License
|
||||
|
||||
**Prowler License Information**
|
||||
|
||||
Prowler is licensed under the Apache License 2.0, as indicated in each file within the repository.

**Obtaining a Copy of the License**
|
||||
|
||||
A copy of the License is available at <http://www.apache.org/licenses/LICENSE-2.0>
|
||||
Prowler is licensed as Apache License 2.0 as specified in each file. You may obtain a copy of the License at
|
||||
<http://www.apache.org/licenses/LICENSE-2.0>
|
||||
|
||||
168
api/.gitignore
vendored
Normal file
@@ -0,0 +1,168 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
/_data/
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
||||
.pdm.toml
|
||||
.pdm-python
|
||||
.pdm-build/
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
*.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
.idea/
|
||||
|
||||
# VSCode
|
||||
.vscode/
|
||||
91
api/.pre-commit-config.yaml
Normal file
@@ -0,0 +1,91 @@
|
||||
repos:
|
||||
## GENERAL
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
- id: check-yaml
|
||||
args: ["--unsafe"]
|
||||
- id: check-json
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- id: no-commit-to-branch
|
||||
- id: pretty-format-json
|
||||
args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
|
||||
exclude: 'src/backend/api/fixtures/dev/.*\.json$'
|
||||
|
||||
## TOML
|
||||
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
|
||||
rev: v2.13.0
|
||||
hooks:
|
||||
- id: pretty-format-toml
|
||||
args: [--autofix]
|
||||
files: pyproject.toml
|
||||
|
||||
## BASH
|
||||
- repo: https://github.com/koalaman/shellcheck-precommit
|
||||
rev: v0.10.0
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
exclude: contrib
|
||||
## PYTHON
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.5.0
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
args: [ --fix ]
|
||||
# Run the formatter.
|
||||
- id: ruff-format
|
||||
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 1.8.0
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
args: ["--directory=src"]
|
||||
- id: poetry-lock
|
||||
args: ["--no-update", "--directory=src"]
|
||||
|
||||
- repo: https://github.com/hadolint/hadolint
|
||||
rev: v2.13.0-beta
|
||||
hooks:
|
||||
- id: hadolint
|
||||
args: ["--ignore=DL3013", "Dockerfile"]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: trufflehog
|
||||
name: TruffleHog
|
||||
description: Detect secrets in your data.
|
||||
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
|
||||
# For running trufflehog in docker, use the following entry instead:
|
||||
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
|
||||
language: system
|
||||
stages: ["commit", "push"]
|
||||
|
||||
- id: bandit
|
||||
name: bandit
|
||||
description: "Bandit is a tool for finding common security issues in Python code"
|
||||
entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
entry: bash -c 'poetry run safety check --ignore 70612,66963'
|
||||
language: system
|
||||
|
||||
- id: vulture
|
||||
name: vulture
|
||||
description: "Vulture finds unused code in Python programs."
|
||||
entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
147
api/CHANGELOG.md
@@ -2,151 +2,30 @@
|
||||
|
||||
All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
## [v1.10.0] (Prowler UNRELEASED)
|
||||
|
||||
---
|
||||
|
||||
## [v1.9.1] (Prowler v5.8.1)
|
||||
|
||||
### Added
|
||||
- Custom exception for provider connection errors during scans [(#8234)](https://github.com/prowler-cloud/prowler/pull/8234)
|
||||
|
||||
### Changed
|
||||
- Summary and overview tasks now use a dedicated queue and no longer propagate errors to compliance tasks [(#8214)](https://github.com/prowler-cloud/prowler/pull/8214)
|
||||
|
||||
### Fixed
|
||||
- Scan with no resources will not trigger legacy code for findings metadata [(#8183)](https://github.com/prowler-cloud/prowler/pull/8183)
|
||||
- Invitation email comparison case-insensitive [(#8206)](https://github.com/prowler-cloud/prowler/pull/8206)
|
||||
|
||||
### Removed
|
||||
- Validation of the provider's secret type during updates [(#8197)](https://github.com/prowler-cloud/prowler/pull/8197)
|
||||
|
||||
---
|
||||
|
||||
## [v1.9.0] (Prowler v5.8.0)
|
||||
|
||||
### Added
|
||||
- Support GCP Service Account key [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
|
||||
- `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
|
||||
- Lighthouse configuration support [(#7848)](https://github.com/prowler-cloud/prowler/pull/7848)
|
||||
|
||||
### Changed
|
||||
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
|
||||
- Optional `user` and `password` for M365 provider [(#7992)](https://github.com/prowler-cloud/prowler/pull/7992)
|
||||
|
||||
### Fixed
|
||||
- Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.5] (Prowler v5.7.5)
|
||||
|
||||
### Fixed
|
||||
- Normalize provider UID to ensure safe and unique export directory paths [(#8007)](https://github.com/prowler-cloud/prowler/pull/8007).
|
||||
- Blank resource types in `/metadata` endpoints [(#8027)](https://github.com/prowler-cloud/prowler/pull/8027)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.4] (Prowler v5.7.4)
|
||||
|
||||
### Removed
|
||||
- Reverted RLS transaction handling and DB custom backend [(#7994)](https://github.com/prowler-cloud/prowler/pull/7994)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.3] (Prowler v5.7.3)
|
||||
|
||||
### Added
|
||||
- Database backend to handle already closed connections [(#7935)](https://github.com/prowler-cloud/prowler/pull/7935)
|
||||
|
||||
### Changed
|
||||
- Renamed field encrypted_password to password for M365 provider [(#7784)](https://github.com/prowler-cloud/prowler/pull/7784)
|
||||
|
||||
### Fixed
|
||||
- Transaction persistence with RLS operations [(#7916)](https://github.com/prowler-cloud/prowler/pull/7916)
|
||||
- Reverted the change `get_with_retry` to use the original `get` method for retrieving tasks [(#7932)](https://github.com/prowler-cloud/prowler/pull/7932)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.2] (Prowler v5.7.2)
|
||||
|
||||
### Fixed
|
||||
- Task lookup to use task_kwargs instead of task_args for scan report resolution [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
|
||||
- Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
|
||||
- Connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
|
||||
- Race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876)
|
||||
- Error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.1] (Prowler v5.7.1)
|
||||
|
||||
### Fixed
|
||||
- Added database index to improve performance on finding lookup [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.0] (Prowler v5.7.0)
|
||||
|
||||
### Added
|
||||
- Huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- Improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- Queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- New endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743)
|
||||
- Export support for Prowler ThreatScore in M365 [(7783)](https://github.com/prowler-cloud/prowler/pull/7783)
|
||||
|
||||
---
|
||||
|
||||
## [v1.7.0] (Prowler v5.6.0)
|
||||
|
||||
### Added
|
||||
|
||||
- M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563)
|
||||
- `compliance/` folder and ZIP‐export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
|
||||
- API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
|
||||
|
||||
---
|
||||
|
||||
## [v1.6.0] (Prowler v5.5.0)
|
||||
|
||||
### Added
|
||||
|
||||
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167)
|
||||
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289)
|
||||
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333)
|
||||
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378)
|
||||
- Missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.4] (Prowler v5.4.4)
|
||||
|
||||
### Fixed
|
||||
- Bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.3] (Prowler v5.4.3)
|
||||
|
||||
### Fixed
|
||||
- Duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401)
|
||||
- Environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423)
|
||||
- Added duplicated scheduled scans handling ([#7401])(https://github.com/prowler-cloud/prowler/pull/7401).
|
||||
- Added environment variable to configure the deletion task batch size ([#7423])(https://github.com/prowler-cloud/prowler/pull/7423).
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.2] (Prowler v5.4.2)
|
||||
|
||||
### Changed
|
||||
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349)
|
||||
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349).
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.1] (Prowler v5.4.1)
|
||||
|
||||
### Fixed
|
||||
- Handle response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183)
|
||||
- Race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172)
|
||||
- Handle exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283)
|
||||
- Added a handled response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183).
|
||||
- Fixed a race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172).
|
||||
- Handled exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283).
|
||||
|
||||
|
||||
---
|
||||
|
||||
@@ -154,20 +33,20 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
### Added
|
||||
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
|
||||
- API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878)
|
||||
- Add API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878).
|
||||
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)
|
||||
|
||||
### Changed
|
||||
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019)
|
||||
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019).
|
||||
|
||||
---
|
||||
|
||||
## [v1.4.0] (Prowler v5.3.0)
|
||||
|
||||
### Changed
|
||||
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700)
|
||||
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800)
|
||||
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863)
|
||||
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869)
|
||||
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
|
||||
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
|
||||
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
|
||||
- Increase the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
|
||||
|
||||
---
|
||||
|
||||
@@ -1,45 +1,13 @@
|
||||
FROM python:3.12.10-slim-bookworm AS build
|
||||
FROM python:3.12.8-alpine3.20 AS build
|
||||
|
||||
LABEL maintainer="https://github.com/prowler-cloud/api"
|
||||
|
||||
ARG POWERSHELL_VERSION=7.5.0
|
||||
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}
|
||||
|
||||
# hadolint ignore=DL3008
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget \
|
||||
libicu72 \
|
||||
gcc \
|
||||
g++ \
|
||||
make \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
pkg-config \
|
||||
libtool \
|
||||
libxslt1-dev \
|
||||
python3-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install PowerShell
|
||||
RUN ARCH=$(uname -m) && \
|
||||
if [ "$ARCH" = "x86_64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
elif [ "$ARCH" = "aarch64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
else \
|
||||
echo "Unsupported architecture: $ARCH" && exit 1 ; \
|
||||
fi && \
|
||||
mkdir -p /opt/microsoft/powershell/7 && \
|
||||
tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
|
||||
chmod +x /opt/microsoft/powershell/7/pwsh && \
|
||||
ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
|
||||
rm /tmp/powershell.tar.gz
|
||||
|
||||
# Add prowler user
|
||||
RUN addgroup --gid 1000 prowler && \
|
||||
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
|
||||
# hadolint ignore=DL3018
|
||||
RUN apk --no-cache add gcc python3-dev musl-dev linux-headers curl-dev
|
||||
|
||||
RUN apk --no-cache upgrade && \
|
||||
addgroup -g 1000 prowler && \
|
||||
adduser -D -u 1000 -G prowler prowler
|
||||
USER prowler
|
||||
|
||||
WORKDIR /home/prowler
|
||||
@@ -49,26 +17,27 @@ COPY pyproject.toml ./
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
|
||||
COPY src/backend/ ./backend/
|
||||
|
||||
ENV PATH="/home/prowler/.local/bin:$PATH"
|
||||
|
||||
# Add `--no-root` to avoid installing the current project as a package
|
||||
RUN poetry install --no-root && \
|
||||
RUN poetry install && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
|
||||
|
||||
# Prevents known compatibility error between lxml and libxml2/libxmlsec versions.
|
||||
# See: https://github.com/xmlsec/python-xmlsec/issues/320
|
||||
RUN poetry run pip install --force-reinstall --no-binary lxml lxml
|
||||
|
||||
COPY src/backend/ ./backend/
|
||||
COPY docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
|
||||
WORKDIR /home/prowler/backend
|
||||
|
||||
# Development image
|
||||
# hadolint ignore=DL3006
|
||||
FROM build AS dev
|
||||
|
||||
USER 0
|
||||
# hadolint ignore=DL3018
|
||||
RUN apk --no-cache add curl vim
|
||||
|
||||
USER prowler
|
||||
|
||||
ENTRYPOINT ["../docker-entrypoint.sh", "dev"]
|
||||
|
||||
# Production image
|
||||
|
||||
@@ -235,7 +235,6 @@ To view the logs for any component (e.g., Django, Celery worker), you can use th
|
||||
|
||||
```console
|
||||
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
|
||||
```
|
||||
|
||||
## Applying migrations
|
||||
|
||||
|
||||
125
api/docker-compose.yml
Normal file
@@ -0,0 +1,125 @@
|
||||
services:
|
||||
api:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-api
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
ports:
|
||||
- "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
|
||||
profiles:
|
||||
- prod
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "prod"
|
||||
|
||||
api-dev:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
target: dev
|
||||
image: prowler-api-dev
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=config.django.devel
|
||||
- DJANGO_LOGGING_FORMATTER=human_readable
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
ports:
|
||||
- "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
|
||||
volumes:
|
||||
- "./src/backend:/home/prowler/backend"
|
||||
- "./pyproject.toml:/home/prowler/pyproject.toml"
|
||||
profiles:
|
||||
- dev
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "dev"
|
||||
|
||||
postgres:
|
||||
image: postgres:16.3-alpine
|
||||
ports:
|
||||
- "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
|
||||
hostname: "postgres-db"
|
||||
volumes:
|
||||
- ./_data/postgres:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
|
||||
- POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
|
||||
- POSTGRES_DB=${POSTGRES_DB:-prowler_db}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
valkey:
|
||||
image: valkey/valkey:7-alpine3.19
|
||||
ports:
|
||||
- "${VALKEY_PORT:-6379}:6379"
|
||||
hostname: "valkey"
|
||||
volumes:
|
||||
- ./_data/valkey:/data
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
worker:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-worker
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
profiles:
|
||||
- dev
|
||||
- prod
|
||||
depends_on:
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "worker"
|
||||
|
||||
worker-beat:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-worker
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
profiles:
|
||||
- dev
|
||||
- prod
|
||||
depends_on:
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "beat"
|
||||
@@ -3,10 +3,6 @@
|
||||
|
||||
apply_migrations() {
|
||||
echo "Applying database migrations..."
|
||||
|
||||
# Fix Inconsistent migration history after adding sites app
|
||||
poetry run python manage.py check_and_fix_socialaccount_sites_migration --database admin
|
||||
|
||||
poetry run python manage.py migrate --database admin
|
||||
}
|
||||
|
||||
@@ -32,7 +28,7 @@ start_prod_server() {
|
||||
|
||||
start_worker() {
|
||||
echo "Starting the worker..."
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview -E --max-tasks-per-child 1
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion -E --max-tasks-per-child 1
|
||||
}
|
||||
|
||||
start_worker_beat() {
|
||||
|
||||
3956
api/poetry.lock
generated
File diff suppressed because it is too large
@@ -2,44 +2,39 @@
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
requires = ["poetry-core"]
|
||||
|
||||
[project]
|
||||
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
|
||||
dependencies = [
|
||||
"celery[pytest] (>=5.4.0,<6.0.0)",
|
||||
"dj-rest-auth[with_social,jwt] (==7.0.1)",
|
||||
"django==5.1.10",
|
||||
"django-allauth[saml] (>=65.8.0,<66.0.0)",
|
||||
"django-celery-beat (>=2.7.0,<3.0.0)",
|
||||
"django-celery-results (>=2.5.1,<3.0.0)",
|
||||
"django-cors-headers==4.4.0",
|
||||
"django-environ==0.11.2",
|
||||
"django-filter==24.3",
|
||||
"django-guid==3.5.0",
|
||||
"django-postgres-extra (>=2.0.8,<3.0.0)",
|
||||
"djangorestframework==3.15.2",
|
||||
"djangorestframework-jsonapi==7.0.2",
|
||||
"djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
|
||||
"drf-nested-routers (>=0.94.1,<1.0.0)",
|
||||
"drf-spectacular==0.27.2",
|
||||
"drf-spectacular-jsonapi==0.5.1",
|
||||
"gunicorn==23.0.0",
|
||||
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.8",
|
||||
"psycopg2-binary==2.9.9",
|
||||
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
|
||||
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
|
||||
"uuid6==2024.7.10",
|
||||
"openai (>=1.82.0,<2.0.0)"
|
||||
]
|
||||
[tool.poetry]
|
||||
authors = ["Prowler Team"]
|
||||
description = "Prowler's API (Django/DRF)"
|
||||
license = "Apache-2.0"
|
||||
name = "prowler-api"
|
||||
package-mode = false
|
||||
# Needed for the SDK compatibility
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.9.1"
|
||||
version = "1.5.3"
|
||||
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
[tool.poetry.dependencies]
|
||||
celery = {extras = ["pytest"], version = "^5.4.0"}
|
||||
dj-rest-auth = {extras = ["with_social", "jwt"], version = "7.0.1"}
|
||||
django = "5.1.5"
|
||||
django-celery-beat = "^2.7.0"
|
||||
django-celery-results = "^2.5.1"
|
||||
django-cors-headers = "4.4.0"
|
||||
django-environ = "0.11.2"
|
||||
django-filter = "24.3"
|
||||
django-guid = "3.5.0"
|
||||
django-postgres-extra = "^2.0.8"
|
||||
djangorestframework = "3.15.2"
|
||||
djangorestframework-jsonapi = "7.0.2"
|
||||
djangorestframework-simplejwt = "^5.3.1"
|
||||
drf-nested-routers = "^0.94.1"
|
||||
drf-spectacular = "0.27.2"
|
||||
drf-spectacular-jsonapi = "0.5.1"
|
||||
gunicorn = "23.0.0"
|
||||
prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "v5.4"}
|
||||
psycopg2-binary = "2.9.9"
|
||||
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
|
||||
# Needed for prowler compatibility
|
||||
python = ">=3.11,<3.13"
|
||||
sentry-sdk = {extras = ["django"], version = "^2.20.0"}
|
||||
uuid6 = "2024.7.10"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
bandit = "1.7.9"
|
||||
@@ -47,7 +42,6 @@ coverage = "7.5.4"
|
||||
django-silk = "5.3.2"
|
||||
docker = "7.1.0"
|
||||
freezegun = "1.5.1"
|
||||
marshmallow = ">=3.15.0,<4.0.0"
|
||||
mypy = "1.10.1"
|
||||
pylint = "3.2.5"
|
||||
pytest = "8.2.2"
|
||||
@@ -60,3 +54,6 @@ ruff = "0.5.0"
|
||||
safety = "3.2.9"
|
||||
tqdm = "4.67.1"
|
||||
vulture = "2.14"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
@@ -17,8 +17,6 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
def pre_social_login(self, request, sociallogin):
|
||||
# Link existing accounts with the same email address
|
||||
email = sociallogin.account.extra_data.get("email")
|
||||
# if sociallogin.account.provider == "saml":
|
||||
# email = sociallogin.user.email
|
||||
if email:
|
||||
existing_user = self.get_user_by_email(email)
|
||||
if existing_user:
|
||||
@@ -31,75 +29,7 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
"""
|
||||
with transaction.atomic(using=MainRouter.admin_db):
|
||||
user = super().save_user(request, sociallogin, form)
|
||||
extra = sociallogin.account.extra_data
|
||||
|
||||
# if provider == "saml":
|
||||
# # Handle SAML-specific logic
|
||||
# user.first_name = (
|
||||
# extra.get("firstName", [""])[0] if extra.get("firstName") else ""
|
||||
# )
|
||||
# user.last_name = (
|
||||
# extra.get("lastName", [""])[0] if extra.get("lastName") else ""
|
||||
# )
|
||||
# user.company_name = (
|
||||
# extra.get("organization", [""])[0]
|
||||
# if extra.get("organization")
|
||||
# else ""
|
||||
# )
|
||||
# user.name = f"{user.first_name} {user.last_name}".strip()
|
||||
# if user.name == "":
|
||||
# user.name = "N/A"
|
||||
# user.save(using=MainRouter.admin_db)
|
||||
|
||||
# email_domain = user.email.split("@")[-1]
|
||||
# tenant = (
|
||||
# SAMLConfiguration.objects.using(MainRouter.admin_db)
|
||||
# .get(email_domain=email_domain)
|
||||
# .tenant
|
||||
# )
|
||||
|
||||
# with rls_transaction(str(tenant.id)):
|
||||
# role_name = (
|
||||
# extra.get("userType", ["saml_default_role"])[0].strip()
|
||||
# if extra.get("userType")
|
||||
# else "saml_default_role"
|
||||
# )
|
||||
|
||||
# try:
|
||||
# role = Role.objects.using(MainRouter.admin_db).get(
|
||||
# name=role_name, tenant_id=tenant.id
|
||||
# )
|
||||
# except Role.DoesNotExist:
|
||||
# role = Role.objects.using(MainRouter.admin_db).create(
|
||||
# name=role_name,
|
||||
# tenant_id=tenant.id,
|
||||
# manage_users=False,
|
||||
# manage_account=False,
|
||||
# manage_billing=False,
|
||||
# manage_providers=False,
|
||||
# manage_integrations=False,
|
||||
# manage_scans=False,
|
||||
# unlimited_visibility=False,
|
||||
# )
|
||||
|
||||
# Membership.objects.using(MainRouter.admin_db).create(
|
||||
# user=user,
|
||||
# tenant=tenant,
|
||||
# role=Membership.RoleChoices.MEMBER,
|
||||
# )
|
||||
|
||||
# UserRoleRelationship.objects.using(MainRouter.admin_db).create(
|
||||
# user=user,
|
||||
# role=role,
|
||||
# tenant_id=tenant.id,
|
||||
# )
|
||||
|
||||
# Handle other providers (e.g., GitHub, Google)
|
||||
user.save(using=MainRouter.admin_db)
|
||||
social_account_name = extra.get("name")
|
||||
if social_account_name:
|
||||
user.name = social_account_name
|
||||
user.save(using=MainRouter.admin_db)
|
||||
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(
|
||||
name=f"{user.email.split('@')[0]} default tenant"
|
||||
@@ -124,5 +54,4 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
return user
|
||||
|
||||
@@ -109,6 +109,16 @@ class BaseTenantViewset(BaseViewSet):
            pass  # Tenant might not exist, handle gracefully

    def initial(self, request, *args, **kwargs):
        if (
            request.resolver_match.url_name != "tenant-detail"
            and request.method != "DELETE"
        ):
            user_id = str(request.user.id)

            with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
                return super().initial(request, *args, **kwargs)

        # TODO: DRY this when we have time
        if request.auth is None:
            raise NotAuthenticated

@@ -116,8 +126,8 @@ class BaseTenantViewset(BaseViewSet):
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        user_id = str(request.user.id)
        with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)

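# Illustrative sketch (an assumption, not part of this diff): rls_transaction is
# expected to open an atomic block and set a per-request Postgres setting
# (the user id or tenant id) so the row-level-security policies can scope every
# query issued for the rest of the request, roughly along these lines:
#
#     from django.db import connection, transaction
#
#     with transaction.atomic():
#         with connection.cursor() as cursor:
#             cursor.execute("SELECT set_config(%s, %s, true)", [POSTGRES_USER_VAR, user_id])
#         ...  # queries in this block only see rows allowed by the RLS policies
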
@@ -1,38 +1,12 @@
from types import MappingProxyType

from api.models import Provider
from prowler.config.config import get_available_compliance_frameworks
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata

from api.models import Provider

PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
PROWLER_CHECKS = {}
AVAILABLE_COMPLIANCE_FRAMEWORKS = {}


def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[str]:
    """
    Retrieve and cache the list of available compliance frameworks for a specific cloud provider.

    This function lazily loads and caches the available compliance frameworks (e.g., CIS, MITRE, ISO)
    for each provider type (AWS, Azure, GCP, etc.) on first access. Subsequent calls for the same
    provider will return the cached result.

    Args:
        provider_type (Provider.ProviderChoices): The cloud provider type for which to retrieve
            available compliance frameworks (e.g., "aws", "azure", "gcp", "m365").

    Returns:
        list[str]: A list of framework identifiers (e.g., "cis_1.4_aws", "mitre_attack_azure") available
            for the given provider.
    """
    global AVAILABLE_COMPLIANCE_FRAMEWORKS
    if provider_type not in AVAILABLE_COMPLIANCE_FRAMEWORKS:
        AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = (
            get_available_compliance_frameworks(provider_type)
        )

    return AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type]
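
# Usage sketch (the framework ids are illustrative, not from this diff): the
# first call loads the list from prowler and caches it per provider type;
# repeated calls return the same cached list object without re-reading
# prowler's configuration.
#
#     frameworks = get_compliance_frameworks("aws")
#     assert get_compliance_frameworks("aws") is frameworks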
|
||||
|
||||
|
||||
def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
|
||||
@@ -190,16 +164,10 @@ def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
total_checks = len(requirement.Checks)
|
||||
checks_dict = {check: None for check in requirement.Checks}
|
||||
|
||||
req_status_val = "MANUAL" if total_checks == 0 else "PASS"
|
||||
|
||||
# Build requirement dictionary
|
||||
requirement_dict = {
|
||||
"name": requirement.Name or requirement.Id,
|
||||
"description": requirement.Description,
|
||||
"tactics": getattr(requirement, "Tactics", []),
|
||||
"subtechniques": getattr(requirement, "SubTechniques", []),
|
||||
"platforms": getattr(requirement, "Platforms", []),
|
||||
"technique_url": getattr(requirement, "TechniqueURL", ""),
|
||||
"attributes": [
|
||||
dict(attribute) for attribute in requirement.Attributes
|
||||
],
|
||||
@@ -210,18 +178,20 @@ def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
"manual": 0,
|
||||
"total": total_checks,
|
||||
},
|
||||
"status": req_status_val,
|
||||
"status": "PASS",
|
||||
}
|
||||
|
||||
# Update requirements status counts for the framework
|
||||
if req_status_val == "MANUAL":
|
||||
# Update requirements status
|
||||
if total_checks == 0:
|
||||
requirements_status["manual"] += 1
|
||||
elif req_status_val == "PASS":
|
||||
requirements_status["passed"] += 1
|
||||
|
||||
# Add requirement to compliance requirements
|
||||
compliance_requirements[requirement.Id] = requirement_dict
|
||||
|
||||
# Calculate pending requirements
|
||||
pending_requirements = total_requirements - requirements_status["manual"]
|
||||
requirements_status["passed"] = pending_requirements
|
||||
|
||||
# Build compliance dictionary
|
||||
compliance_dict = {
|
||||
"framework": compliance_data.Framework,
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import re
|
||||
import secrets
|
||||
import uuid
|
||||
from contextlib import contextmanager
|
||||
@@ -153,28 +152,6 @@ def delete_related_daily_task(provider_id: str):
|
||||
PeriodicTask.objects.filter(name=task_name).delete()
|
||||
|
||||
|
||||
def create_objects_in_batches(
|
||||
tenant_id: str, model, objects: list, batch_size: int = 500
|
||||
):
|
||||
"""
|
||||
Bulk-create model instances in repeated, per-tenant RLS transactions.
|
||||
|
||||
All chunks execute in their own transaction, so no single transaction
|
||||
grows too large.
|
||||
|
||||
Args:
|
||||
tenant_id (str): UUID string of the tenant under which to set RLS.
|
||||
model: Django model class whose `.objects.bulk_create()` will be called.
|
||||
objects (list): List of model instances (unsaved) to bulk-create.
|
||||
batch_size (int): Maximum number of objects per bulk_create call.
|
||||
"""
|
||||
total = len(objects)
|
||||
for i in range(0, total, batch_size):
|
||||
chunk = objects[i : i + batch_size]
|
||||
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
|
||||
model.objects.bulk_create(chunk, batch_size)
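
# Usage sketch (the model and field values are assumptions, not from this diff):
# the unsaved instances are split into chunks of `batch_size`, and each chunk is
# inserted inside its own tenant-scoped RLS transaction so no single transaction
# grows too large.
#
#     findings = [Finding(tenant_id=tenant_id, uid=f"finding-{i}") for i in range(5000)]
#     create_objects_in_batches(tenant_id, Finding, findings, batch_size=500)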
|
||||
|
||||
|
||||
# Postgres Enums
|
||||
|
||||
|
||||
@@ -250,167 +227,6 @@ def register_enum(apps, schema_editor, enum_class): # noqa: F841
|
||||
register_adapter(enum_class, enum_adapter)
|
||||
|
||||
|
||||
def _should_create_index_on_partition(
|
||||
partition_name: str, all_partitions: bool = False
|
||||
) -> bool:
|
||||
"""
|
||||
Determine if we should create an index on this partition.
|
||||
|
||||
Args:
|
||||
partition_name: The name of the partition (e.g., "findings_2025_aug", "findings_default")
|
||||
all_partitions: If True, create on all partitions. If False, only current/future partitions.
|
||||
|
||||
Returns:
|
||||
bool: True if index should be created on this partition, False otherwise.
|
||||
"""
|
||||
if all_partitions:
|
||||
return True
|
||||
|
||||
# Extract date from partition name if it follows the pattern
|
||||
# Partition names look like: findings_2025_aug, findings_2025_jul, etc.
|
||||
date_pattern = r"(\d{4})_([a-z]{3})$"
|
||||
match = re.search(date_pattern, partition_name)
|
||||
|
||||
if not match:
|
||||
# If we can't parse the date, include it to be safe (e.g., default partition)
|
||||
return True
|
||||
|
||||
try:
|
||||
year_str, month_abbr = match.groups()
|
||||
year = int(year_str)
|
||||
|
||||
# Map month abbreviations to numbers
|
||||
month_map = {
|
||||
"jan": 1,
|
||||
"feb": 2,
|
||||
"mar": 3,
|
||||
"apr": 4,
|
||||
"may": 5,
|
||||
"jun": 6,
|
||||
"jul": 7,
|
||||
"aug": 8,
|
||||
"sep": 9,
|
||||
"oct": 10,
|
||||
"nov": 11,
|
||||
"dec": 12,
|
||||
}
|
||||
|
||||
month = month_map.get(month_abbr.lower())
|
||||
if month is None:
|
||||
# Unknown month abbreviation, include it to be safe
|
||||
return True
|
||||
|
||||
partition_date = datetime(year, month, 1, tzinfo=timezone.utc)
|
||||
|
||||
# Get current month start
|
||||
now = datetime.now(timezone.utc)
|
||||
current_month_start = now.replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
|
||||
# Include current month and future partitions
|
||||
return partition_date >= current_month_start
|
||||
|
||||
except (ValueError, TypeError):
|
||||
# If date parsing fails, include it to be safe
|
||||
return True
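
# Behaviour sketch (partition names are illustrative; assumes "now" falls
# between 2020 and 2099):
#
#     _should_create_index_on_partition("findings_2099_jan")    # future month -> True
#     _should_create_index_on_partition("findings_2020_jan")    # past month   -> False
#     _should_create_index_on_partition("findings_default")     # unparsable   -> True (safe default)
#     _should_create_index_on_partition("findings_2020_jan", all_partitions=True)  # -> True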
|
||||
|
||||
|
||||
def create_index_on_partitions(
|
||||
apps, # noqa: F841
|
||||
schema_editor,
|
||||
parent_table: str,
|
||||
index_name: str,
|
||||
columns: str,
|
||||
method: str = "BTREE",
|
||||
where: str = "",
|
||||
all_partitions: bool = True,
|
||||
):
|
||||
"""
|
||||
Create an index on existing partitions of `parent_table`.
|
||||
|
||||
Args:
|
||||
parent_table: The name of the root table (e.g. "findings").
|
||||
index_name: A short name for the index (will be prefixed per-partition).
|
||||
columns: The parenthesized column list, e.g. "tenant_id, scan_id, status".
|
||||
method: The index method—BTREE, GIN, etc. Defaults to BTREE.
|
||||
where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
|
||||
all_partitions: Whether to create indexes on all partitions or just current/future ones.
|
||||
Defaults to False (current/future only) to avoid maintenance overhead
|
||||
on old partitions where the index may not be needed.
|
||||
|
||||
Examples:
|
||||
# Create index only on current and future partitions (recommended for new indexes)
|
||||
create_index_on_partitions(
|
||||
apps, schema_editor,
|
||||
parent_table="findings",
|
||||
index_name="new_performance_idx",
|
||||
columns="tenant_id, status, severity",
|
||||
all_partitions=False # Default behavior
|
||||
)
|
||||
|
||||
# Create index on all partitions (use when migrating existing critical indexes)
|
||||
create_index_on_partitions(
|
||||
apps, schema_editor,
|
||||
parent_table="findings",
|
||||
index_name="critical_existing_idx",
|
||||
columns="tenant_id, scan_id",
|
||||
all_partitions=True
|
||||
)
|
||||
"""
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
where_sql = f" WHERE {where}" if where else ""
|
||||
for partition in partitions:
|
||||
if _should_create_index_on_partition(partition, all_partitions):
|
||||
idx_name = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = (
|
||||
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
|
||||
f"ON {partition} USING {method} ({columns})"
|
||||
f"{where_sql};"
|
||||
)
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def drop_index_on_partitions(
|
||||
apps, # noqa: F841
|
||||
schema_editor,
|
||||
parent_table: str,
|
||||
index_name: str,
|
||||
):
|
||||
"""
|
||||
Drop the per-partition indexes that were created by create_index_on_partitions.
|
||||
|
||||
Args:
|
||||
parent_table: The name of the root table (e.g. "findings").
|
||||
index_name: The same short name used when creating them.
|
||||
"""
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
for partition in partitions:
|
||||
idx_name = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {idx_name};"
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
# Postgres enum definition for member role
|
||||
|
||||
|
||||
@@ -517,15 +333,3 @@ class InvitationStateEnum(EnumType):
|
||||
class InvitationStateEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("invitation_state", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Integration type
|
||||
|
||||
|
||||
class IntegrationTypeEnum(EnumType):
|
||||
enum_type_name = "integration_type"
|
||||
|
||||
|
||||
class IntegrationTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("integration_type", *args, **kwargs)
|
||||
|
||||
@@ -3,7 +3,7 @@ from rest_framework import status
|
||||
from rest_framework.exceptions import APIException
|
||||
from rest_framework_json_api.exceptions import exception_handler
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
|
||||
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken
|
||||
|
||||
|
||||
class ModelValidationError(ValidationError):
|
||||
@@ -32,36 +32,6 @@ class InvitationTokenExpiredException(APIException):
|
||||
default_code = "token_expired"
|
||||
|
||||
|
||||
# Task Management Exceptions (non-HTTP)
|
||||
class TaskManagementError(Exception):
|
||||
"""Base exception for task management errors."""
|
||||
|
||||
def __init__(self, task=None):
|
||||
self.task = task
|
||||
super().__init__()
|
||||
|
||||
|
||||
class TaskFailedException(TaskManagementError):
|
||||
"""Raised when a task has failed."""
|
||||
|
||||
|
||||
class TaskNotFoundException(TaskManagementError):
|
||||
"""Raised when a task is not found."""
|
||||
|
||||
|
||||
class TaskInProgressException(TaskManagementError):
|
||||
"""Raised when a task is running but there's no related Task object to return."""
|
||||
|
||||
def __init__(self, task_result=None):
|
||||
self.task_result = task_result
|
||||
super().__init__()
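
# Handling sketch (the helper and response shape are hypothetical, not from this
# diff): these exceptions are plain Python exceptions that carry the task object
# (or the raw task_result) so callers can decide which HTTP response to build.
#
#     try:
#         task = check_task_status(task_id)  # hypothetical helper
#     except TaskInProgressException as exc:
#         return Response(status=202, data={"state": str(exc.task_result)})
#     except TaskFailedException as exc:
#         return Response(status=500, data={"task": str(exc.task)})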
|
||||
|
||||
|
||||
# Provider connection errors
|
||||
class ProviderConnectionError(Exception):
|
||||
"""Base exception for provider connection errors."""
|
||||
|
||||
|
||||
def custom_exception_handler(exc, context):
|
||||
if isinstance(exc, django_validation_error):
|
||||
if hasattr(exc, "error_dict"):
|
||||
@@ -69,12 +39,7 @@ def custom_exception_handler(exc, context):
|
||||
else:
|
||||
exc = ValidationError(detail=exc.messages[0], code=exc.code)
|
||||
elif isinstance(exc, (TokenError, InvalidToken)):
|
||||
if (
|
||||
hasattr(exc, "detail")
|
||||
and isinstance(exc.detail, dict)
|
||||
and "messages" in exc.detail
|
||||
):
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
return exception_handler(exc, context)
|
||||
|
||||
@@ -22,9 +22,8 @@ from api.db_utils import (
|
||||
StatusEnumField,
|
||||
)
|
||||
from api.models import (
|
||||
ComplianceRequirementOverview,
|
||||
ComplianceOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
Invitation,
|
||||
Membership,
|
||||
PermissionChoices,
|
||||
@@ -81,114 +80,6 @@ class ChoiceInFilter(BaseInFilter, ChoiceFilter):
|
||||
pass
|
||||
|
||||
|
||||
class CommonFindingFilters(FilterSet):
|
||||
# We filter providers from the scan in findings
|
||||
provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
|
||||
provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
|
||||
provider_uid__icontains = CharFilter(
|
||||
field_name="scan__provider__uid", lookup_expr="icontains"
|
||||
)
|
||||
provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
|
||||
provider_alias__in = CharInFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="in"
|
||||
)
|
||||
provider_alias__icontains = CharFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
|
||||
uid = CharFilter(field_name="uid")
|
||||
delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
|
||||
status = ChoiceFilter(choices=StatusChoices.choices)
|
||||
severity = ChoiceFilter(choices=SeverityChoices)
|
||||
impact = ChoiceFilter(choices=SeverityChoices)
|
||||
muted = BooleanFilter(
|
||||
help_text="If this filter is not provided, muted and non-muted findings will be returned."
|
||||
)
|
||||
|
||||
resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
|
||||
|
||||
region = CharFilter(method="filter_resource_region")
|
||||
region__in = CharInFilter(field_name="resource_regions", lookup_expr="overlap")
|
||||
region__icontains = CharFilter(
|
||||
field_name="resource_regions", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
service = CharFilter(method="filter_resource_service")
|
||||
service__in = CharInFilter(field_name="resource_services", lookup_expr="overlap")
|
||||
service__icontains = CharFilter(
|
||||
field_name="resource_services", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_uid = CharFilter(field_name="resources__uid")
|
||||
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
|
||||
resource_uid__icontains = CharFilter(
|
||||
field_name="resources__uid", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_name = CharFilter(field_name="resources__name")
|
||||
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
|
||||
resource_name__icontains = CharFilter(
|
||||
field_name="resources__name", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_type = CharFilter(method="filter_resource_type")
|
||||
resource_type__in = CharInFilter(field_name="resource_types", lookup_expr="overlap")
|
||||
resource_type__icontains = CharFilter(
|
||||
field_name="resources__type", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
# Temporarily disabled until we implement tag filtering in the UI
|
||||
# resource_tag_key = CharFilter(field_name="resources__tags__key")
|
||||
# resource_tag_key__in = CharInFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_key__icontains = CharFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tag_value = CharFilter(field_name="resources__tags__value")
|
||||
# resource_tag_value__in = CharInFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_value__icontains = CharFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tags = CharInFilter(
|
||||
# method="filter_resource_tag",
|
||||
# lookup_expr="in",
|
||||
# help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
|
||||
# "separated by commas.",
|
||||
# )
|
||||
|
||||
def filter_resource_service(self, queryset, name, value):
|
||||
return queryset.filter(resource_services__contains=[value])
|
||||
|
||||
def filter_resource_region(self, queryset, name, value):
|
||||
return queryset.filter(resource_regions__contains=[value])
|
||||
|
||||
def filter_resource_type(self, queryset, name, value):
|
||||
return queryset.filter(resource_types__contains=[value])
|
||||
|
||||
def filter_resource_tag(self, queryset, name, value):
|
||||
overall_query = Q()
|
||||
for key_value_pair in value:
|
||||
tag_key, tag_value = key_value_pair.split(":", 1)
|
||||
overall_query |= Q(
|
||||
resources__tags__key__icontains=tag_key,
|
||||
resources__tags__value__icontains=tag_value,
|
||||
)
|
||||
return queryset.filter(overall_query).distinct()
|
||||
|
||||
|
||||
class TenantFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
@@ -365,7 +256,91 @@ class ResourceFilter(ProviderRelationshipFilterSet):
|
||||
return queryset.filter(tags__text_search=value)
|
||||
|
||||
|
||||
class FindingFilter(CommonFindingFilters):
|
||||
class FindingFilter(FilterSet):
|
||||
# We filter providers from the scan in findings
|
||||
provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
|
||||
provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
|
||||
provider_uid__icontains = CharFilter(
|
||||
field_name="scan__provider__uid", lookup_expr="icontains"
|
||||
)
|
||||
provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
|
||||
provider_alias__in = CharInFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="in"
|
||||
)
|
||||
provider_alias__icontains = CharFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
|
||||
uid = CharFilter(field_name="uid")
|
||||
delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
|
||||
status = ChoiceFilter(choices=StatusChoices.choices)
|
||||
severity = ChoiceFilter(choices=SeverityChoices)
|
||||
impact = ChoiceFilter(choices=SeverityChoices)
|
||||
|
||||
resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
|
||||
|
||||
region = CharFilter(field_name="resources__region")
|
||||
region__in = CharInFilter(field_name="resources__region", lookup_expr="in")
|
||||
region__icontains = CharFilter(
|
||||
field_name="resources__region", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
service = CharFilter(field_name="resources__service")
|
||||
service__in = CharInFilter(field_name="resources__service", lookup_expr="in")
|
||||
service__icontains = CharFilter(
|
||||
field_name="resources__service", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_uid = CharFilter(field_name="resources__uid")
|
||||
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
|
||||
resource_uid__icontains = CharFilter(
|
||||
field_name="resources__uid", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_name = CharFilter(field_name="resources__name")
|
||||
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
|
||||
resource_name__icontains = CharFilter(
|
||||
field_name="resources__name", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_type = CharFilter(field_name="resources__type")
|
||||
resource_type__in = CharInFilter(field_name="resources__type", lookup_expr="in")
|
||||
resource_type__icontains = CharFilter(
|
||||
field_name="resources__type", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
# Temporarily disabled until we implement tag filtering in the UI
|
||||
# resource_tag_key = CharFilter(field_name="resources__tags__key")
|
||||
# resource_tag_key__in = CharInFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_key__icontains = CharFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tag_value = CharFilter(field_name="resources__tags__value")
|
||||
# resource_tag_value__in = CharInFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_value__icontains = CharFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tags = CharInFilter(
|
||||
# method="filter_resource_tag",
|
||||
# lookup_expr="in",
|
||||
# help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
|
||||
# "separated by commas.",
|
||||
# )
|
||||
|
||||
scan = UUIDFilter(method="filter_scan_id")
|
||||
scan__in = UUIDInFilter(method="filter_scan_id_in")
|
||||
|
||||
@@ -406,15 +381,6 @@ class FindingFilter(CommonFindingFilters):
|
||||
},
|
||||
}
|
||||
|
||||
def filter_resource_type(self, queryset, name, value):
|
||||
return queryset.filter(resource_types__contains=[value])
|
||||
|
||||
def filter_resource_region(self, queryset, name, value):
|
||||
return queryset.filter(resource_regions__contains=[value])
|
||||
|
||||
def filter_resource_service(self, queryset, name, value):
|
||||
return queryset.filter(resource_services__contains=[value])
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
|
||||
self.data.get("inserted_at")
|
||||
@@ -533,6 +499,16 @@ class FindingFilter(CommonFindingFilters):
|
||||
|
||||
return queryset.filter(id__lt=end)
|
||||
|
||||
def filter_resource_tag(self, queryset, name, value):
|
||||
overall_query = Q()
|
||||
for key_value_pair in value:
|
||||
tag_key, tag_value = key_value_pair.split(":", 1)
|
||||
overall_query |= Q(
|
||||
resources__tags__key__icontains=tag_key,
|
||||
resources__tags__value__icontains=tag_value,
|
||||
)
|
||||
return queryset.filter(overall_query).distinct()
|
||||
|
||||
@staticmethod
|
||||
def maybe_date_to_datetime(value):
|
||||
dt = value
|
||||
@@ -541,31 +517,6 @@ class FindingFilter(CommonFindingFilters):
|
||||
return dt
|
||||
|
||||
|
||||
class LatestFindingFilter(CommonFindingFilters):
|
||||
class Meta:
|
||||
model = Finding
|
||||
fields = {
|
||||
"id": ["exact", "in"],
|
||||
"uid": ["exact", "in"],
|
||||
"delta": ["exact", "in"],
|
||||
"status": ["exact", "in"],
|
||||
"severity": ["exact", "in"],
|
||||
"impact": ["exact", "in"],
|
||||
"check_id": ["exact", "in", "icontains"],
|
||||
}
|
||||
filter_overrides = {
|
||||
FindingDeltaEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
StatusEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
SeverityEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class ProviderSecretFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
@@ -637,11 +588,12 @@ class RoleFilter(FilterSet):
|
||||
|
||||
class ComplianceOverviewFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
scan_id = UUIDFilter(field_name="scan_id")
|
||||
region = CharFilter(field_name="region")
|
||||
provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
|
||||
provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
|
||||
scan_id = UUIDFilter(field_name="scan__id")
|
||||
|
||||
class Meta:
|
||||
model = ComplianceRequirementOverview
|
||||
model = ComplianceOverview
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
"compliance_id": ["exact", "icontains"],
|
||||
@@ -661,6 +613,12 @@ class ScanSummaryFilter(FilterSet):
|
||||
field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
|
||||
)
|
||||
region = CharFilter(field_name="region")
|
||||
muted_findings = BooleanFilter(method="filter_muted_findings")
|
||||
|
||||
def filter_muted_findings(self, queryset, name, value):
|
||||
if not value:
|
||||
return queryset.exclude(muted__gt=0)
|
||||
return queryset
|
||||
|
||||
class Meta:
|
||||
model = ScanSummary
|
||||
@@ -671,6 +629,8 @@ class ScanSummaryFilter(FilterSet):
|
||||
|
||||
|
||||
class ServiceOverviewFilter(ScanSummaryFilter):
|
||||
muted_findings = None
|
||||
|
||||
def is_valid(self):
|
||||
# Check if at least one of the inserted_at filters is present
|
||||
inserted_at_filters = [
|
||||
@@ -688,19 +648,3 @@ class ServiceOverviewFilter(ScanSummaryFilter):
|
||||
}
|
||||
)
|
||||
return super().is_valid()
|
||||
|
||||
|
||||
class IntegrationFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
|
||||
integration_type__in = ChoiceInFilter(
|
||||
choices=Integration.IntegrationChoices.choices,
|
||||
field_name="integration_type",
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
}
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
from django.contrib.sites.models import Site
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
|
||||
from django.db.migrations.recorder import MigrationRecorder
|
||||
|
||||
|
||||
def table_exists(table_name):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM information_schema.tables
|
||||
WHERE table_name = %s
|
||||
)
|
||||
""",
|
||||
[table_name],
|
||||
)
|
||||
return cursor.fetchone()[0]
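
# Usage sketch: the helper only reports whether the table is visible in
# information_schema.tables for the default connection; the command below uses
# it before recreating allauth's through table.
#
#     if not table_exists("socialaccount_socialapp_sites"):
#         ...  # create it with schema_editor.create_model(...)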
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Fix migration inconsistency between socialaccount and sites"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--database",
|
||||
default=DEFAULT_DB_ALIAS,
|
||||
help="Specifies the database to operate on.",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
db = options["database"]
|
||||
connection = connections[db]
|
||||
recorder = MigrationRecorder(connection)
|
||||
|
||||
applied = set(recorder.applied_migrations())
|
||||
|
||||
has_social = ("socialaccount", "0001_initial") in applied
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_name = 'django_site'
|
||||
);
|
||||
"""
|
||||
)
|
||||
site_table_exists = cursor.fetchone()[0]
|
||||
|
||||
if has_social and not site_table_exists:
|
||||
self.stdout.write(
|
||||
f"Detected inconsistency in '{db}'. Creating 'django_site' table manually..."
|
||||
)
|
||||
|
||||
with transaction.atomic(using=db):
|
||||
with connection.schema_editor() as schema_editor:
|
||||
schema_editor.create_model(Site)
|
||||
|
||||
recorder.record_applied("sites", "0001_initial")
|
||||
recorder.record_applied("sites", "0002_alter_domain_unique")
|
||||
|
||||
self.stdout.write(
|
||||
"Fixed: 'django_site' table created and migrations registered."
|
||||
)
|
||||
|
||||
# Ensure the relationship table also exists
|
||||
if not table_exists("socialaccount_socialapp_sites"):
|
||||
self.stdout.write(
|
||||
"Detected missing 'socialaccount_socialapp_sites' table. Creating manually..."
|
||||
)
|
||||
with connection.schema_editor() as schema_editor:
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
schema_editor.create_model(
|
||||
SocialApp._meta.get_field("sites").remote_field.through
|
||||
)
|
||||
self.stdout.write(
|
||||
"Fixed: 'socialaccount_socialapp_sites' table created."
|
||||
)
|
||||
@@ -12,7 +12,6 @@ from api.models import (
|
||||
Provider,
|
||||
Resource,
|
||||
ResourceFindingMapping,
|
||||
ResourceScanSummary,
|
||||
Scan,
|
||||
StatusChoices,
|
||||
)
|
||||
@@ -134,7 +133,6 @@ class Command(BaseCommand):
|
||||
region=random.choice(possible_regions),
|
||||
service=random.choice(possible_services),
|
||||
type=random.choice(possible_types),
|
||||
inserted_at="2024-10-01T00:00:00Z",
|
||||
)
|
||||
)
|
||||
|
||||
@@ -183,10 +181,6 @@ class Command(BaseCommand):
|
||||
"servicename": assigned_resource.service,
|
||||
"resourcetype": assigned_resource.type,
|
||||
},
|
||||
resource_types=[assigned_resource.type],
|
||||
resource_regions=[assigned_resource.region],
|
||||
resource_services=[assigned_resource.service],
|
||||
inserted_at="2024-10-01T00:00:00Z",
|
||||
)
|
||||
)
|
||||
|
||||
@@ -203,22 +197,12 @@ class Command(BaseCommand):
|
||||
|
||||
# Create ResourceFindingMapping
|
||||
mappings = []
|
||||
scan_resource_cache: set[tuple] = set()
|
||||
for index, finding_instance in enumerate(findings):
|
||||
resource_instance = resources[findings_resources_mapping[index]]
|
||||
for index, f in enumerate(findings):
|
||||
mappings.append(
|
||||
ResourceFindingMapping(
|
||||
tenant_id=tenant_id,
|
||||
resource=resource_instance,
|
||||
finding=finding_instance,
|
||||
)
|
||||
)
|
||||
scan_resource_cache.add(
|
||||
(
|
||||
str(resource_instance.id),
|
||||
resource_instance.service,
|
||||
resource_instance.region,
|
||||
resource_instance.type,
|
||||
resource=resources[findings_resources_mapping[index]],
|
||||
finding=f,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -236,38 +220,6 @@ class Command(BaseCommand):
|
||||
"Resource-finding mappings created successfully.\n\n"
|
||||
)
|
||||
)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan.progress = 99
|
||||
scan.save()
|
||||
|
||||
self.stdout.write(self.style.WARNING("Creating finding filter values..."))
|
||||
resource_scan_summaries = [
|
||||
ResourceScanSummary(
|
||||
tenant_id=tenant_id,
|
||||
scan_id=str(scan.id),
|
||||
resource_id=resource_id,
|
||||
service=service,
|
||||
region=region,
|
||||
resource_type=resource_type,
|
||||
)
|
||||
for resource_id, service, region, resource_type in scan_resource_cache
|
||||
]
|
||||
num_batches = ceil(len(resource_scan_summaries) / batch_size)
|
||||
with rls_transaction(tenant_id):
|
||||
for i in tqdm(
|
||||
range(0, len(resource_scan_summaries), batch_size),
|
||||
total=num_batches,
|
||||
):
|
||||
with rls_transaction(tenant_id):
|
||||
ResourceScanSummary.objects.bulk_create(
|
||||
resource_scan_summaries[i : i + batch_size],
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Finding filter values created successfully.\n\n")
|
||||
)
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f"Failed to populate test data: {e}"))
|
||||
scan_state = "failed"
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import IntegrationTypeEnum, PostgresEnumMigration, register_enum
|
||||
from api.models import Integration
|
||||
|
||||
IntegrationTypeEnumMigration = PostgresEnumMigration(
|
||||
enum_name="integration_type",
|
||||
enum_values=tuple(
|
||||
integration_type[0]
|
||||
for integration_type in Integration.IntegrationChoices.choices
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0012_scan_report_output"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
IntegrationTypeEnumMigration.create_enum_type,
|
||||
reverse_code=IntegrationTypeEnumMigration.drop_enum_type,
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(register_enum, enum_class=IntegrationTypeEnum),
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
]
|
||||
@@ -1,131 +0,0 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0013_integrations_enum"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Integration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("enabled", models.BooleanField(default=False)),
|
||||
("connected", models.BooleanField(blank=True, null=True)),
|
||||
(
|
||||
"connection_last_checked_at",
|
||||
models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
(
|
||||
"integration_type",
|
||||
api.db_utils.IntegrationTypeEnumField(
|
||||
choices=[
|
||||
("amazon_s3", "Amazon S3"),
|
||||
("saml", "SAML"),
|
||||
("aws_security_hub", "AWS Security Hub"),
|
||||
("jira", "JIRA"),
|
||||
("slack", "Slack"),
|
||||
]
|
||||
),
|
||||
),
|
||||
("configuration", models.JSONField(default=dict)),
|
||||
("_credentials", models.BinaryField(db_column="credentials")),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={"db_table": "integrations", "abstract": False},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="integration",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_integration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="IntegrationProviderRelationship",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"integration",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="api.integration",
|
||||
),
|
||||
),
|
||||
(
|
||||
"provider",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.provider"
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "integration_provider_mappings",
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("integration_id", "provider_id"),
|
||||
name="unique_integration_provider_rel",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="IntegrationProviderRelationship",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_integrationproviderrelationship",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="integration",
|
||||
name="providers",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="integrations",
|
||||
through="api.IntegrationProviderRelationship",
|
||||
to="api.provider",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,26 +0,0 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-25 11:29
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0014_integrations"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="muted",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="finding",
|
||||
name="status",
|
||||
field=api.db_utils.StatusEnumField(
|
||||
choices=[("FAIL", "Fail"), ("PASS", "Pass"), ("MANUAL", "Manual")]
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,32 +0,0 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-31 10:46
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0015_finding_muted"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="compliance",
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="details",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="metadata",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="partition",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,32 +0,0 @@
|
||||
# Generated by Django 5.1.7 on 2025-04-16 08:47
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
import api.db_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0016_finding_compliance_resource_details_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="provider",
|
||||
name="provider",
|
||||
field=api.db_utils.ProviderEnumField(
|
||||
choices=[
|
||||
("aws", "AWS"),
|
||||
("azure", "Azure"),
|
||||
("gcp", "GCP"),
|
||||
("kubernetes", "Kubernetes"),
|
||||
("m365", "M365"),
|
||||
],
|
||||
default="aws",
|
||||
),
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"ALTER TYPE provider ADD VALUE IF NOT EXISTS 'm365';",
|
||||
reverse_sql=migrations.RunSQL.noop,
|
||||
),
|
||||
]
|
||||
@@ -1,81 +0,0 @@
|
||||
# Generated by Django 5.1.7 on 2025-05-05 10:01
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
import uuid6
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0017_m365_provider"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ResourceScanSummary",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("scan_id", models.UUIDField(db_index=True, default=uuid6.uuid7)),
|
||||
("resource_id", models.UUIDField(db_index=True, default=uuid.uuid4)),
|
||||
("service", models.CharField(max_length=100)),
|
||||
("region", models.CharField(max_length=100)),
|
||||
("resource_type", models.CharField(max_length=100)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "resource_scan_summaries",
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "service"],
|
||||
name="rss_tenant_scan_svc_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region"],
|
||||
name="rss_tenant_scan_reg_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "resource_type"],
|
||||
name="rss_tenant_scan_type_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region", "service"],
|
||||
name="rss_tenant_scan_reg_svc_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "service", "resource_type"],
|
||||
name="rss_tenant_scan_svc_type_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region", "resource_type"],
|
||||
name="rss_tenant_scan_reg_type_idx",
|
||||
),
|
||||
],
|
||||
"unique_together": {("tenant_id", "scan_id", "resource_id")},
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="resourcescansummary",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_resourcescansummary",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,42 +0,0 @@
|
||||
import django.contrib.postgres.fields
|
||||
import django.contrib.postgres.indexes
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0018_resource_scan_summaries"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="resource_regions",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="resource_services",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="resource_types",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,86 +0,0 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0019_finding_denormalize_resource_fields"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_service_idx",
|
||||
columns="resource_services",
|
||||
method="GIN",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_service_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_region_idx",
|
||||
columns="resource_regions",
|
||||
method="GIN",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_region_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_rtype_idx",
|
||||
columns="resource_types",
|
||||
method="GIN",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_rtype_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_uid_idx",
|
||||
),
|
||||
reverse_code=partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_uid_idx",
|
||||
columns="uid",
|
||||
method="BTREE",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_filter_idx",
|
||||
),
|
||||
reverse_code=partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_filter_idx",
|
||||
columns="scan_id, impact, severity, status, check_id, delta",
|
||||
method="BTREE",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,37 +0,0 @@
|
||||
import django.contrib.postgres.indexes
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0020_findings_new_performance_indexes_partitions"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
fields=["resource_services"], name="gin_find_service_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
fields=["resource_regions"], name="gin_find_region_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
fields=["resource_types"], name="gin_find_rtype_idx"
|
||||
),
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="finding",
|
||||
name="findings_uid_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="finding",
|
||||
name="findings_filter_idx",
|
||||
),
|
||||
]
|
||||
@@ -1,38 +0,0 @@
|
||||
# Generated by Django 5.1.8 on 2025-05-12 10:04
|
||||
|
||||
from django.contrib.postgres.operations import AddIndexConcurrently
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0021_findings_new_performance_indexes_parent"),
|
||||
("django_celery_beat", "0019_alter_periodictasks_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
AddIndexConcurrently(
|
||||
model_name="scan",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state", "completed")),
|
||||
fields=["tenant_id", "provider_id", "state", "-inserted_at"],
|
||||
name="scans_prov_state_ins_desc_idx",
|
||||
),
|
||||
),
|
||||
AddIndexConcurrently(
|
||||
model_name="scansummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id", "service"],
|
||||
name="ss_tenant_scan_service_idx",
|
||||
),
|
||||
),
|
||||
AddIndexConcurrently(
|
||||
model_name="scansummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id", "severity"],
|
||||
name="ss_tenant_scan_severity_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,28 +0,0 @@
|
||||
# Generated by Django 5.1.8 on 2025-05-12 10:18
|
||||
|
||||
from django.contrib.postgres.operations import AddIndexConcurrently
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0022_scan_summaries_performance_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
AddIndexConcurrently(
|
||||
model_name="resource",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "id"], name="resources_tenant_id_idx"
|
||||
),
|
||||
),
|
||||
AddIndexConcurrently(
|
||||
model_name="resource",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "provider_id"],
|
||||
name="resources_tenant_provider_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,29 +0,0 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0023_resources_lookup_optimization"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_uid_inserted_idx",
|
||||
columns="tenant_id, uid, inserted_at DESC",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_uid_inserted_idx",
|
||||
),
|
||||
)
|
||||
]
|
||||
@@ -1,17 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0024_findings_uid_index_partitions"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "uid", "-inserted_at"],
|
||||
name="find_tenant_uid_inserted_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,14 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0025_findings_uid_index_parent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
"ALTER TYPE provider_secret_type ADD VALUE IF NOT EXISTS 'service_account';",
|
||||
reverse_sql=migrations.RunSQL.noop,
|
||||
),
|
||||
]
|
||||
@@ -1,124 +0,0 @@
|
||||
# Generated by Django 5.1.8 on 2025-05-21 11:37
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0026_provider_secret_gcp_service_account"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ComplianceRequirementOverview",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("compliance_id", models.TextField(blank=False)),
|
||||
("framework", models.TextField(blank=False)),
|
||||
("version", models.TextField(blank=True)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("region", models.TextField(blank=False)),
|
||||
("requirement_id", models.TextField(blank=False)),
|
||||
(
|
||||
"requirement_status",
|
||||
api.db_utils.StatusEnumField(
|
||||
choices=[
|
||||
("FAIL", "Fail"),
|
||||
("PASS", "Pass"),
|
||||
("MANUAL", "Manual"),
|
||||
]
|
||||
),
|
||||
),
|
||||
("passed_checks", models.IntegerField(default=0)),
|
||||
("failed_checks", models.IntegerField(default=0)),
|
||||
("total_checks", models.IntegerField(default=0)),
|
||||
(
|
||||
"scan",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="compliance_requirements_overviews",
|
||||
related_query_name="compliance_requirements_overview",
|
||||
to="api.scan",
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "compliance_requirements_overviews",
|
||||
"abstract": False,
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id"], name="cro_tenant_scan_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "compliance_id"],
|
||||
name="cro_scan_comp_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "compliance_id", "region"],
|
||||
name="cro_scan_comp_reg_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=[
|
||||
"tenant_id",
|
||||
"scan_id",
|
||||
"compliance_id",
|
||||
"requirement_id",
|
||||
],
|
||||
name="cro_scan_comp_req_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=[
|
||||
"tenant_id",
|
||||
"scan_id",
|
||||
"compliance_id",
|
||||
"requirement_id",
|
||||
"region",
|
||||
],
|
||||
name="cro_scan_comp_req_reg_idx",
|
||||
),
|
||||
],
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=(
|
||||
"tenant_id",
|
||||
"scan_id",
|
||||
"compliance_id",
|
||||
"requirement_id",
|
||||
"region",
|
||||
),
|
||||
name="unique_tenant_compliance_requirement_overview",
|
||||
)
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="ComplianceRequirementOverview",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_compliancerequirementoverview",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
@@ -1,29 +0,0 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0027_compliance_requirement_overviews"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_scan_check_idx",
                columns="tenant_id, scan_id, check_id",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_scan_check_idx",
            ),
        )
    ]
@@ -1,17 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0028_findings_check_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "scan_id", "check_id"],
                name="find_tenant_scan_check_idx",
            ),
        ),
    ]
@@ -1,107 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-12 12:45

import uuid

import django.core.validators
import django.db.models.deletion
from django.db import migrations, models

import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0029_findings_check_index_parent"),
    ]

    operations = [
        migrations.CreateModel(
            name="LighthouseConfiguration",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "name",
                    models.CharField(
                        help_text="Name of the configuration",
                        max_length=100,
                        validators=[django.core.validators.MinLengthValidator(3)],
                    ),
                ),
                (
                    "api_key",
                    models.BinaryField(
                        help_text="Encrypted API key for the LLM service"
                    ),
                ),
                (
                    "model",
                    models.CharField(
                        choices=[
                            ("gpt-4o-2024-11-20", "GPT-4o v2024-11-20"),
                            ("gpt-4o-2024-08-06", "GPT-4o v2024-08-06"),
                            ("gpt-4o-2024-05-13", "GPT-4o v2024-05-13"),
                            ("gpt-4o", "GPT-4o Default"),
                            ("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
                            ("gpt-4o-mini", "GPT-4o Mini Default"),
                        ],
                        default="gpt-4o-2024-08-06",
                        help_text="Must be one of the supported model names",
                        max_length=50,
                    ),
                ),
                (
                    "temperature",
                    models.FloatField(default=0, help_text="Must be between 0 and 1"),
                ),
                (
                    "max_tokens",
                    models.IntegerField(
                        default=4000, help_text="Must be between 500 and 5000"
                    ),
                ),
                (
                    "business_context",
                    models.TextField(
                        blank=True,
                        default="",
                        help_text="Additional business context for this AI model configuration",
                    ),
                ),
                ("is_active", models.BooleanField(default=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "lighthouse_configurations",
                "abstract": False,
                "constraints": [
                    models.UniqueConstraint(
                        fields=("tenant_id",),
                        name="unique_lighthouse_config_per_tenant",
                    ),
                ],
            },
        ),
        migrations.AddConstraint(
            model_name="lighthouseconfiguration",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_lighthouseconfiguration",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
    ]
@@ -1,24 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-23 10:04

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0030_lighthouseconfiguration"),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        migrations.AlterField(
            model_name="scan",
            name="scheduler_task",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="django_celery_beat.periodictask",
            ),
        ),
    ]
|
||||
@@ -1,13 +1,10 @@
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from config.custom_logging import BackendLogger
|
||||
from cryptography.fernet import Fernet, InvalidToken
|
||||
from cryptography.fernet import Fernet
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import AbstractBaseUser
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.contrib.postgres.search import SearchVector, SearchVectorField
|
||||
from django.core.validators import MinLengthValidator
|
||||
@@ -24,7 +21,6 @@ from uuid6 import uuid7
|
||||
from api.db_utils import (
|
||||
CustomUserManager,
|
||||
FindingDeltaEnumField,
|
||||
IntegrationTypeEnumField,
|
||||
InvitationStateEnumField,
|
||||
MemberRoleEnumField,
|
||||
ProviderEnumField,
|
||||
@@ -51,8 +47,6 @@ fernet = Fernet(settings.SECRETS_ENCRYPTION_KEY.encode())
|
||||
# Convert Prowler Severity enum to Django TextChoices
|
||||
SeverityChoices = enum_to_choices(Severity)
|
||||
|
||||
logger = logging.getLogger(BackendLogger.API)
|
||||
|
||||
|
||||
class StatusChoices(models.TextChoices):
|
||||
"""
|
||||
@@ -64,6 +58,7 @@ class StatusChoices(models.TextChoices):
|
||||
FAIL = "FAIL", _("Fail")
|
||||
PASS = "PASS", _("Pass")
|
||||
MANUAL = "MANUAL", _("Manual")
|
||||
MUTED = "MUTED", _("Muted")
|
||||
|
||||
|
||||
class StateChoices(models.TextChoices):
|
||||
@@ -196,7 +191,6 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
AZURE = "azure", _("Azure")
|
||||
GCP = "gcp", _("GCP")
|
||||
KUBERNETES = "kubernetes", _("Kubernetes")
|
||||
M365 = "m365", _("M365")
|
||||
|
||||
@staticmethod
|
||||
def validate_aws_uid(value):
|
||||
@@ -220,19 +214,6 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_m365_uid(value):
|
||||
if not re.match(
|
||||
r"""^(?!-)[A-Za-z0-9](?:[A-Za-z0-9-]{0,61}[A-Za-z0-9])?(?:\.(?!-)[A-Za-z0-9]"""
|
||||
r"""(?:[A-Za-z0-9-]{0,61}[A-Za-z0-9])?)*\.[A-Za-z]{2,}$""",
|
||||
value,
|
||||
):
|
||||
raise ModelValidationError(
|
||||
detail="M365 domain ID must be a valid domain.",
|
||||
code="m365-uid",
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_gcp_uid(value):
|
||||
if not re.match(r"^[a-z][a-z0-9-]{5,29}$", value):
|
||||
@@ -246,7 +227,7 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
@staticmethod
|
||||
def validate_kubernetes_uid(value):
|
||||
if not re.match(
|
||||
r"^[a-zA-Z0-9][a-zA-Z0-9._@:\/-]{1,250}$",
|
||||
r"^[a-z0-9][A-Za-z0-9_.:\/-]{1,250}$",
|
||||
value,
|
||||
):
|
||||
raise ModelValidationError(
|
||||
@@ -431,10 +412,9 @@ class Scan(RowLevelSecurityProtectedModel):
|
||||
completed_at = models.DateTimeField(null=True, blank=True)
|
||||
next_scan_at = models.DateTimeField(null=True, blank=True)
|
||||
scheduler_task = models.ForeignKey(
|
||||
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
|
||||
PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
|
||||
)
|
||||
output_location = models.CharField(blank=True, null=True, max_length=200)
|
||||
|
||||
# TODO: mutelist foreign key
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
@@ -457,11 +437,6 @@ class Scan(RowLevelSecurityProtectedModel):
|
||||
fields=["tenant_id", "provider_id", "state", "inserted_at"],
|
||||
name="scans_prov_state_insert_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "provider_id", "state", "-inserted_at"],
|
||||
condition=Q(state=StateChoices.COMPLETED),
|
||||
name="scans_prov_state_ins_desc_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
@@ -543,11 +518,6 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
editable=False,
|
||||
)
|
||||
|
||||
metadata = models.TextField(blank=True, null=True)
|
||||
details = models.TextField(blank=True, null=True)
|
||||
partition = models.TextField(blank=True, null=True)
|
||||
|
||||
# Relationships
|
||||
tags = models.ManyToManyField(
|
||||
ResourceTag,
|
||||
verbose_name="Tags associated with the resource, by provider",
|
||||
@@ -588,11 +558,6 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
name="resource_tenant_metadata_idx",
|
||||
),
|
||||
GinIndex(fields=["text_search"], name="gin_resources_search_idx"),
|
||||
models.Index(fields=["tenant_id", "id"], name="resources_tenant_id_idx"),
|
||||
models.Index(
|
||||
fields=["tenant_id", "provider_id"],
|
||||
name="resources_tenant_provider_idx",
|
||||
),
|
||||
]
|
||||
|
||||
constraints = [
|
||||
@@ -690,23 +655,6 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
tags = models.JSONField(default=dict, null=True, blank=True)
|
||||
check_id = models.CharField(max_length=100, blank=False, null=False)
|
||||
check_metadata = models.JSONField(default=dict, null=False)
|
||||
muted = models.BooleanField(default=False, null=False)
|
||||
compliance = models.JSONField(default=dict, null=True, blank=True)
|
||||
|
||||
# Denormalize resource data for performance
|
||||
resource_regions = ArrayField(
|
||||
models.CharField(max_length=100), blank=True, null=True
|
||||
)
|
||||
resource_services = ArrayField(
|
||||
models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
resource_types = ArrayField(
|
||||
models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
# Relationships
|
||||
scan = models.ForeignKey(to=Scan, related_name="findings", on_delete=models.CASCADE)
|
||||
@@ -748,6 +696,18 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
]
|
||||
|
||||
indexes = [
|
||||
models.Index(fields=["uid"], name="findings_uid_idx"),
|
||||
models.Index(
|
||||
fields=[
|
||||
"scan_id",
|
||||
"impact",
|
||||
"severity",
|
||||
"status",
|
||||
"check_id",
|
||||
"delta",
|
||||
],
|
||||
name="findings_filter_idx",
|
||||
),
|
||||
models.Index(fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"),
|
||||
GinIndex(fields=["text_search"], name="gin_findings_search_idx"),
|
||||
models.Index(fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"),
|
||||
@@ -759,46 +719,19 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
condition=Q(delta="new"),
|
||||
name="find_delta_new_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "uid", "-inserted_at"],
|
||||
name="find_tenant_uid_inserted_idx",
|
||||
),
|
||||
GinIndex(fields=["resource_services"], name="gin_find_service_idx"),
|
||||
GinIndex(fields=["resource_regions"], name="gin_find_region_idx"),
|
||||
GinIndex(fields=["resource_types"], name="gin_find_rtype_idx"),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "check_id"],
|
||||
name="find_tenant_scan_check_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "findings"
|
||||
|
||||
def add_resources(self, resources: list[Resource] | None):
|
||||
if not resources:
|
||||
return
|
||||
|
||||
self.resource_regions = self.resource_regions or []
|
||||
self.resource_services = self.resource_services or []
|
||||
self.resource_types = self.resource_types or []
|
||||
|
||||
# Deduplication
|
||||
regions = set(self.resource_regions)
|
||||
services = set(self.resource_services)
|
||||
types = set(self.resource_types)
|
||||
|
||||
# Add new relationships with the tenant_id field
|
||||
for resource in resources:
|
||||
ResourceFindingMapping.objects.update_or_create(
|
||||
resource=resource, finding=self, tenant_id=self.tenant_id
|
||||
)
|
||||
regions.add(resource.region)
|
||||
services.add(resource.service)
|
||||
types.add(resource.type)
|
||||
|
||||
self.resource_regions = list(regions)
|
||||
self.resource_services = list(services)
|
||||
self.resource_types = list(types)
|
||||
# Save the instance
|
||||
self.save()
|
||||
|
||||
|
||||
@@ -858,7 +791,6 @@ class ProviderSecret(RowLevelSecurityProtectedModel):
|
||||
class TypeChoices(models.TextChoices):
|
||||
STATIC = "static", _("Key-value pairs")
|
||||
ROLE = "role", _("Role assumption")
|
||||
SERVICE_ACCOUNT = "service_account", _("GCP Service Account Key")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
@@ -936,11 +868,6 @@ class Invitation(RowLevelSecurityProtectedModel):
|
||||
null=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if self.email:
|
||||
self.email = self.email.strip().lower()
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "invitations"
|
||||
|
||||
@@ -1156,78 +1083,6 @@ class ComplianceOverview(RowLevelSecurityProtectedModel):
|
||||
resource_name = "compliance-overviews"
|
||||
|
||||
|
||||
class ComplianceRequirementOverview(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
compliance_id = models.TextField(blank=False)
|
||||
framework = models.TextField(blank=False)
|
||||
version = models.TextField(blank=True)
|
||||
description = models.TextField(blank=True)
|
||||
region = models.TextField(blank=False)
|
||||
|
||||
requirement_id = models.TextField(blank=False)
|
||||
requirement_status = StatusEnumField(choices=StatusChoices)
|
||||
passed_checks = models.IntegerField(default=0)
|
||||
failed_checks = models.IntegerField(default=0)
|
||||
total_checks = models.IntegerField(default=0)
|
||||
|
||||
scan = models.ForeignKey(
|
||||
Scan,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="compliance_requirements_overviews",
|
||||
related_query_name="compliance_requirements_overview",
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "compliance_requirements_overviews"
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=(
|
||||
"tenant_id",
|
||||
"scan_id",
|
||||
"compliance_id",
|
||||
"requirement_id",
|
||||
"region",
|
||||
),
|
||||
name="unique_tenant_compliance_requirement_overview",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "DELETE"],
|
||||
),
|
||||
]
|
||||
indexes = [
|
||||
models.Index(fields=["tenant_id", "scan_id"], name="cro_tenant_scan_idx"),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "compliance_id"],
|
||||
name="cro_scan_comp_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "compliance_id", "region"],
|
||||
name="cro_scan_comp_reg_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "compliance_id", "requirement_id"],
|
||||
name="cro_scan_comp_req_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=[
|
||||
"tenant_id",
|
||||
"scan_id",
|
||||
"compliance_id",
|
||||
"requirement_id",
|
||||
"region",
|
||||
],
|
||||
name="cro_scan_comp_req_reg_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "compliance-requirements-overviews"
|
||||
|
||||
|
||||
class ScanSummary(RowLevelSecurityProtectedModel):
|
||||
objects = ActiveProviderManager()
|
||||
all_objects = models.Manager()
|
||||
@@ -1278,506 +1133,8 @@ class ScanSummary(RowLevelSecurityProtectedModel):
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id"],
|
||||
name="scan_summaries_tenant_scan_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "service"],
|
||||
name="ss_tenant_scan_service_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "severity"],
|
||||
name="ss_tenant_scan_severity_idx",
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "scan-summaries"
|
||||
|
||||
|
||||
class Integration(RowLevelSecurityProtectedModel):
|
||||
class IntegrationChoices(models.TextChoices):
|
||||
S3 = "amazon_s3", _("Amazon S3")
|
||||
AWS_SECURITY_HUB = "aws_security_hub", _("AWS Security Hub")
|
||||
JIRA = "jira", _("JIRA")
|
||||
SLACK = "slack", _("Slack")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
enabled = models.BooleanField(default=False)
|
||||
connected = models.BooleanField(null=True, blank=True)
|
||||
connection_last_checked_at = models.DateTimeField(null=True, blank=True)
|
||||
integration_type = IntegrationTypeEnumField(choices=IntegrationChoices.choices)
|
||||
configuration = models.JSONField(default=dict)
|
||||
_credentials = models.BinaryField(db_column="credentials")
|
||||
|
||||
providers = models.ManyToManyField(
|
||||
Provider,
|
||||
related_name="integrations",
|
||||
through="IntegrationProviderRelationship",
|
||||
blank=True,
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "integrations"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "integrations"
|
||||
|
||||
@property
|
||||
def credentials(self):
|
||||
if isinstance(self._credentials, memoryview):
|
||||
encrypted_bytes = self._credentials.tobytes()
|
||||
elif isinstance(self._credentials, str):
|
||||
encrypted_bytes = self._credentials.encode()
|
||||
else:
|
||||
encrypted_bytes = self._credentials
|
||||
decrypted_data = fernet.decrypt(encrypted_bytes)
|
||||
return json.loads(decrypted_data.decode())
|
||||
|
||||
@credentials.setter
|
||||
def credentials(self, value):
|
||||
encrypted_data = fernet.encrypt(json.dumps(value).encode())
|
||||
self._credentials = encrypted_data
|
||||
|
||||
|
||||
class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
integration = models.ForeignKey(Integration, on_delete=models.CASCADE)
|
||||
provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
db_table = "integration_provider_mappings"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["integration_id", "provider_id"],
|
||||
name="unique_integration_provider_rel",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
# class SAMLToken(models.Model):
|
||||
# id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
# inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
# updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
# expires_at = models.DateTimeField(editable=False)
|
||||
# token = models.JSONField(unique=True)
|
||||
# user = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||
|
||||
# class Meta:
|
||||
# db_table = "saml_tokens"
|
||||
|
||||
# def save(self, *args, **kwargs):
|
||||
# if not self.expires_at:
|
||||
# self.expires_at = datetime.now(timezone.utc) + timedelta(seconds=15)
|
||||
# super().save(*args, **kwargs)
|
||||
|
||||
# def is_expired(self) -> bool:
|
||||
# return datetime.now(timezone.utc) >= self.expires_at
|
||||
|
||||
|
||||
# class SAMLDomainIndex(models.Model):
|
||||
# """
|
||||
# Public index of SAML domains. No RLS. Used for fast lookup in SAML login flow.
|
||||
# """
|
||||
|
||||
# email_domain = models.CharField(max_length=254, unique=True)
|
||||
# tenant = models.ForeignKey("Tenant", on_delete=models.CASCADE)
|
||||
|
||||
# class Meta:
|
||||
# db_table = "saml_domain_index"
|
||||
|
||||
# constraints = [
|
||||
# models.UniqueConstraint(
|
||||
# fields=("email_domain", "tenant"),
|
||||
# name="unique_resources_by_email_domain",
|
||||
# ),
|
||||
# BaseSecurityConstraint(
|
||||
# name="statements_on_%(class)s",
|
||||
# statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
# ),
|
||||
# ]
|
||||
|
||||
|
||||
# class SAMLConfiguration(RowLevelSecurityProtectedModel):
|
||||
# """
|
||||
# Stores per-tenant SAML settings, including email domain and IdP metadata.
|
||||
# Automatically syncs to a SocialApp instance on save.
|
||||
|
||||
# Note:
|
||||
# This model exists to provide a tenant-aware abstraction over SAML configuration.
|
||||
# It supports row-level security, custom validation, and metadata parsing, enabling
|
||||
# Prowler to expose a clean API and admin interface for managing SAML integrations.
|
||||
|
||||
# Although Django Allauth uses the SocialApp model to store provider configuration,
|
||||
# it is not designed for multi-tenant use. SocialApp lacks support for tenant scoping,
|
||||
# email domain mapping, and structured metadata handling.
|
||||
|
||||
# By managing SAMLConfiguration separately, we ensure:
|
||||
# - Strong isolation between tenants via RLS.
|
||||
# - Ownership of raw IdP metadata and its validation.
|
||||
# - An explicit link between SAML config and business-level identifiers (e.g. email domain).
|
||||
# - Programmatic transformation into the SocialApp format used by Allauth.
|
||||
|
||||
# In short, this model acts as a secure and user-friendly layer over Allauth's lower-level primitives.
|
||||
# """
|
||||
|
||||
# id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
# email_domain = models.CharField(
|
||||
# max_length=254,
|
||||
# unique=True,
|
||||
# help_text="Email domain used to identify the tenant, e.g. prowlerdemo.com",
|
||||
# )
|
||||
# metadata_xml = models.TextField(
|
||||
# help_text="Raw IdP metadata XML to configure SingleSignOnService, certificates, etc."
|
||||
# )
|
||||
# created_at = models.DateTimeField(auto_now_add=True)
|
||||
# updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
# class JSONAPIMeta:
|
||||
# resource_name = "saml-configurations"
|
||||
|
||||
# class Meta:
|
||||
# db_table = "saml_configurations"
|
||||
|
||||
# constraints = [
|
||||
# RowLevelSecurityConstraint(
|
||||
# field="tenant_id",
|
||||
# name="rls_on_%(class)s",
|
||||
# statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
# ),
|
||||
# # 1 config per tenant
|
||||
# models.UniqueConstraint(
|
||||
# fields=["tenant"],
|
||||
# name="unique_samlconfig_per_tenant",
|
||||
# ),
|
||||
# ]
|
||||
|
||||
# def clean(self, old_email_domain=None):
|
||||
# # Domain must not contain @
|
||||
# if "@" in self.email_domain:
|
||||
# raise ValidationError({"email_domain": "Domain must not contain @"})
|
||||
|
||||
# # Enforce at most one config per tenant
|
||||
# qs = SAMLConfiguration.objects.filter(tenant=self.tenant)
|
||||
# # Exclude ourselves in case of update
|
||||
# if self.pk:
|
||||
# qs = qs.exclude(pk=self.pk)
|
||||
# if qs.exists():
|
||||
# raise ValidationError(
|
||||
# {"tenant": "A SAML configuration already exists for this tenant."}
|
||||
# )
|
||||
|
||||
# # The email domain must be unique in the entire system
|
||||
# qs = SAMLConfiguration.objects.using(MainRouter.admin_db).filter(
|
||||
# email_domain__iexact=self.email_domain
|
||||
# )
|
||||
# if qs.exists() and old_email_domain != self.email_domain:
|
||||
# raise ValidationError(
|
||||
# {"tenant": "There is a problem with your email domain."}
|
||||
# )
|
||||
|
||||
# def save(self, *args, **kwargs):
|
||||
# self.email_domain = self.email_domain.strip().lower()
|
||||
# is_create = not SAMLConfiguration.objects.filter(pk=self.pk).exists()
|
||||
|
||||
# if not is_create:
|
||||
# old = SAMLConfiguration.objects.get(pk=self.pk)
|
||||
# old_email_domain = old.email_domain
|
||||
# old_metadata_xml = old.metadata_xml
|
||||
# else:
|
||||
# old_email_domain = None
|
||||
# old_metadata_xml = None
|
||||
|
||||
# self.clean(old_email_domain)
|
||||
# super().save(*args, **kwargs)
|
||||
|
||||
# if is_create or (
|
||||
# old_email_domain != self.email_domain
|
||||
# or old_metadata_xml != self.metadata_xml
|
||||
# ):
|
||||
# self._sync_social_app(old_email_domain)
|
||||
|
||||
# # Sync the public index
|
||||
# if not is_create and old_email_domain and old_email_domain != self.email_domain:
|
||||
# SAMLDomainIndex.objects.filter(email_domain=old_email_domain).delete()
|
||||
|
||||
# # Create/update the new domain index
|
||||
# SAMLDomainIndex.objects.update_or_create(
|
||||
# email_domain=self.email_domain, defaults={"tenant": self.tenant}
|
||||
# )
|
||||
|
||||
# def _parse_metadata(self):
|
||||
# """
|
||||
# Parse the raw IdP metadata XML and extract:
|
||||
# - entity_id
|
||||
# - sso_url
|
||||
# - slo_url (may be None)
|
||||
# - x509cert (required)
|
||||
# """
|
||||
# ns = {
|
||||
# "md": "urn:oasis:names:tc:SAML:2.0:metadata",
|
||||
# "ds": "http://www.w3.org/2000/09/xmldsig#",
|
||||
# }
|
||||
# try:
|
||||
# root = ET.fromstring(self.metadata_xml)
|
||||
# except ET.ParseError as e:
|
||||
# raise ValidationError({"metadata_xml": f"Invalid XML: {e}"})
|
||||
|
||||
# # Entity ID
|
||||
# entity_id = root.attrib.get("entityID")
|
||||
|
||||
# # SSO endpoint (must exist)
|
||||
# sso = root.find(".//md:IDPSSODescriptor/md:SingleSignOnService", ns)
|
||||
# if sso is None or "Location" not in sso.attrib:
|
||||
# raise ValidationError(
|
||||
# {"metadata_xml": "Missing SingleSignOnService in metadata."}
|
||||
# )
|
||||
# sso_url = sso.attrib["Location"]
|
||||
|
||||
# # SLO endpoint (optional)
|
||||
# slo = root.find(".//md:IDPSSODescriptor/md:SingleLogoutService", ns)
|
||||
# slo_url = slo.attrib.get("Location") if slo is not None else None
|
||||
|
||||
# # X.509 certificate (required)
|
||||
# cert = root.find(
|
||||
# './/md:KeyDescriptor[@use="signing"]/ds:KeyInfo/ds:X509Data/ds:X509Certificate',
|
||||
# ns,
|
||||
# )
|
||||
# if cert is None or not cert.text or not cert.text.strip():
|
||||
# raise ValidationError(
|
||||
# {
|
||||
# "metadata_xml": 'Metadata must include a <ds:X509Certificate> under <KeyDescriptor use="signing">.'
|
||||
# }
|
||||
# )
|
||||
# x509cert = cert.text.strip()
|
||||
|
||||
# return {
|
||||
# "entity_id": entity_id,
|
||||
# "sso_url": sso_url,
|
||||
# "slo_url": slo_url,
|
||||
# "x509cert": x509cert,
|
||||
# }
|
||||
|
||||
# def _sync_social_app(self, previous_email_domain=None):
|
||||
# """
|
||||
# Create or update the corresponding SocialApp based on email_domain.
|
||||
# If the domain changed, update the matching SocialApp.
|
||||
# """
|
||||
# idp_settings = self._parse_metadata()
|
||||
# settings_dict = SOCIALACCOUNT_PROVIDERS["saml"].copy()
|
||||
# settings_dict["idp"] = idp_settings
|
||||
|
||||
# current_site = Site.objects.get(id=settings.SITE_ID)
|
||||
|
||||
# social_app_qs = SocialApp.objects.filter(
|
||||
# provider="saml", client_id=previous_email_domain or self.email_domain
|
||||
# )
|
||||
|
||||
# client_id = self.email_domain[:191]
|
||||
# name = f"SAML-{self.email_domain}"[:40]
|
||||
|
||||
# if social_app_qs.exists():
|
||||
# social_app = social_app_qs.first()
|
||||
# social_app.client_id = client_id
|
||||
# social_app.name = name
|
||||
# social_app.settings = settings_dict
|
||||
# social_app.save()
|
||||
# social_app.sites.set([current_site])
|
||||
# else:
|
||||
# social_app = SocialApp.objects.create(
|
||||
# provider="saml",
|
||||
# client_id=client_id,
|
||||
# name=name,
|
||||
# settings=settings_dict,
|
||||
# )
|
||||
# social_app.sites.set([current_site])
|
||||
|
||||
|
||||
class ResourceScanSummary(RowLevelSecurityProtectedModel):
|
||||
scan_id = models.UUIDField(default=uuid7, db_index=True)
|
||||
resource_id = models.UUIDField(default=uuid4, db_index=True)
|
||||
service = models.CharField(max_length=100)
|
||||
region = models.CharField(max_length=100)
|
||||
resource_type = models.CharField(max_length=100)
|
||||
|
||||
class Meta:
|
||||
db_table = "resource_scan_summaries"
|
||||
unique_together = (("tenant_id", "scan_id", "resource_id"),)
|
||||
|
||||
indexes = [
|
||||
# Single-dimension lookups:
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "service"],
|
||||
name="rss_tenant_scan_svc_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region"],
|
||||
name="rss_tenant_scan_reg_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "resource_type"],
|
||||
name="rss_tenant_scan_type_idx",
|
||||
),
|
||||
# Two-dimension cross-filters:
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region", "service"],
|
||||
name="rss_tenant_scan_reg_svc_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "service", "resource_type"],
|
||||
name="rss_tenant_scan_svc_type_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region", "resource_type"],
|
||||
name="rss_tenant_scan_reg_type_idx",
|
||||
),
|
||||
]
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class LighthouseConfiguration(RowLevelSecurityProtectedModel):
|
||||
"""
|
||||
Stores configuration and API keys for LLM services.
|
||||
"""
|
||||
|
||||
class ModelChoices(models.TextChoices):
|
||||
GPT_4O_2024_11_20 = "gpt-4o-2024-11-20", _("GPT-4o v2024-11-20")
|
||||
GPT_4O_2024_08_06 = "gpt-4o-2024-08-06", _("GPT-4o v2024-08-06")
|
||||
GPT_4O_2024_05_13 = "gpt-4o-2024-05-13", _("GPT-4o v2024-05-13")
|
||||
GPT_4O = "gpt-4o", _("GPT-4o Default")
|
||||
GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18", _("GPT-4o Mini v2024-07-18")
|
||||
GPT_4O_MINI = "gpt-4o-mini", _("GPT-4o Mini Default")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
|
||||
name = models.CharField(
|
||||
max_length=100,
|
||||
validators=[MinLengthValidator(3)],
|
||||
blank=False,
|
||||
null=False,
|
||||
help_text="Name of the configuration",
|
||||
)
|
||||
api_key = models.BinaryField(
|
||||
blank=False, null=False, help_text="Encrypted API key for the LLM service"
|
||||
)
|
||||
model = models.CharField(
|
||||
max_length=50,
|
||||
choices=ModelChoices.choices,
|
||||
blank=False,
|
||||
null=False,
|
||||
default=ModelChoices.GPT_4O_2024_08_06,
|
||||
help_text="Must be one of the supported model names",
|
||||
)
|
||||
temperature = models.FloatField(default=0, help_text="Must be between 0 and 1")
|
||||
max_tokens = models.IntegerField(
|
||||
default=4000, help_text="Must be between 500 and 5000"
|
||||
)
|
||||
business_context = models.TextField(
|
||||
blank=True,
|
||||
null=False,
|
||||
default="",
|
||||
help_text="Additional business context for this AI model configuration",
|
||||
)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
|
||||
# Validate temperature
|
||||
if not 0 <= self.temperature <= 1:
|
||||
raise ModelValidationError(
|
||||
detail="Temperature must be between 0 and 1",
|
||||
code="invalid_temperature",
|
||||
pointer="/data/attributes/temperature",
|
||||
)
|
||||
|
||||
# Validate max_tokens
|
||||
if not 500 <= self.max_tokens <= 5000:
|
||||
raise ModelValidationError(
|
||||
detail="Max tokens must be between 500 and 5000",
|
||||
code="invalid_max_tokens",
|
||||
pointer="/data/attributes/max_tokens",
|
||||
)
|
||||
|
||||
@property
|
||||
def api_key_decoded(self):
|
||||
"""Return the decrypted API key, or None if unavailable or invalid."""
|
||||
if not self.api_key:
|
||||
return None
|
||||
|
||||
try:
|
||||
decrypted_key = fernet.decrypt(bytes(self.api_key))
|
||||
return decrypted_key.decode()
|
||||
|
||||
except InvalidToken:
|
||||
logger.warning("Invalid token while decrypting API key.")
|
||||
except Exception as e:
|
||||
logger.exception("Unexpected error while decrypting API key: %s", e)
|
||||
|
||||
@api_key_decoded.setter
|
||||
def api_key_decoded(self, value):
|
||||
"""Store the encrypted API key."""
|
||||
if not value:
|
||||
raise ModelValidationError(
|
||||
detail="API key is required",
|
||||
code="invalid_api_key",
|
||||
pointer="/data/attributes/api_key",
|
||||
)
|
||||
|
||||
# Validate OpenAI API key format
|
||||
openai_key_pattern = r"^sk-[\w-]+T3BlbkFJ[\w-]+$"
|
||||
if not re.match(openai_key_pattern, value):
|
||||
raise ModelValidationError(
|
||||
detail="Invalid OpenAI API key format.",
|
||||
code="invalid_api_key",
|
||||
pointer="/data/attributes/api_key",
|
||||
)
|
||||
self.api_key = fernet.encrypt(value.encode())
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.full_clean()
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "lighthouse_configurations"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
# Add unique constraint for name within a tenant
|
||||
models.UniqueConstraint(
|
||||
fields=["tenant_id"], name="unique_lighthouse_config_per_tenant"
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "lighthouse-configurations"
|
||||
@@ -1,4 +1,4 @@
from drf_spectacular_jsonapi.schemas.pagination import JsonApiPageNumberPagination
from rest_framework_json_api.pagination import JsonApiPageNumberPagination


class ComplianceOverviewPagination(JsonApiPageNumberPagination):
@@ -2,7 +2,8 @@ from typing import Any
from uuid import uuid4

from django.core.exceptions import ValidationError
from django.db import DEFAULT_DB_ALIAS, models
from django.db import DEFAULT_DB_ALIAS
from django.db import models
from django.db.backends.ddl_references import Statement, Table

from api.db_utils import DB_USER, POSTGRES_TENANT_VAR
@@ -58,11 +59,11 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
    drop_sql_query = """
        ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
        ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
        REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
        REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
    """

    drop_policy_sql_query = """
        DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} ON %(table_name)s;
        DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
    """

    def __init__(
@@ -87,7 +88,9 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
                f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
            )

        full_create_sql_query = f"{self.rls_sql_query}{policy_queries}{grant_queries}"
        full_create_sql_query = (
            f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
        )

        table_name = model._meta.db_table
        if self.partition_name:
@@ -104,20 +107,16 @@ class RowLevelSecurityConstraint(models.BaseConstraint):

    def remove_sql(self, model: Any, schema_editor: Any) -> Any:
        field_column = schema_editor.quote_name(self.target_field)
        raw_table_name = model._meta.db_table
        table_name = raw_table_name
        if self.partition_name:
            raw_table_name = f"{raw_table_name}_{self.partition_name}"
            table_name = raw_table_name

        full_drop_sql_query = (
            f"{self.drop_sql_query}"
            f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
            f"{''.join([self.drop_policy_sql_query.format(statement) for statement in self.statements])}"
        )
        table_name = model._meta.db_table
        if self.partition_name:
            table_name = f"{table_name}_{self.partition_name}"
        return Statement(
            full_drop_sql_query,
            table_name=Table(table_name, schema_editor.quote_name),
            raw_table_name=raw_table_name,
            field_column=field_column,
            db_user=DB_USER,
            partition_name=self.partition_name,
File diff suppressed because it is too large
@@ -1,88 +0,0 @@
from unittest.mock import MagicMock

import pytest
from allauth.socialaccount.models import SocialLogin
from django.contrib.auth import get_user_model

from api.adapters import ProwlerSocialAccountAdapter

User = get_user_model()


@pytest.mark.django_db
class TestProwlerSocialAccountAdapter:
    def test_get_user_by_email_returns_user(self, create_test_user):
        adapter = ProwlerSocialAccountAdapter()
        user = adapter.get_user_by_email(create_test_user.email)
        assert user == create_test_user

    def test_get_user_by_email_returns_none_for_unknown_email(self):
        adapter = ProwlerSocialAccountAdapter()
        assert adapter.get_user_by_email("notfound@example.com") is None

    # def test_pre_social_login_links_existing_user(self, create_test_user, rf):
    #     adapter = ProwlerSocialAccountAdapter()

    #     sociallogin = MagicMock(spec=SocialLogin)
    #     sociallogin.account = MagicMock()
    #     sociallogin.account.provider = "saml"
    #     sociallogin.account.extra_data = {}
    #     sociallogin.user = create_test_user
    #     sociallogin.connect = MagicMock()

    #     adapter.pre_social_login(rf.get("/"), sociallogin)

    #     call_args = sociallogin.connect.call_args
    #     assert call_args is not None

    #     called_request, called_user = call_args[0]
    #     assert called_request.path == "/"
    #     assert called_user.email == create_test_user.email

    def test_pre_social_login_no_link_if_email_missing(self, rf):
        adapter = ProwlerSocialAccountAdapter()

        sociallogin = MagicMock(spec=SocialLogin)
        sociallogin.account = MagicMock()
        sociallogin.account.provider = "github"
        sociallogin.account.extra_data = {}
        sociallogin.connect = MagicMock()

        adapter.pre_social_login(rf.get("/"), sociallogin)

        sociallogin.connect.assert_not_called()

    # def test_save_user_saml_flow(
    #     self,
    #     rf,
    #     saml_setup,
    #     saml_sociallogin,
    # ):
    #     adapter = ProwlerSocialAccountAdapter()
    #     request = rf.get("/")
    #     saml_sociallogin.user.email = saml_setup["email"]
    #     saml_sociallogin.account.extra_data = {
    #         "firstName": [],
    #         "lastName": [],
    #         "organization": [],
    #         "userType": [],
    #     }

    #     tenant = Tenant.objects.using(MainRouter.admin_db).get(
    #         id=saml_setup["tenant_id"]
    #     )
    #     saml_config = SAMLConfiguration.objects.using(MainRouter.admin_db).get(
    #         tenant=tenant
    #     )
    #     assert saml_config.email_domain == saml_setup["domain"]

    #     user = adapter.save_user(request, saml_sociallogin)

    #     assert user.name == "N/A"
    #     assert user.company_name == ""
    #     assert user.email == saml_setup["email"]
    #     assert (
    #         Membership.objects.using(MainRouter.admin_db)
    #         .filter(user=user, tenant=tenant)
    #         .exists()
    #     )
@@ -1,12 +1,12 @@
from unittest.mock import MagicMock, patch
from unittest.mock import patch, MagicMock

from api.compliance import (
    generate_compliance_overview_template,
    generate_scan_compliance,
    get_prowler_provider_checks,
    get_prowler_provider_compliance,
    load_prowler_checks,
    load_prowler_compliance,
    load_prowler_checks,
    generate_scan_compliance,
    generate_compliance_overview_template,
)
from api.models import Provider

@@ -69,7 +69,7 @@ class TestCompliance:

        load_prowler_compliance()

        from api.compliance import PROWLER_CHECKS, PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
        from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS

        assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
            "template_key": "template_value"
@@ -218,10 +218,6 @@ class TestCompliance:
            Description="Description of requirement 1",
            Attributes=[],
            Checks=["check1", "check2"],
            Tactics=["tactic1"],
            SubTechniques=["subtechnique1"],
            Platforms=["platform1"],
            TechniqueURL="https://example.com",
        )
        requirement2 = MagicMock(
            Id="requirement2",
@@ -229,10 +225,6 @@ class TestCompliance:
            Description="Description of requirement 2",
            Attributes=[],
            Checks=[],
            Tactics=[],
            SubTechniques=[],
            Platforms=[],
            TechniqueURL="",
        )
        compliance1 = MagicMock(
            Requirements=[requirement1, requirement2],
@@ -255,10 +247,6 @@ class TestCompliance:
            "requirement1": {
                "name": "Requirement 1",
                "description": "Description of requirement 1",
                "tactics": ["tactic1"],
                "subtechniques": ["subtechnique1"],
                "platforms": ["platform1"],
                "technique_url": "https://example.com",
                "attributes": [],
                "checks": {"check1": None, "check2": None},
                "checks_status": {
@@ -272,10 +260,6 @@ class TestCompliance:
            "requirement2": {
                "name": "Requirement 2",
                "description": "Description of requirement 2",
                "tactics": [],
                "subtechniques": [],
                "platforms": [],
                "technique_url": "",
                "attributes": [],
                "checks": {},
                "checks_status": {
@@ -284,7 +268,7 @@ class TestCompliance:
                    "manual": 0,
                    "total": 0,
                },
                "status": "MANUAL",
                "status": "PASS",
            },
        },
        "requirements_status": {
|
||||
|
||||
@@ -3,13 +3,9 @@ from enum import Enum
from unittest.mock import patch

import pytest
from django.conf import settings
from freezegun import freeze_time

from api.db_utils import (
    _should_create_index_on_partition,
    batch_delete,
    create_objects_in_batches,
    enum_to_choices,
    generate_random_token,
    one_week_from_now,
@@ -142,88 +138,3 @@ class TestBatchDelete:
        )
        assert Provider.objects.all().count() == 0
        assert summary == {"api.Provider": create_test_providers}


class TestShouldCreateIndexOnPartition:
    @freeze_time("2025-05-15 00:00:00Z")
    @pytest.mark.parametrize(
        "partition_name, all_partitions, expected",
        [
            ("any_name", True, True),
            ("findings_default", True, True),
            ("findings_2022_jan", True, True),
            ("foo_bar", False, True),
            ("findings_2025_MAY", False, True),
            ("findings_2025_may", False, True),
            ("findings_2025_jun", False, True),
            ("findings_2025_apr", False, False),
            ("findings_2025_xyz", False, True),
        ],
    )
    def test_partition_inclusion_logic(self, partition_name, all_partitions, expected):
        assert (
            _should_create_index_on_partition(partition_name, all_partitions)
            is expected
        )

    @freeze_time("2025-05-15 00:00:00Z")
    def test_invalid_date_components(self):
        # even if regex matches but int conversion fails, we fallback True
        # (e.g. year too big, month number parse error)
        bad_name = "findings_99999_jan"
        assert _should_create_index_on_partition(bad_name, False) is True

        bad_name2 = "findings_2025_abc"
        # abc not in month_map → fallback True
        assert _should_create_index_on_partition(bad_name2, False) is True


@pytest.mark.django_db
class TestCreateObjectsInBatches:
    @pytest.fixture
    def tenant(self, tenants_fixture):
        return tenants_fixture[0]

    def make_provider_instances(self, tenant, count):
        """
        Return a list of `count` unsaved Provider instances for the given tenant.
        """
        base_uid = 1000
        return [
            Provider(
                tenant=tenant,
                uid=str(base_uid + i),
                provider=Provider.ProviderChoices.AWS,
            )
            for i in range(count)
        ]

    def test_exact_multiple_of_batch(self, tenant):
        total = 6
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total

    def test_non_multiple_of_batch(self, tenant):
        total = 7
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total

    def test_batch_size_default(self, tenant):
        default_size = settings.DJANGO_DELETION_BATCH_SIZE
        total = default_size + 2
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total
|
||||
|
||||
@@ -1,379 +0,0 @@
|
||||
import json
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from django_celery_results.models import TaskResult
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
from api.exceptions import (
|
||||
TaskFailedException,
|
||||
TaskInProgressException,
|
||||
TaskNotFoundException,
|
||||
)
|
||||
from api.models import Task, User
|
||||
from api.rls import Tenant
|
||||
from api.v1.mixins import PaginateByPkMixin, TaskManagementMixin
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestPaginateByPkMixin:
|
||||
@pytest.fixture
|
||||
def tenant(self):
|
||||
return Tenant.objects.create(name="Test Tenant")
|
||||
|
||||
@pytest.fixture
|
||||
def users(self, tenant):
|
||||
# Create 5 users with proper email field
|
||||
users = []
|
||||
for i in range(5):
|
||||
user = User.objects.create(email=f"user{i}@example.com", name=f"User {i}")
|
||||
users.append(user)
|
||||
return users
|
||||
|
||||
class DummyView(PaginateByPkMixin):
|
||||
def __init__(self, page):
|
||||
self._page = page
|
||||
|
||||
def paginate_queryset(self, qs):
|
||||
return self._page
|
||||
|
||||
def get_serializer(self, queryset, many):
|
||||
class S:
|
||||
def __init__(self, data):
|
||||
# serialize to list of ids
|
||||
self.data = [obj.id for obj in data] if many else queryset.id
|
||||
|
||||
return S(queryset)
|
||||
|
||||
def get_paginated_response(self, data):
|
||||
return Response({"results": data}, status=status.HTTP_200_OK)
|
||||
|
||||
def test_no_pagination(self, users):
|
||||
base_qs = User.objects.all().order_by("id")
|
||||
view = self.DummyView(page=None)
|
||||
resp = view.paginate_by_pk(
|
||||
request=None, base_queryset=base_qs, manager=User.objects
|
||||
)
|
||||
# since no pagination, should return all ids in order
|
||||
expected = [u.id for u in base_qs]
|
||||
assert isinstance(resp, Response)
|
||||
assert resp.data == expected
|
||||
|
||||
def test_with_pagination(self, users):
|
||||
base_qs = User.objects.all().order_by("id")
|
||||
# simulate paging to first 2 ids
|
||||
page = [base_qs[1].id, base_qs[3].id]
|
||||
view = self.DummyView(page=page)
|
||||
resp = view.paginate_by_pk(
|
||||
request=None, base_queryset=base_qs, manager=User.objects
|
||||
)
|
||||
# should fetch only those two users, in the same order as page
|
||||
assert resp.status_code == status.HTTP_200_OK
|
||||
assert resp.data == {"results": page}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestTaskManagementMixin:
|
||||
class DummyView(TaskManagementMixin):
|
||||
pass
|
||||
|
||||
@pytest.fixture
|
||||
def tenant(self):
|
||||
return Tenant.objects.create(name="Test Tenant")
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def cleanup(self):
|
||||
Task.objects.all().delete()
|
||||
TaskResult.objects.all().delete()
|
||||
|
||||
def test_no_task_and_no_taskresult_raises_not_found(self):
|
||||
view = self.DummyView()
|
||||
with pytest.raises(TaskNotFoundException):
|
||||
view.check_task_status("task_xyz", {"foo": "bar"})
|
||||
|
||||
def test_no_task_and_no_taskresult_returns_none_when_not_raising(self):
|
||||
view = self.DummyView()
|
||||
result = view.check_task_status(
|
||||
"task_xyz", {"foo": "bar"}, raise_on_not_found=False
|
||||
)
|
||||
assert result is None
|
||||
|
||||
def test_taskresult_pending_raises_in_progress(self):
|
||||
task_kwargs = {"foo": "bar"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="task_xyz",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="PENDING",
|
||||
)
|
||||
view = self.DummyView()
|
||||
with pytest.raises(TaskInProgressException) as excinfo:
|
||||
view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
|
||||
assert hasattr(excinfo.value, "task_result")
|
||||
assert excinfo.value.task_result == tr
|
||||
|
||||
def test_taskresult_started_raises_in_progress(self):
|
||||
task_kwargs = {"foo": "bar"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="task_xyz",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="STARTED",
|
||||
)
|
||||
view = self.DummyView()
|
||||
with pytest.raises(TaskInProgressException) as excinfo:
|
||||
view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
|
||||
assert hasattr(excinfo.value, "task_result")
|
||||
assert excinfo.value.task_result == tr
|
||||
|
||||
def test_taskresult_progress_raises_in_progress(self):
|
||||
task_kwargs = {"foo": "bar"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="task_xyz",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="PROGRESS",
|
||||
)
|
||||
view = self.DummyView()
|
||||
with pytest.raises(TaskInProgressException) as excinfo:
|
||||
view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
|
||||
assert hasattr(excinfo.value, "task_result")
|
||||
assert excinfo.value.task_result == tr
|
||||
|
||||
def test_taskresult_failure_raises_failed(self):
|
||||
task_kwargs = {"a": 1}
|
||||
TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="task_fail",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="FAILURE",
|
||||
)
|
||||
view = self.DummyView()
|
||||
with pytest.raises(TaskFailedException):
|
||||
view.check_task_status("task_fail", task_kwargs, raise_on_not_found=False)
|
||||
|
||||
def test_taskresult_failure_returns_none_when_not_raising(self):
|
||||
task_kwargs = {"a": 1}
|
||||
TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="task_fail",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="FAILURE",
|
||||
)
|
||||
view = self.DummyView()
|
||||
result = view.check_task_status(
|
||||
"task_fail", task_kwargs, raise_on_failed=False, raise_on_not_found=False
|
||||
)
|
||||
assert result is None
|
||||
|
||||
def test_taskresult_success_returns_none(self):
|
||||
task_kwargs = {"x": 2}
|
||||
TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="task_ok",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="SUCCESS",
|
||||
)
|
||||
view = self.DummyView()
|
||||
# should not raise, and returns None
|
||||
assert (
|
||||
view.check_task_status("task_ok", task_kwargs, raise_on_not_found=False)
|
||||
is None
|
||||
)
|
||||
|
||||
def test_taskresult_revoked_returns_none(self):
|
||||
task_kwargs = {"x": 2}
|
||||
TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="task_revoked",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="REVOKED",
|
||||
)
|
||||
view = self.DummyView()
|
||||
# should not raise, and returns None
|
||||
assert (
|
||||
view.check_task_status(
|
||||
"task_revoked", task_kwargs, raise_on_not_found=False
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
||||
def test_task_with_failed_status_raises_failed(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="FAILURE",
|
||||
)
|
||||
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
with pytest.raises(TaskFailedException) as excinfo:
|
||||
view.check_task_status("scan_task", task_kwargs)
|
||||
# Check that the exception contains the expected task
|
||||
assert hasattr(excinfo.value, "task")
|
||||
assert excinfo.value.task == task
|
||||
|
||||
def test_task_with_cancelled_status_raises_failed(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="REVOKED",
|
||||
)
|
||||
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
with pytest.raises(TaskFailedException) as excinfo:
|
||||
view.check_task_status("scan_task", task_kwargs)
|
||||
# Check that the exception contains the expected task
|
||||
assert hasattr(excinfo.value, "task")
|
||||
assert excinfo.value.task == task
|
||||
|
||||
def test_task_with_failed_status_returns_task_when_not_raising(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="FAILURE",
|
||||
)
|
||||
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
result = view.check_task_status("scan_task", task_kwargs, raise_on_failed=False)
|
||||
assert result == task
|
||||
|
||||
def test_task_with_completed_status_returns_none(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="SUCCESS",
|
||||
)
|
||||
Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
result = view.check_task_status("scan_task", task_kwargs)
|
||||
assert result is None
|
||||
|
||||
def test_task_with_executing_status_returns_task(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="STARTED",
|
||||
)
|
||||
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
result = view.check_task_status("scan_task", task_kwargs)
|
||||
assert result is not None
|
||||
assert result.pk == task.pk
|
||||
|
||||
def test_task_with_pending_status_returns_task(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="PENDING",
|
||||
)
|
||||
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
result = view.check_task_status("scan_task", task_kwargs)
|
||||
assert result is not None
|
||||
assert result.pk == task.pk
|
||||
|
||||
def test_get_task_response_if_running_returns_none_for_completed_task(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="SUCCESS",
|
||||
)
|
||||
Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
result = view.get_task_response_if_running("scan_task", task_kwargs)
|
||||
assert result is None
|
||||
|
||||
def test_get_task_response_if_running_returns_none_for_no_task(self):
|
||||
view = self.DummyView()
|
||||
result = view.get_task_response_if_running(
|
||||
"nonexistent", {"foo": "bar"}, raise_on_not_found=False
|
||||
)
|
||||
assert result is None
|
||||
|
||||
def test_get_task_response_if_running_returns_202_for_executing_task(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="STARTED",
|
||||
)
|
||||
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
result = view.get_task_response_if_running("scan_task", task_kwargs)
|
||||
|
||||
assert isinstance(result, Response)
|
||||
assert result.status_code == status.HTTP_202_ACCEPTED
|
||||
assert "Content-Location" in result.headers
|
||||
# The response should contain the serialized task data
|
||||
assert result.data is not None
|
||||
assert "id" in result.data
|
||||
assert str(result.data["id"]) == str(task.id)
|
||||
|
||||
def test_get_task_response_if_running_returns_none_for_available_task(self, tenant):
|
||||
task_kwargs = {"provider_id": "test"}
|
||||
tr = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs),
|
||||
status="PENDING",
|
||||
)
|
||||
Task.objects.create(tenant=tenant, task_runner_task=tr)
|
||||
view = self.DummyView()
|
||||
result = view.get_task_response_if_running("scan_task", task_kwargs)
|
||||
# PENDING maps to AVAILABLE, which is not EXECUTING, so should return None
|
||||
assert result is None
|
||||
|
||||
def test_kwargs_filtering_works_correctly(self, tenant):
|
||||
# Create tasks with different kwargs
|
||||
task_kwargs_1 = {"provider_id": "test1", "scan_type": "full"}
|
||||
task_kwargs_2 = {"provider_id": "test2", "scan_type": "quick"}
|
||||
|
||||
tr1 = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs_1),
|
||||
status="STARTED",
|
||||
)
|
||||
tr2 = TaskResult.objects.create(
|
||||
task_id=str(uuid4()),
|
||||
task_name="scan_task",
|
||||
task_kwargs=json.dumps(task_kwargs_2),
|
||||
status="STARTED",
|
||||
)
|
||||
|
||||
task1 = Task.objects.create(tenant=tenant, task_runner_task=tr1)
|
||||
task2 = Task.objects.create(tenant=tenant, task_runner_task=tr2)
|
||||
|
||||
view = self.DummyView()
|
||||
|
||||
# Should find task1 when searching for its kwargs
|
||||
result1 = view.check_task_status("scan_task", {"provider_id": "test1"})
|
||||
assert result1 is not None
|
||||
assert result1.pk == task1.pk
|
||||
|
||||
# Should find task2 when searching for its kwargs
|
||||
result2 = view.check_task_status("scan_task", {"provider_id": "test2"})
|
||||
assert result2 is not None
|
||||
assert result2.pk == task2.pk
|
||||
|
||||
# Should not find anything when searching for non-existent kwargs
|
||||
result3 = view.check_task_status(
|
||||
"scan_task", {"provider_id": "test3"}, raise_on_not_found=False
|
||||
)
|
||||
assert result3 is None
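The kwargs assertions above depend on how stored kwargs are matched: TaskResult.task_kwargs is a JSON string, and each value is located with a plain __contains substring lookup. A minimal sketch of that matching idea, for orientation only (the mixin's actual query appears later in this changeset):

from django_celery_results.models import TaskResult


def find_latest_result(task_name: str, task_kwargs: dict) -> TaskResult | None:
    # TaskResult.task_kwargs holds a JSON string, e.g. '{"provider_id": "test1"}',
    # so each kwarg value is matched with a simple substring (LIKE) lookup.
    queryset = TaskResult.objects.filter(task_name=task_name)
    for value in task_kwargs.values():
        queryset = queryset.filter(task_kwargs__contains=str(value))
    return queryset.order_by("-date_created").first()

# find_latest_result("scan_task", {"provider_id": "test1"}) matches the result
# created with task_kwargs=json.dumps({"provider_id": "test1", "scan_type": "full"}).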
|
||||
@@ -92,177 +92,3 @@ class TestResourceModel:
|
||||
|
||||
assert len(resource.tags.filter(tenant_id=tenant_id)) == 0
|
||||
assert resource.get_tags(tenant_id=tenant_id) == {}
|
||||
|
||||
|
||||
# @pytest.mark.django_db
|
||||
# class TestFindingModel:
|
||||
# def test_add_finding_with_long_uid(
|
||||
# self, providers_fixture, scans_fixture, resources_fixture
|
||||
# ):
|
||||
# provider, *_ = providers_fixture
|
||||
# tenant_id = provider.tenant_id
|
||||
|
||||
# long_uid = "1" * 500
|
||||
# _ = Finding.objects.create(
|
||||
# tenant_id=tenant_id,
|
||||
# uid=long_uid,
|
||||
# delta=Finding.DeltaChoices.NEW,
|
||||
# check_metadata={},
|
||||
# status=StatusChoices.PASS,
|
||||
# status_extended="",
|
||||
# severity="high",
|
||||
# impact="high",
|
||||
# raw_result={},
|
||||
# check_id="test_check",
|
||||
# scan=scans_fixture[0],
|
||||
# first_seen_at=None,
|
||||
# muted=False,
|
||||
# compliance={},
|
||||
# )
|
||||
# assert Finding.objects.filter(uid=long_uid).exists()
|
||||
|
||||
|
||||
# @pytest.mark.django_db
|
||||
# class TestSAMLConfigurationModel:
|
||||
# VALID_METADATA = """<?xml version='1.0' encoding='UTF-8'?>
|
||||
# <md:EntityDescriptor entityID='TEST' xmlns:md='urn:oasis:names:tc:SAML:2.0:metadata'>
|
||||
# <md:IDPSSODescriptor WantAuthnRequestsSigned='false' protocolSupportEnumeration='urn:oasis:names:tc:SAML:2.0:protocol'>
|
||||
# <md:KeyDescriptor use='signing'>
|
||||
# <ds:KeyInfo xmlns:ds='http://www.w3.org/2000/09/xmldsig#'>
|
||||
# <ds:X509Data>
|
||||
# <ds:X509Certificate>FAKECERTDATA</ds:X509Certificate>
|
||||
# </ds:X509Data>
|
||||
# </ds:KeyInfo>
|
||||
# </md:KeyDescriptor>
|
||||
# <md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST' Location='https://idp.test/sso'/>
|
||||
# </md:IDPSSODescriptor>
|
||||
# </md:EntityDescriptor>
|
||||
# """
|
||||
|
||||
# def test_creates_valid_configuration(self):
|
||||
# tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant A")
|
||||
# config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
|
||||
# email_domain="ssoexample.com",
|
||||
# metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
# tenant=tenant,
|
||||
# )
|
||||
|
||||
# assert config.email_domain == "ssoexample.com"
|
||||
# assert SocialApp.objects.filter(client_id="ssoexample.com").exists()
|
||||
|
||||
# def test_email_domain_with_at_symbol_fails(self):
|
||||
# tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant B")
|
||||
# config = SAMLConfiguration(
|
||||
# email_domain="invalid@domain.com",
|
||||
# metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
# tenant=tenant,
|
||||
# )
|
||||
|
||||
# with pytest.raises(ValidationError) as exc_info:
|
||||
# config.clean()
|
||||
|
||||
# errors = exc_info.value.message_dict
|
||||
# assert "email_domain" in errors
|
||||
# assert "Domain must not contain @" in errors["email_domain"][0]
|
||||
|
||||
# def test_duplicate_email_domain_fails(self):
|
||||
# tenant1 = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant C1")
|
||||
# tenant2 = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant C2")
|
||||
|
||||
# SAMLConfiguration.objects.using(MainRouter.admin_db).create(
|
||||
# email_domain="duplicate.com",
|
||||
# metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
# tenant=tenant1,
|
||||
# )
|
||||
|
||||
# config = SAMLConfiguration(
|
||||
# email_domain="duplicate.com",
|
||||
# metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
# tenant=tenant2,
|
||||
# )
|
||||
|
||||
# with pytest.raises(ValidationError) as exc_info:
|
||||
# config.clean()
|
||||
|
||||
# errors = exc_info.value.message_dict
|
||||
# assert "tenant" in errors
|
||||
# assert "There is a problem with your email domain." in errors["tenant"][0]
|
||||
|
||||
# def test_duplicate_tenant_config_fails(self):
|
||||
# tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant D")
|
||||
|
||||
# SAMLConfiguration.objects.using(MainRouter.admin_db).create(
|
||||
# email_domain="unique1.com",
|
||||
# metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
# tenant=tenant,
|
||||
# )
|
||||
|
||||
# config = SAMLConfiguration(
|
||||
# email_domain="unique2.com",
|
||||
# metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
# tenant=tenant,
|
||||
# )
|
||||
|
||||
# with pytest.raises(ValidationError) as exc_info:
|
||||
# config.clean()
|
||||
|
||||
# errors = exc_info.value.message_dict
|
||||
# assert "tenant" in errors
|
||||
# assert (
|
||||
# "A SAML configuration already exists for this tenant."
|
||||
# in errors["tenant"][0]
|
||||
# )
|
||||
|
||||
# def test_invalid_metadata_xml_fails(self):
|
||||
# tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant E")
|
||||
# config = SAMLConfiguration(
|
||||
# email_domain="brokenxml.com",
|
||||
# metadata_xml="<bad<xml>",
|
||||
# tenant=tenant,
|
||||
# )
|
||||
|
||||
# with pytest.raises(ValidationError) as exc_info:
|
||||
# config._parse_metadata()
|
||||
|
||||
# errors = exc_info.value.message_dict
|
||||
# assert "metadata_xml" in errors
|
||||
# assert "Invalid XML" in errors["metadata_xml"][0]
|
||||
# assert "not well-formed" in errors["metadata_xml"][0]
|
||||
|
||||
# def test_metadata_missing_sso_fails(self):
|
||||
# tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant F")
|
||||
# xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
|
||||
# <md:IDPSSODescriptor></md:IDPSSODescriptor>
|
||||
# </md:EntityDescriptor>"""
|
||||
# config = SAMLConfiguration(
|
||||
# email_domain="nosso.com",
|
||||
# metadata_xml=xml,
|
||||
# tenant=tenant,
|
||||
# )
|
||||
|
||||
# with pytest.raises(ValidationError) as exc_info:
|
||||
# config._parse_metadata()
|
||||
|
||||
# errors = exc_info.value.message_dict
|
||||
# assert "metadata_xml" in errors
|
||||
# assert "Missing SingleSignOnService" in errors["metadata_xml"][0]
|
||||
|
||||
# def test_metadata_missing_certificate_fails(self):
|
||||
# tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant G")
|
||||
# xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
|
||||
# <md:IDPSSODescriptor>
|
||||
# <md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" Location="https://example.com/sso"/>
|
||||
# </md:IDPSSODescriptor>
|
||||
# </md:EntityDescriptor>"""
|
||||
# config = SAMLConfiguration(
|
||||
# email_domain="nocert.com",
|
||||
# metadata_xml=xml,
|
||||
# tenant=tenant,
|
||||
# )
|
||||
|
||||
# with pytest.raises(ValidationError) as exc_info:
|
||||
# config._parse_metadata()
|
||||
|
||||
# errors = exc_info.value.message_dict
|
||||
# assert "metadata_xml" in errors
|
||||
# assert "X509Certificate" in errors["metadata_xml"][0]
|
||||
|
||||
@@ -1,19 +1,7 @@
from unittest.mock import ANY, Mock, patch

import pytest
from django.urls import reverse
from rest_framework import status

from api.models import (
    Membership,
    ProviderGroup,
    ProviderGroupMembership,
    Role,
    RoleProviderGroupRelationship,
    User,
    UserRoleRelationship,
)
from api.v1.serializers import TokenSerializer
from unittest.mock import patch, ANY, Mock


@pytest.mark.django_db
|
||||
@@ -316,96 +304,3 @@ class TestProviderViewSet:
|
||||
reverse("provider-connection", kwargs={"pk": provider.id})
|
||||
)
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestLimitedVisibility:
|
||||
TEST_EMAIL = "rbac@rbac.com"
|
||||
TEST_PASSWORD = "thisisapassword123"
|
||||
|
||||
@pytest.fixture
|
||||
def limited_admin_user(
|
||||
self, django_db_setup, django_db_blocker, tenants_fixture, providers_fixture
|
||||
):
|
||||
with django_db_blocker.unblock():
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
user = User.objects.create_user(
|
||||
name="testing",
|
||||
email=self.TEST_EMAIL,
|
||||
password=self.TEST_PASSWORD,
|
||||
)
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
|
||||
role = Role.objects.create(
|
||||
name="limited_visibility",
|
||||
tenant=tenant,
|
||||
manage_users=True,
|
||||
manage_account=True,
|
||||
manage_billing=True,
|
||||
manage_providers=True,
|
||||
manage_integrations=True,
|
||||
manage_scans=True,
|
||||
unlimited_visibility=False,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant=tenant,
|
||||
)
|
||||
|
||||
provider_group = ProviderGroup.objects.create(
|
||||
name="limited_visibility_group",
|
||||
tenant=tenant,
|
||||
)
|
||||
ProviderGroupMembership.objects.create(
|
||||
tenant=tenant,
|
||||
provider=provider,
|
||||
provider_group=provider_group,
|
||||
)
|
||||
|
||||
RoleProviderGroupRelationship.objects.create(
|
||||
tenant=tenant, role=role, provider_group=provider_group
|
||||
)
|
||||
|
||||
return user
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client_rbac_limited(
|
||||
self, limited_admin_user, tenants_fixture, client
|
||||
):
|
||||
client.user = limited_admin_user
|
||||
tenant_id = tenants_fixture[0].id
|
||||
serializer = TokenSerializer(
|
||||
data={
|
||||
"type": "tokens",
|
||||
"email": self.TEST_EMAIL,
|
||||
"password": self.TEST_PASSWORD,
|
||||
"tenant_id": tenant_id,
|
||||
}
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
def test_integrations(
|
||||
self, authenticated_client_rbac_limited, integrations_fixture, providers_fixture
|
||||
):
|
||||
# Integration 2 is related to provider1 and provider 2
|
||||
# This user cannot see provider 2
|
||||
integration = integrations_fixture[1]
|
||||
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse("integration-detail", kwargs={"pk": integration.id})
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert integration.providers.count() == 2
|
||||
assert (
|
||||
response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
|
||||
)
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
import logging
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from config.settings.sentry import before_send
|
||||
|
||||
|
||||
def test_before_send_ignores_log_with_ignored_exception():
|
||||
"""Test that before_send ignores logs containing ignored exceptions."""
|
||||
log_record = MagicMock()
|
||||
log_record.msg = "Provider kubernetes is not connected"
|
||||
log_record.levelno = logging.ERROR # 40
|
||||
|
||||
hint = {"log_record": log_record}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was dropped (None returned)
|
||||
assert result is None
|
||||
|
||||
|
||||
def test_before_send_ignores_exception_with_ignored_exception():
|
||||
"""Test that before_send ignores exceptions containing ignored exceptions."""
|
||||
exc_info = (Exception, Exception("Provider kubernetes is not connected"), None)
|
||||
|
||||
hint = {"exc_info": exc_info}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was dropped (None returned)
|
||||
assert result is None
|
||||
|
||||
|
||||
def test_before_send_passes_through_non_ignored_log():
|
||||
"""Test that before_send passes through logs that don't contain ignored exceptions."""
|
||||
log_record = MagicMock()
|
||||
log_record.msg = "Some other error message"
|
||||
log_record.levelno = logging.ERROR # 40
|
||||
|
||||
hint = {"log_record": log_record}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was passed through
|
||||
assert result == event
|
||||
|
||||
|
||||
def test_before_send_passes_through_non_ignored_exception():
|
||||
"""Test that before_send passes through exceptions that don't contain ignored exceptions."""
|
||||
exc_info = (Exception, Exception("Some other error message"), None)
|
||||
|
||||
hint = {"exc_info": exc_info}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was passed through
|
||||
assert result == event
|
||||
|
||||
|
||||
def test_before_send_handles_warning_level():
|
||||
"""Test that before_send handles warning level logs."""
|
||||
log_record = MagicMock()
|
||||
log_record.msg = "Provider kubernetes is not connected"
|
||||
log_record.levelno = logging.WARNING # 30
|
||||
|
||||
hint = {"log_record": log_record}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was dropped (None returned)
|
||||
assert result is None
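These tests exercise a Sentry before_send hook that drops events whose log message or exception text matches a known noisy error. The following is only a sketch of such a filter, with an assumed ignore pattern and function name; the real config/settings/sentry.py implementation may differ:

IGNORED_MESSAGES = ("is not connected",)  # assumed pattern, for illustration only


def before_send_sketch(event, hint):
    # Log-based events carry the original LogRecord in the hint.
    log_record = hint.get("log_record")
    if log_record and any(msg in str(log_record.msg) for msg in IGNORED_MESSAGES):
        return None  # drop the event
    # Exception-based events carry (type, value, traceback) in exc_info.
    exc_info = hint.get("exc_info")
    if exc_info and any(msg in str(exc_info[1]) for msg in IGNORED_MESSAGES):
        return None
    return event  # pass everything else through to Sentry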
|
||||
@@ -19,7 +19,6 @@ from prowler.providers.aws.aws_provider import AwsProvider
|
||||
from prowler.providers.azure.azure_provider import AzureProvider
|
||||
from prowler.providers.gcp.gcp_provider import GcpProvider
|
||||
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
|
||||
from prowler.providers.m365.m365_provider import M365Provider
|
||||
|
||||
|
||||
class TestMergeDicts:
|
||||
@@ -105,7 +104,6 @@ class TestReturnProwlerProvider:
|
||||
(Provider.ProviderChoices.GCP.value, GcpProvider),
|
||||
(Provider.ProviderChoices.AZURE.value, AzureProvider),
|
||||
(Provider.ProviderChoices.KUBERNETES.value, KubernetesProvider),
|
||||
(Provider.ProviderChoices.M365.value, M365Provider),
|
||||
],
|
||||
)
|
||||
def test_return_prowler_provider(self, provider_type, expected_provider):
|
||||
@@ -178,10 +176,6 @@ class TestGetProwlerProviderKwargs:
|
||||
Provider.ProviderChoices.KUBERNETES.value,
|
||||
{"context": "provider_uid"},
|
||||
),
|
||||
(
|
||||
Provider.ProviderChoices.M365.value,
|
||||
{},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_get_prowler_provider_kwargs(self, provider_type, expected_extra_kwargs):
|
||||
@@ -254,7 +248,7 @@ class TestValidateInvitation:
|
||||
|
||||
assert result == invitation
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="VALID_TOKEN", email__iexact="user@example.com"
|
||||
token="VALID_TOKEN", email="user@example.com"
|
||||
)
|
||||
|
||||
def test_invitation_not_found_raises_validation_error(self):
|
||||
@@ -269,7 +263,7 @@ class TestValidateInvitation:
|
||||
"invitation_token": "Invalid invitation code."
|
||||
}
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="INVALID_TOKEN", email__iexact="user@example.com"
|
||||
token="INVALID_TOKEN", email="user@example.com"
|
||||
)
|
||||
|
||||
def test_invitation_not_found_raises_not_found(self):
|
||||
@@ -284,7 +278,7 @@ class TestValidateInvitation:
|
||||
|
||||
assert exc_info.value.detail == "Invitation is not valid."
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="INVALID_TOKEN", email__iexact="user@example.com"
|
||||
token="INVALID_TOKEN", email="user@example.com"
|
||||
)
|
||||
|
||||
def test_invitation_expired(self, invitation):
|
||||
@@ -332,27 +326,5 @@ class TestValidateInvitation:
|
||||
"invitation_token": "Invalid invitation code."
|
||||
}
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="VALID_TOKEN", email__iexact="different@example.com"
|
||||
)
|
||||
|
||||
def test_valid_invitation_uppercase_email(self):
|
||||
"""Test that validate_invitation works with case-insensitive email lookup."""
|
||||
uppercase_email = "USER@example.com"
|
||||
|
||||
invitation = MagicMock(spec=Invitation)
|
||||
invitation.token = "VALID_TOKEN"
|
||||
invitation.email = uppercase_email
|
||||
invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=1)
|
||||
invitation.state = Invitation.State.PENDING
|
||||
invitation.tenant = MagicMock()
|
||||
|
||||
with patch("api.utils.Invitation.objects.using") as mock_using:
|
||||
mock_db = mock_using.return_value
|
||||
mock_db.get.return_value = invitation
|
||||
|
||||
result = validate_invitation("VALID_TOKEN", "user@example.com")
|
||||
|
||||
assert result == invitation
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="VALID_TOKEN", email__iexact="user@example.com"
|
||||
token="VALID_TOKEN", email="different@example.com"
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,20 +1,16 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from allauth.socialaccount.providers.oauth2.client import OAuth2Client
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.db.models import Subquery
|
||||
from rest_framework.exceptions import NotFound, ValidationError
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.exceptions import InvitationTokenExpiredException
|
||||
from api.models import Invitation, Provider, Resource
|
||||
from api.v1.serializers import FindingMetadataSerializer
|
||||
from api.models import Invitation, Provider
|
||||
from prowler.providers.aws.aws_provider import AwsProvider
|
||||
from prowler.providers.azure.azure_provider import AzureProvider
|
||||
from prowler.providers.common.models import Connection
|
||||
from prowler.providers.gcp.gcp_provider import GcpProvider
|
||||
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
|
||||
from prowler.providers.m365.m365_provider import M365Provider
|
||||
|
||||
|
||||
class CustomOAuth2Client(OAuth2Client):
|
||||
@@ -55,14 +51,14 @@ def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
|
||||
|
||||
def return_prowler_provider(
|
||||
provider: Provider,
|
||||
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider]:
|
||||
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider]:
|
||||
"""Return the Prowler provider class based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secrets.
|
||||
|
||||
Returns:
|
||||
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: The corresponding provider class.
|
||||
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: The corresponding provider class.
|
||||
|
||||
Raises:
|
||||
ValueError: If the provider type specified in `provider.provider` is not supported.
|
||||
@@ -76,8 +72,6 @@ def return_prowler_provider(
|
||||
prowler_provider = AzureProvider
|
||||
case Provider.ProviderChoices.KUBERNETES.value:
|
||||
prowler_provider = KubernetesProvider
|
||||
case Provider.ProviderChoices.M365.value:
|
||||
prowler_provider = M365Provider
|
||||
case _:
|
||||
raise ValueError(f"Provider type {provider.provider} not supported")
|
||||
return prowler_provider
|
||||
@@ -110,15 +104,15 @@ def get_prowler_provider_kwargs(provider: Provider) -> dict:
|
||||
|
||||
def initialize_prowler_provider(
|
||||
provider: Provider,
|
||||
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider:
|
||||
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider:
|
||||
"""Initialize a Prowler provider instance based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secrets.
|
||||
|
||||
Returns:
|
||||
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
|
||||
(`AwsProvider`, `AzureProvider`, `GcpProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
|
||||
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: An instance of the corresponding provider class
|
||||
(`AwsProvider`, `AzureProvider`, `GcpProvider`, or `KubernetesProvider`) initialized with the
|
||||
provider's secrets.
|
||||
"""
|
||||
prowler_provider = return_prowler_provider(provider)
|
||||
@@ -136,12 +130,10 @@ def prowler_provider_connection_test(provider: Provider) -> Connection:
|
||||
Connection: A connection object representing the result of the connection test for the specified provider.
|
||||
"""
|
||||
prowler_provider = return_prowler_provider(provider)
|
||||
|
||||
try:
|
||||
prowler_provider_kwargs = provider.secret.secret
|
||||
except Provider.secret.RelatedObjectDoesNotExist as secret_error:
|
||||
return Connection(is_connected=False, error=secret_error)
|
||||
|
||||
return prowler_provider.test_connection(
|
||||
**prowler_provider_kwargs, provider_id=provider.uid, raise_on_exception=False
|
||||
)
|
||||
@@ -187,7 +179,7 @@ def validate_invitation(
|
||||
# Admin DB connector is used to bypass RLS protection since the invitation belongs to a tenant the user
|
||||
# is not a member of yet
|
||||
invitation = Invitation.objects.using(MainRouter.admin_db).get(
|
||||
token=invitation_token, email__iexact=email
|
||||
token=invitation_token, email=email
|
||||
)
|
||||
except Invitation.DoesNotExist:
|
||||
if raise_not_found:
|
||||
@@ -208,33 +200,3 @@ def validate_invitation(
|
||||
)
|
||||
|
||||
return invitation
|
||||
|
||||
|
||||
# ToRemove after removing the fallback mechanism in /findings/metadata
|
||||
def get_findings_metadata_no_aggregations(tenant_id: str, filtered_queryset):
|
||||
filtered_ids = filtered_queryset.order_by().values("id")
|
||||
|
||||
relevant_resources = Resource.all_objects.filter(
|
||||
tenant_id=tenant_id, findings__id__in=Subquery(filtered_ids)
|
||||
).only("service", "region", "type")
|
||||
|
||||
aggregation = relevant_resources.aggregate(
|
||||
services=ArrayAgg("service", flat=True),
|
||||
regions=ArrayAgg("region", flat=True),
|
||||
resource_types=ArrayAgg("type", flat=True),
|
||||
)
|
||||
|
||||
services = sorted(set(aggregation["services"] or []))
|
||||
regions = sorted({region for region in aggregation["regions"] or [] if region})
|
||||
resource_types = sorted(set(aggregation["resource_types"] or []))
|
||||
|
||||
result = {
|
||||
"services": services,
|
||||
"regions": regions,
|
||||
"resource_types": resource_types,
|
||||
}
|
||||
|
||||
serializer = FindingMetadataSerializer(data=result)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
return serializer.data
|
||||
|
||||
@@ -1,222 +0,0 @@
|
||||
from django.urls import reverse
|
||||
from django_celery_results.models import TaskResult
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
from api.exceptions import (
|
||||
TaskFailedException,
|
||||
TaskInProgressException,
|
||||
TaskNotFoundException,
|
||||
)
|
||||
from api.models import StateChoices, Task
|
||||
from api.v1.serializers import TaskSerializer
|
||||
|
||||
|
||||
class PaginateByPkMixin:
|
||||
"""
|
||||
Mixin to paginate on a list of PKs (cheaper than heavy JOINs),
|
||||
re-fetch the full objects with the desired select/prefetch,
|
||||
re-sort them to preserve DB ordering, then serialize + return.
|
||||
"""
|
||||
|
||||
def paginate_by_pk(
|
||||
self,
|
||||
request, # noqa: F841
|
||||
base_queryset,
|
||||
manager,
|
||||
select_related: list[str] | None = None,
|
||||
prefetch_related: list[str] | None = None,
|
||||
) -> Response:
|
||||
pk_list = base_queryset.values_list("id", flat=True)
|
||||
page = self.paginate_queryset(pk_list)
|
||||
if page is None:
|
||||
return Response(self.get_serializer(base_queryset, many=True).data)
|
||||
|
||||
queryset = manager.filter(id__in=page)
|
||||
if select_related:
|
||||
queryset = queryset.select_related(*select_related)
|
||||
if prefetch_related:
|
||||
queryset = queryset.prefetch_related(*prefetch_related)
|
||||
|
||||
queryset = sorted(queryset, key=lambda obj: page.index(obj.id))
|
||||
|
||||
serialized = self.get_serializer(queryset, many=True).data
|
||||
return self.get_paginated_response(serialized)
|
||||
|
||||
|
||||
class TaskManagementMixin:
|
||||
"""
|
||||
Mixin to manage task status checking.
|
||||
|
||||
This mixin provides functionality to check if a task with specific parameters
|
||||
is running, completed, failed, or doesn't exist. It returns the task when running
|
||||
and raises specific exceptions for failed/not found scenarios that can be handled
|
||||
at the view level.
|
||||
"""
|
||||
|
||||
def check_task_status(
|
||||
self,
|
||||
task_name: str,
|
||||
task_kwargs: dict,
|
||||
raise_on_failed: bool = True,
|
||||
raise_on_not_found: bool = True,
|
||||
) -> Task | None:
|
||||
"""
|
||||
Check the status of a task with given name and kwargs.
|
||||
|
||||
This method first checks for a related Task object, and if not found,
|
||||
checks TaskResult directly. If a TaskResult is found and running but
|
||||
there's no related Task, it raises TaskInProgressException.
|
||||
|
||||
Args:
|
||||
task_name (str): The name of the task to check
|
||||
task_kwargs (dict): The kwargs to match against the task
|
||||
raise_on_failed (bool): Whether to raise exception if task failed
|
||||
raise_on_not_found (bool): Whether to raise exception if task not found
|
||||
|
||||
Returns:
|
||||
Task | None: The task instance if found (regardless of state), None if not found and raise_on_not_found=False
|
||||
|
||||
Raises:
|
||||
TaskFailedException: If task failed and raise_on_failed=True
|
||||
TaskNotFoundException: If task not found and raise_on_not_found=True
|
||||
TaskInProgressException: If task is running but no related Task object exists
|
||||
"""
|
||||
# First, try to find a Task object with related TaskResult
|
||||
try:
|
||||
# Build the filter for task kwargs
|
||||
task_filter = {
|
||||
"task_runner_task__task_name": task_name,
|
||||
}
|
||||
|
||||
# Add kwargs filters - we need to check if the task kwargs contain our parameters
|
||||
for key, value in task_kwargs.items():
|
||||
task_filter["task_runner_task__task_kwargs__contains"] = str(value)
|
||||
|
||||
task = (
|
||||
Task.objects.filter(**task_filter)
|
||||
.select_related("task_runner_task")
|
||||
.order_by("-inserted_at")
|
||||
.first()
|
||||
)
|
||||
|
||||
if task:
|
||||
# Get task state using the same logic as TaskSerializer
|
||||
task_state_mapping = {
|
||||
"PENDING": StateChoices.AVAILABLE,
|
||||
"STARTED": StateChoices.EXECUTING,
|
||||
"PROGRESS": StateChoices.EXECUTING,
|
||||
"SUCCESS": StateChoices.COMPLETED,
|
||||
"FAILURE": StateChoices.FAILED,
|
||||
"REVOKED": StateChoices.CANCELLED,
|
||||
}
|
||||
|
||||
celery_status = (
|
||||
task.task_runner_task.status if task.task_runner_task else None
|
||||
)
|
||||
task_state = task_state_mapping.get(
|
||||
celery_status or "", StateChoices.AVAILABLE
|
||||
)
|
||||
|
||||
# Check task state and raise exceptions accordingly
|
||||
if task_state in (StateChoices.FAILED, StateChoices.CANCELLED):
|
||||
if raise_on_failed:
|
||||
raise TaskFailedException(task=task)
|
||||
return task
|
||||
elif task_state == StateChoices.COMPLETED:
|
||||
return None
|
||||
|
||||
return task
|
||||
|
||||
except Task.DoesNotExist:
|
||||
pass
|
||||
|
||||
# If no Task found, check TaskResult directly
|
||||
try:
|
||||
# Build the filter for TaskResult
|
||||
task_result_filter = {
|
||||
"task_name": task_name,
|
||||
}
|
||||
|
||||
# Add kwargs filters - check if the task kwargs contain our parameters
|
||||
for key, value in task_kwargs.items():
|
||||
task_result_filter["task_kwargs__contains"] = str(value)
|
||||
|
||||
task_result = (
|
||||
TaskResult.objects.filter(**task_result_filter)
|
||||
.order_by("-date_created")
|
||||
.first()
|
||||
)
|
||||
|
||||
if task_result:
|
||||
# Check if the TaskResult indicates a running task
|
||||
if task_result.status in ["PENDING", "STARTED", "PROGRESS"]:
|
||||
# Task is running but no related Task object exists
|
||||
raise TaskInProgressException(task_result=task_result)
|
||||
elif task_result.status == "FAILURE":
|
||||
if raise_on_failed:
|
||||
raise TaskFailedException(task=None)
|
||||
# For other statuses (SUCCESS, REVOKED), we don't have a Task to return,
|
||||
# so we treat it as not found
|
||||
|
||||
except TaskResult.DoesNotExist:
|
||||
pass
|
||||
|
||||
# No task found at all
|
||||
if raise_on_not_found:
|
||||
raise TaskNotFoundException()
|
||||
return None
|
||||
|
||||
def get_task_response_if_running(
|
||||
self,
|
||||
task_name: str,
|
||||
task_kwargs: dict,
|
||||
raise_on_failed: bool = True,
|
||||
raise_on_not_found: bool = True,
|
||||
) -> Response | None:
|
||||
"""
|
||||
Get a 202 response with task details if the task is currently running.
|
||||
|
||||
This method is useful for endpoints that should return task status when
|
||||
a background task is in progress, similar to the compliance overview endpoints.
|
||||
|
||||
Args:
|
||||
task_name (str): The name of the task to check
|
||||
task_kwargs (dict): The kwargs to match against the task
|
||||
|
||||
Returns:
|
||||
Response | None: 202 response with task details if running, None otherwise
|
||||
"""
|
||||
task = self.check_task_status(
|
||||
task_name=task_name,
|
||||
task_kwargs=task_kwargs,
|
||||
raise_on_failed=raise_on_failed,
|
||||
raise_on_not_found=raise_on_not_found,
|
||||
)
|
||||
|
||||
if not task:
|
||||
return None
|
||||
|
||||
# Get task state
|
||||
task_state_mapping = {
|
||||
"PENDING": StateChoices.AVAILABLE,
|
||||
"STARTED": StateChoices.EXECUTING,
|
||||
"PROGRESS": StateChoices.EXECUTING,
|
||||
"SUCCESS": StateChoices.COMPLETED,
|
||||
"FAILURE": StateChoices.FAILED,
|
||||
"REVOKED": StateChoices.CANCELLED,
|
||||
}
|
||||
|
||||
celery_status = task.task_runner_task.status if task.task_runner_task else None
|
||||
task_state = task_state_mapping.get(celery_status or "", StateChoices.AVAILABLE)
|
||||
|
||||
if task_state == StateChoices.EXECUTING:
|
||||
self.response_serializer_class = TaskSerializer
|
||||
serializer = TaskSerializer(task)
|
||||
return Response(
|
||||
data=serializer.data,
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
headers={
|
||||
"Content-Location": reverse("task-detail", kwargs={"pk": task.id})
|
||||
},
|
||||
)
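A sketch of how a view could combine these helpers; the endpoint, task name, and response body are placeholders rather than code from this changeset:

from rest_framework import viewsets
from rest_framework.response import Response


class ReportViewSet(TaskManagementMixin, viewsets.ViewSet):
    def retrieve(self, request, *args, **kwargs):
        task_kwargs = {"scan_id": kwargs["pk"]}
        # While the background job is still executing this returns a 202 with the
        # serialized task; TaskFailedException / TaskNotFoundException bubble up
        # to the API exception handler otherwise.
        running = self.get_task_response_if_running("generate_report_task", task_kwargs)
        if running is not None:
            return running
        return Response({"detail": "report is ready"})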
|
||||
@@ -1,122 +0,0 @@
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework_json_api import serializers
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
|
||||
class BaseValidateSerializer(serializers.Serializer):
|
||||
def validate(self, data):
|
||||
if hasattr(self, "initial_data"):
|
||||
initial_data = set(self.initial_data.keys()) - {"id", "type"}
|
||||
unknown_keys = initial_data - set(self.fields.keys())
|
||||
if unknown_keys:
|
||||
raise ValidationError(f"Invalid fields: {unknown_keys}")
|
||||
return data
|
||||
|
||||
|
||||
# Integrations
|
||||
|
||||
|
||||
class S3ConfigSerializer(BaseValidateSerializer):
|
||||
bucket_name = serializers.CharField()
|
||||
output_directory = serializers.CharField()
|
||||
|
||||
class Meta:
|
||||
resource_name = "integrations"
|
||||
|
||||
|
||||
class AWSCredentialSerializer(BaseValidateSerializer):
|
||||
role_arn = serializers.CharField(required=False)
|
||||
external_id = serializers.CharField(required=False)
|
||||
role_session_name = serializers.CharField(required=False)
|
||||
session_duration = serializers.IntegerField(
|
||||
required=False, min_value=900, max_value=43200
|
||||
)
|
||||
aws_access_key_id = serializers.CharField(required=False)
|
||||
aws_secret_access_key = serializers.CharField(required=False)
|
||||
aws_session_token = serializers.CharField(required=False)
|
||||
|
||||
class Meta:
|
||||
resource_name = "integrations"
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"title": "AWS Credentials",
|
||||
"properties": {
|
||||
"role_arn": {
|
||||
"type": "string",
|
||||
"description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
|
||||
"assumption.",
|
||||
},
|
||||
"external_id": {
|
||||
"type": "string",
|
||||
"description": "An identifier to enhance security for role assumption.",
|
||||
},
|
||||
"aws_access_key_id": {
|
||||
"type": "string",
|
||||
"description": "The AWS access key ID. Only required if the environment lacks pre-configured "
|
||||
"AWS credentials.",
|
||||
},
|
||||
"aws_secret_access_key": {
|
||||
"type": "string",
|
||||
"description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
|
||||
"no AWS credentials are pre-configured.",
|
||||
},
|
||||
"aws_session_token": {
|
||||
"type": "string",
|
||||
"description": "The session token for temporary credentials, if applicable.",
|
||||
},
|
||||
"session_duration": {
|
||||
"type": "integer",
|
||||
"minimum": 900,
|
||||
"maximum": 43200,
|
||||
"default": 3600,
|
||||
"description": "The duration (in seconds) for the role session.",
|
||||
},
|
||||
"role_session_name": {
|
||||
"type": "string",
|
||||
"description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
|
||||
"The regex used to validate this parameter is a string of characters consisting of "
|
||||
"upper- and lower-case alphanumeric characters with no spaces. You can also include "
|
||||
"underscores or any of the following characters: =,.@-\n\n"
|
||||
"Examples:\n"
|
||||
"- MySession123\n"
|
||||
"- User_Session-1\n"
|
||||
"- Test.Session@2",
|
||||
"pattern": "^[a-zA-Z0-9=,.@_-]+$",
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
class IntegrationCredentialField(serializers.JSONField):
|
||||
pass
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Amazon S3",
|
||||
"properties": {
|
||||
"bucket_name": {
|
||||
"type": "string",
|
||||
"description": "The name of the S3 bucket where files will be stored.",
|
||||
},
|
||||
"output_directory": {
|
||||
"type": "string",
|
||||
"description": "The directory path within the bucket where files will be saved.",
|
||||
},
|
||||
},
|
||||
"required": ["bucket_name", "output_directory"],
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
class IntegrationConfigField(serializers.JSONField):
|
||||
pass
|
||||
@@ -1,183 +0,0 @@
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework_json_api import serializers
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"title": "AWS Static Credentials",
|
||||
"properties": {
|
||||
"aws_access_key_id": {
|
||||
"type": "string",
|
||||
"description": "The AWS access key ID. Required for environments where no IAM role is being "
|
||||
"assumed and direct AWS access is needed.",
|
||||
},
|
||||
"aws_secret_access_key": {
|
||||
"type": "string",
|
||||
"description": "The AWS secret access key. Must accompany 'aws_access_key_id' to authorize "
|
||||
"access to AWS resources.",
|
||||
},
|
||||
"aws_session_token": {
|
||||
"type": "string",
|
||||
"description": "The session token associated with temporary credentials. Only needed for "
|
||||
"session-based or temporary AWS access.",
|
||||
},
|
||||
},
|
||||
"required": ["aws_access_key_id", "aws_secret_access_key"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "AWS Assume Role",
|
||||
"properties": {
|
||||
"role_arn": {
|
||||
"type": "string",
|
||||
"description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
|
||||
"assumption.",
|
||||
},
|
||||
"external_id": {
|
||||
"type": "string",
|
||||
"description": "An identifier to enhance security for role assumption.",
|
||||
},
|
||||
"aws_access_key_id": {
|
||||
"type": "string",
|
||||
"description": "The AWS access key ID. Only required if the environment lacks pre-configured "
|
||||
"AWS credentials.",
|
||||
},
|
||||
"aws_secret_access_key": {
|
||||
"type": "string",
|
||||
"description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
|
||||
"no AWS credentials are pre-configured.",
|
||||
},
|
||||
"aws_session_token": {
|
||||
"type": "string",
|
||||
"description": "The session token for temporary credentials, if applicable.",
|
||||
},
|
||||
"session_duration": {
|
||||
"type": "integer",
|
||||
"minimum": 900,
|
||||
"maximum": 43200,
|
||||
"default": 3600,
|
||||
"description": "The duration (in seconds) for the role session.",
|
||||
},
|
||||
"role_session_name": {
|
||||
"type": "string",
|
||||
"description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
|
||||
"The regex used to validate this parameter is a string of characters consisting of "
|
||||
"upper- and lower-case alphanumeric characters with no spaces. You can also include "
|
||||
"underscores or any of the following characters: =,.@-\n\n"
|
||||
"Examples:\n"
|
||||
"- MySession123\n"
|
||||
"- User_Session-1\n"
|
||||
"- Test.Session@2",
|
||||
"pattern": "^[a-zA-Z0-9=,.@_-]+$",
|
||||
},
|
||||
},
|
||||
"required": ["role_arn", "external_id"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Azure Static Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure application (client) ID for authentication in Azure AD.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the application (client) ID, providing "
|
||||
"secure access.",
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure tenant ID, representing the directory where the application is "
|
||||
"registered.",
|
||||
},
|
||||
},
|
||||
"required": ["client_id", "client_secret", "tenant_id"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "M365 Static Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure application (client) ID for authentication in Azure AD.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the application (client) ID, providing "
|
||||
"secure access.",
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure tenant ID, representing the directory where the application is "
|
||||
"registered.",
|
||||
},
|
||||
"user": {
|
||||
"type": "email",
|
||||
"description": "User microsoft email address.",
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"description": "User password.",
|
||||
},
|
||||
},
|
||||
"required": [
|
||||
"client_id",
|
||||
"client_secret",
|
||||
"tenant_id",
|
||||
"user",
|
||||
"password",
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GCP Static Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The client ID from Google Cloud, used to identify the application for GCP "
|
||||
"access.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the GCP client ID, required for secure "
|
||||
"access.",
|
||||
},
|
||||
"refresh_token": {
|
||||
"type": "string",
|
||||
"description": "A refresh token that allows the application to obtain new access tokens for "
|
||||
"extended use.",
|
||||
},
|
||||
},
|
||||
"required": ["client_id", "client_secret", "refresh_token"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GCP Service Account Key",
|
||||
"properties": {
|
||||
"service_account_key": {
|
||||
"type": "object",
|
||||
"description": "The service account key for GCP.",
|
||||
}
|
||||
},
|
||||
"required": ["service_account_key"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Kubernetes Static Credentials",
|
||||
"properties": {
|
||||
"kubeconfig_content": {
|
||||
"type": "string",
|
||||
"description": "The content of the Kubernetes kubeconfig file, encoded as a string.",
|
||||
}
|
||||
},
|
||||
"required": ["kubeconfig_content"],
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
class ProviderSecretField(serializers.JSONField):
|
||||
pass
|
||||
@@ -14,12 +14,10 @@ from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
|
||||
from rest_framework_simplejwt.tokens import RefreshToken
|
||||
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
IntegrationProviderRelationship,
|
||||
Invitation,
|
||||
InvitationRoleRelationship,
|
||||
LighthouseConfiguration,
|
||||
Membership,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
@@ -31,19 +29,11 @@ from api.models import (
|
||||
RoleProviderGroupRelationship,
|
||||
Scan,
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
from api.rls import Tenant
|
||||
from api.v1.serializer_utils.integrations import (
|
||||
AWSCredentialSerializer,
|
||||
IntegrationConfigField,
|
||||
IntegrationCredentialField,
|
||||
S3ConfigSerializer,
|
||||
)
|
||||
from api.v1.serializer_utils.providers import ProviderSecretField
|
||||
|
||||
# Tokens
|
||||
|
||||
@@ -853,10 +843,6 @@ class ScanSerializer(RLSSerializer):
|
||||
"url",
|
||||
]
|
||||
|
||||
included_serializers = {
|
||||
"provider": "api.v1.serializers.ProviderIncludeSerializer",
|
||||
}
|
||||
|
||||
|
||||
class ScanIncludeSerializer(RLSSerializer):
|
||||
trigger = serializers.ChoiceField(
|
||||
@@ -961,15 +947,6 @@ class ScanReportSerializer(serializers.Serializer):
|
||||
fields = ["id"]
|
||||
|
||||
|
||||
class ScanComplianceReportSerializer(serializers.Serializer):
|
||||
id = serializers.CharField(source="scan")
|
||||
name = serializers.CharField()
|
||||
|
||||
class Meta:
|
||||
resource_name = "scan-reports"
|
||||
fields = ["id", "name"]
|
||||
|
||||
|
||||
class ResourceTagSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the ResourceTag model
|
||||
@@ -1102,7 +1079,6 @@ class FindingSerializer(RLSSerializer):
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"first_seen_at",
|
||||
"muted",
|
||||
"url",
|
||||
# Relationships
|
||||
"scan",
|
||||
@@ -1152,16 +1128,12 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
|
||||
serializer = GCPProviderSecret(data=secret)
|
||||
elif provider_type == Provider.ProviderChoices.KUBERNETES.value:
|
||||
serializer = KubernetesProviderSecret(data=secret)
|
||||
elif provider_type == Provider.ProviderChoices.M365.value:
|
||||
serializer = M365ProviderSecret(data=secret)
|
||||
else:
|
||||
raise serializers.ValidationError(
|
||||
{"provider": f"Provider type not supported {provider_type}"}
|
||||
)
|
||||
elif secret_type == ProviderSecret.TypeChoices.ROLE:
|
||||
serializer = AWSRoleAssumptionProviderSecret(data=secret)
|
||||
elif secret_type == ProviderSecret.TypeChoices.SERVICE_ACCOUNT:
|
||||
serializer = GCPServiceAccountProviderSecret(data=secret)
|
||||
else:
|
||||
raise serializers.ValidationError(
|
||||
{"secret_type": f"Secret type not supported: {secret_type}"}
|
||||
@@ -1195,17 +1167,6 @@ class AzureProviderSecret(serializers.Serializer):
|
||||
resource_name = "provider-secrets"
|
||||
|
||||
|
||||
class M365ProviderSecret(serializers.Serializer):
|
||||
client_id = serializers.CharField()
|
||||
client_secret = serializers.CharField()
|
||||
tenant_id = serializers.CharField()
|
||||
user = serializers.EmailField(required=False)
|
||||
password = serializers.CharField(required=False)
|
||||
|
||||
class Meta:
|
||||
resource_name = "provider-secrets"
|
||||
|
||||
|
||||
class GCPProviderSecret(serializers.Serializer):
|
||||
client_id = serializers.CharField()
|
||||
client_secret = serializers.CharField()
|
||||
@@ -1215,13 +1176,6 @@ class GCPProviderSecret(serializers.Serializer):
|
||||
resource_name = "provider-secrets"
|
||||
|
||||
|
||||
class GCPServiceAccountProviderSecret(serializers.Serializer):
|
||||
service_account_key = serializers.JSONField()
|
||||
|
||||
class Meta:
|
||||
resource_name = "provider-secrets"
|
||||
|
||||
|
||||
class KubernetesProviderSecret(serializers.Serializer):
|
||||
kubeconfig_content = serializers.CharField()
|
||||
|
||||
@@ -1244,6 +1198,141 @@ class AWSRoleAssumptionProviderSecret(serializers.Serializer):
|
||||
resource_name = "provider-secrets"
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"title": "AWS Static Credentials",
|
||||
"properties": {
|
||||
"aws_access_key_id": {
|
||||
"type": "string",
|
||||
"description": "The AWS access key ID. Required for environments where no IAM role is being "
|
||||
"assumed and direct AWS access is needed.",
|
||||
},
|
||||
"aws_secret_access_key": {
|
||||
"type": "string",
|
||||
"description": "The AWS secret access key. Must accompany 'aws_access_key_id' to authorize "
|
||||
"access to AWS resources.",
|
||||
},
|
||||
"aws_session_token": {
|
||||
"type": "string",
|
||||
"description": "The session token associated with temporary credentials. Only needed for "
|
||||
"session-based or temporary AWS access.",
|
||||
},
|
||||
},
|
||||
"required": ["aws_access_key_id", "aws_secret_access_key"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "AWS Assume Role",
|
||||
"properties": {
|
||||
"role_arn": {
|
||||
"type": "string",
|
||||
"description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
|
||||
"assumption.",
|
||||
},
|
||||
"external_id": {
|
||||
"type": "string",
|
||||
"description": "An identifier to enhance security for role assumption.",
|
||||
},
|
||||
"aws_access_key_id": {
|
||||
"type": "string",
|
||||
"description": "The AWS access key ID. Only required if the environment lacks pre-configured "
|
||||
"AWS credentials.",
|
||||
},
|
||||
"aws_secret_access_key": {
|
||||
"type": "string",
|
||||
"description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
|
||||
"no AWS credentials are pre-configured.",
|
||||
},
|
||||
"aws_session_token": {
|
||||
"type": "string",
|
||||
"description": "The session token for temporary credentials, if applicable.",
|
||||
},
|
||||
"session_duration": {
|
||||
"type": "integer",
|
||||
"minimum": 900,
|
||||
"maximum": 43200,
|
||||
"default": 3600,
|
||||
"description": "The duration (in seconds) for the role session.",
|
||||
},
|
||||
"role_session_name": {
|
||||
"type": "string",
|
||||
"description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
|
||||
"The regex used to validate this parameter is a string of characters consisting of "
|
||||
"upper- and lower-case alphanumeric characters with no spaces. You can also include "
|
||||
"underscores or any of the following characters: =,.@-\n\n"
|
||||
"Examples:\n"
|
||||
"- MySession123\n"
|
||||
"- User_Session-1\n"
|
||||
"- Test.Session@2",
|
||||
"pattern": "^[a-zA-Z0-9=,.@_-]+$",
|
||||
},
|
||||
},
|
||||
"required": ["role_arn", "external_id"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Azure Static Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure application (client) ID for authentication in Azure AD.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the application (client) ID, providing "
|
||||
"secure access.",
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure tenant ID, representing the directory where the application is "
|
||||
"registered.",
|
||||
},
|
||||
},
|
||||
"required": ["client_id", "client_secret", "tenant_id"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GCP Static Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The client ID from Google Cloud, used to identify the application for GCP "
|
||||
"access.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the GCP client ID, required for secure "
|
||||
"access.",
|
||||
},
|
||||
"refresh_token": {
|
||||
"type": "string",
|
||||
"description": "A refresh token that allows the application to obtain new access tokens for "
|
||||
"extended use.",
|
||||
},
|
||||
},
|
||||
"required": ["client_id", "client_secret", "refresh_token"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Kubernetes Static Credentials",
|
||||
"properties": {
|
||||
"kubeconfig_content": {
|
||||
"type": "string",
|
||||
"description": "The content of the Kubernetes kubeconfig file, encoded as a string.",
|
||||
}
|
||||
},
|
||||
"required": ["kubeconfig_content"],
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
class ProviderSecretField(serializers.JSONField):
|
||||
pass
|
||||
|
||||
|
||||
class ProviderSecretSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the ProviderSecret model.
|
||||
@@ -1308,13 +1397,12 @@ class ProviderSecretUpdateSerializer(BaseWriteProviderSecretSerializer):
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
"provider": {"read_only": True},
|
||||
"secret_type": {"required": False},
|
||||
"secret_type": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
provider = self.instance.provider
|
||||
# To allow updating a secret with the same type without making the `secret_type` mandatory
|
||||
secret_type = attrs.get("secret_type") or self.instance.secret_type
|
||||
secret_type = self.instance.secret_type
|
||||
secret = attrs.get("secret")
|
||||
|
||||
validated_attrs = super().validate(attrs)
|
||||
@@ -1518,8 +1606,8 @@ class RoleSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"manage_account",
|
||||
# Disable for the first release
|
||||
# "manage_billing",
|
||||
# "manage_integrations",
|
||||
# /Disable for the first release
|
||||
"manage_integrations",
|
||||
"manage_providers",
|
||||
"manage_scans",
|
||||
"permission_state",
|
||||
@@ -1681,64 +1769,124 @@ class RoleProviderGroupRelationshipSerializer(RLSSerializer, BaseWriteSerializer
|
||||
# Compliance overview
|
||||
|
||||
|
||||
class ComplianceOverviewSerializer(serializers.Serializer):
|
||||
class ComplianceOverviewSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for compliance requirement status aggregated by compliance framework.
|
||||
|
||||
This serializer is used to format aggregated compliance framework data,
|
||||
providing counts of passed, failed, and manual requirements along with
|
||||
an overall global status for each framework.
|
||||
Serializer for the ComplianceOverview model.
|
||||
"""
|
||||
|
||||
# Add ID field which will be used for resource identification
|
||||
id = serializers.CharField()
|
||||
framework = serializers.CharField()
|
||||
version = serializers.CharField()
|
||||
requirements_passed = serializers.IntegerField()
|
||||
requirements_failed = serializers.IntegerField()
|
||||
requirements_manual = serializers.IntegerField()
|
||||
total_requirements = serializers.IntegerField()
|
||||
requirements_status = serializers.SerializerMethodField(
|
||||
read_only=True, method_name="get_requirements_status"
|
||||
)
|
||||
provider_type = serializers.SerializerMethodField(read_only=True)
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "compliance-overviews"
|
||||
class Meta:
|
||||
model = ComplianceOverview
|
||||
fields = [
|
||||
"id",
|
||||
"inserted_at",
|
||||
"compliance_id",
|
||||
"framework",
|
||||
"version",
|
||||
"requirements_status",
|
||||
"region",
|
||||
"provider_type",
|
||||
"scan",
|
||||
"url",
|
||||
]
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"passed": {"type": "integer"},
|
||||
"failed": {"type": "integer"},
|
||||
"manual": {"type": "integer"},
|
||||
"total": {"type": "integer"},
|
||||
},
|
||||
}
|
||||
)
|
||||
def get_requirements_status(self, obj):
|
||||
return {
|
||||
"passed": obj.requirements_passed,
|
||||
"failed": obj.requirements_failed,
|
||||
"manual": obj.requirements_manual,
|
||||
"total": obj.total_requirements,
|
||||
}
|
||||
|
||||
@extend_schema_field(serializers.CharField(allow_null=True))
|
||||
def get_provider_type(self, obj):
|
||||
"""
|
||||
Retrieves the provider_type from scan.provider.provider_type.
|
||||
"""
|
||||
try:
|
||||
return obj.scan.provider.provider
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
|
||||
class ComplianceOverviewDetailSerializer(serializers.Serializer):
|
||||
"""
|
||||
Serializer for detailed compliance requirement information.
|
||||
class ComplianceOverviewFullSerializer(ComplianceOverviewSerializer):
|
||||
requirements = serializers.SerializerMethodField(read_only=True)
|
||||
|
||||
This serializer formats the aggregated requirement data, showing detailed status
|
||||
and counts for each requirement across all regions.
|
||||
"""
|
||||
class Meta(ComplianceOverviewSerializer.Meta):
|
||||
fields = ComplianceOverviewSerializer.Meta.fields + [
|
||||
"description",
|
||||
"requirements",
|
||||
]
|
||||
|
||||
id = serializers.CharField()
|
||||
framework = serializers.CharField()
|
||||
version = serializers.CharField()
|
||||
description = serializers.CharField()
|
||||
status = serializers.ChoiceField(choices=StatusChoices.choices)
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "compliance-requirements-details"
|
||||
|
||||
|
||||
class ComplianceOverviewAttributesSerializer(serializers.Serializer):
|
||||
id = serializers.CharField()
|
||||
framework_description = serializers.CharField()
|
||||
name = serializers.CharField()
|
||||
framework = serializers.CharField()
|
||||
version = serializers.CharField()
|
||||
description = serializers.CharField()
|
||||
attributes = serializers.JSONField()
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "compliance-requirements-attributes"
|
||||
|
||||
|
||||
class ComplianceOverviewMetadataSerializer(serializers.Serializer):
|
||||
regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "compliance-overviews-metadata"
|
||||
@extend_schema_field(
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"requirement_id": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"checks": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"check_name": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": ["PASS", "FAIL", None],
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
"description": "Each key in the 'checks' object is a check name, with values as "
|
||||
"'PASS', 'FAIL', or null.",
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": ["PASS", "FAIL", "MANUAL"],
|
||||
},
|
||||
"attributes": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
"description": {"type": "string"},
|
||||
"checks_status": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"total": {"type": "integer"},
|
||||
"pass": {"type": "integer"},
|
||||
"fail": {"type": "integer"},
|
||||
"manual": {"type": "integer"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
)
|
||||
def get_requirements(self, obj):
"""
Returns the detailed structure of requirements.
"""
return obj.requirements
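Following the schema declared above, a single entry in the `requirements` mapping would look roughly like the sketch below; the requirement ID, check name, and attribute keys are placeholders rather than values from a real framework.

```python
# Hedged sketch of one "requirements" entry (all names and values are placeholders).
{
    "requirement_1": {
        "name": "Example requirement name",
        "description": "Example requirement description",
        "status": "FAIL",
        "attributes": [{"Section": "Example section"}],
        # Each key under "checks" is a check name; its value is "PASS", "FAIL", or None.
        "checks": {"example_check_name": "FAIL"},
        "checks_status": {"total": 1, "pass": 0, "fail": 1, "manual": 0},
    }
}
```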
|
||||
|
||||
# Overviews
|
||||
@@ -1865,354 +2013,3 @@ class ScheduleDailyCreateSerializer(serializers.Serializer):
|
||||
if unknown_keys:
|
||||
raise ValidationError(f"Invalid fields: {unknown_keys}")
|
||||
return data
|
||||
|
||||
|
||||
# Integrations
|
||||
|
||||
|
||||
class BaseWriteIntegrationSerializer(BaseWriteSerializer):
|
||||
@staticmethod
|
||||
def validate_integration_data(
|
||||
integration_type: str,
|
||||
providers: list[Provider], # noqa
|
||||
configuration: dict,
|
||||
credentials: dict,
|
||||
):
|
||||
if integration_type == Integration.IntegrationChoices.S3:
|
||||
config_serializer = S3ConfigSerializer
|
||||
credentials_serializers = [AWSCredentialSerializer]
|
||||
# TODO: This will be required for AWS Security Hub
|
||||
# if providers and not all(
|
||||
# provider.provider == Provider.ProviderChoices.AWS
|
||||
# for provider in providers
|
||||
# ):
|
||||
# raise serializers.ValidationError(
|
||||
# {"providers": "All providers must be AWS for the S3 integration."}
|
||||
# )
|
||||
else:
|
||||
raise serializers.ValidationError(
|
||||
{
|
||||
"integration_type": f"Integration type not supported yet: {integration_type}"
|
||||
}
|
||||
)
|
||||
|
||||
config_serializer(data=configuration).is_valid(raise_exception=True)
|
||||
|
||||
for cred_serializer in credentials_serializers:
|
||||
try:
|
||||
cred_serializer(data=credentials).is_valid(raise_exception=True)
|
||||
break
|
||||
except ValidationError:
|
||||
continue
|
||||
else:
|
||||
raise ValidationError(
|
||||
{"credentials": "Invalid credentials for the integration type."}
|
||||
)
|
||||
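The for/else above accepts the payload as soon as any candidate credentials serializer validates it and only raises once every candidate has rejected it. A standalone sketch of the same pattern; `validate_with_any` is a hypothetical helper, not part of the API:

```python
# Hedged sketch of the for/else credential-validation pattern (validate_with_any is hypothetical).
from rest_framework import serializers


def validate_with_any(candidate_serializers, data):
    for serializer_cls in candidate_serializers:
        try:
            serializer_cls(data=data).is_valid(raise_exception=True)
            break  # the first serializer that accepts the payload wins
        except serializers.ValidationError:
            continue
    else:
        raise serializers.ValidationError(
            {"credentials": "Invalid credentials for the integration type."}
        )
```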
|
||||
|
||||
class IntegrationSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the Integration model.
|
||||
"""
|
||||
|
||||
providers = serializers.ResourceRelatedField(
|
||||
queryset=Provider.objects.all(), many=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = [
|
||||
"id",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"enabled",
|
||||
"connected",
|
||||
"connection_last_checked_at",
|
||||
"integration_type",
|
||||
"configuration",
|
||||
"providers",
|
||||
"url",
|
||||
]
|
||||
|
||||
included_serializers = {
|
||||
"providers": "api.v1.serializers.ProviderIncludeSerializer",
|
||||
}
|
||||
|
||||
def to_representation(self, instance):
|
||||
representation = super().to_representation(instance)
|
||||
allowed_providers = self.context.get("allowed_providers")
|
||||
if allowed_providers:
|
||||
allowed_provider_ids = {str(provider.id) for provider in allowed_providers}
|
||||
representation["providers"] = [
|
||||
provider
|
||||
for provider in representation["providers"]
|
||||
if provider["id"] in allowed_provider_ids
|
||||
]
|
||||
return representation
|
||||
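When the view passes `allowed_providers` through the serializer context, the rendered relationship is trimmed to providers the caller may see; without it the relationship is returned unchanged. A hedged usage sketch, where `visible_providers` stands in for whatever queryset the view resolves:

```python
# Hedged sketch; integration_instance and visible_providers are placeholders.
serializer = IntegrationSerializer(
    integration_instance,
    context={"allowed_providers": visible_providers},
)
data = serializer.data  # data["providers"] only keeps IDs present in visible_providers
```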
|
||||
|
||||
class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
|
||||
credentials = IntegrationCredentialField(write_only=True)
|
||||
configuration = IntegrationConfigField()
|
||||
providers = serializers.ResourceRelatedField(
|
||||
queryset=Provider.objects.all(), many=True, required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = [
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"enabled",
|
||||
"connected",
|
||||
"connection_last_checked_at",
|
||||
"integration_type",
|
||||
"configuration",
|
||||
"credentials",
|
||||
"providers",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
"connected": {"read_only": True},
|
||||
"enabled": {"read_only": True},
|
||||
"connection_last_checked_at": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
integration_type = attrs.get("integration_type")
|
||||
providers = attrs.get("providers")
|
||||
configuration = attrs.get("configuration")
|
||||
credentials = attrs.get("credentials")
|
||||
|
||||
validated_attrs = super().validate(attrs)
|
||||
self.validate_integration_data(
|
||||
integration_type, providers, configuration, credentials
|
||||
)
|
||||
return validated_attrs
|
||||
|
||||
def create(self, validated_data):
|
||||
tenant_id = self.context.get("tenant_id")
|
||||
|
||||
providers = validated_data.pop("providers", [])
|
||||
integration = Integration.objects.create(tenant_id=tenant_id, **validated_data)
|
||||
|
||||
through_model_instances = [
|
||||
IntegrationProviderRelationship(
|
||||
integration=integration,
|
||||
provider=provider,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
for provider in providers
|
||||
]
|
||||
IntegrationProviderRelationship.objects.bulk_create(through_model_instances)
|
||||
|
||||
return integration
|
||||
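A note on the create path above: the integration row is inserted first, then the provider links are written through `IntegrationProviderRelationship` in a single `bulk_create`, all scoped to the tenant taken from the serializer context. A condensed sketch of that flow, with placeholder inputs:

```python
# Hedged sketch of the create() flow; tenant_id, configuration, credentials and providers
# are placeholders for values validated earlier in the serializer.
integration = Integration.objects.create(
    tenant_id=tenant_id,
    integration_type=Integration.IntegrationChoices.S3,
    configuration=configuration,
    credentials=credentials,
)
IntegrationProviderRelationship.objects.bulk_create(
    [
        IntegrationProviderRelationship(
            integration=integration, provider=provider, tenant_id=tenant_id
        )
        for provider in providers
    ]
)
```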
|
||||
|
||||
class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
|
||||
credentials = IntegrationCredentialField(write_only=True, required=False)
|
||||
configuration = IntegrationConfigField(required=False)
|
||||
providers = serializers.ResourceRelatedField(
|
||||
queryset=Provider.objects.all(), many=True, required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = [
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"enabled",
|
||||
"connected",
|
||||
"connection_last_checked_at",
|
||||
"integration_type",
|
||||
"configuration",
|
||||
"credentials",
|
||||
"providers",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
"connected": {"read_only": True},
|
||||
"connection_last_checked_at": {"read_only": True},
|
||||
"integration_type": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
integration_type = self.instance.integration_type
|
||||
providers = attrs.get("providers")
|
||||
configuration = attrs.get("configuration") or self.instance.configuration
|
||||
credentials = attrs.get("credentials") or self.instance.credentials
|
||||
|
||||
validated_attrs = super().validate(attrs)
|
||||
self.validate_integration_data(
|
||||
integration_type, providers, configuration, credentials
|
||||
)
|
||||
return validated_attrs
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
tenant_id = self.context.get("tenant_id")
|
||||
if validated_data.get("providers") is not None:
|
||||
instance.providers.clear()
|
||||
new_relationships = [
|
||||
IntegrationProviderRelationship(
|
||||
integration=instance, provider=provider, tenant_id=tenant_id
|
||||
)
|
||||
for provider in validated_data["providers"]
|
||||
]
|
||||
IntegrationProviderRelationship.objects.bulk_create(new_relationships)
|
||||
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
# SSO
|
||||
|
||||
|
||||
# class SamlInitiateSerializer(serializers.Serializer):
|
||||
# email_domain = serializers.CharField()
|
||||
|
||||
# class JSONAPIMeta:
|
||||
# resource_name = "saml-initiate"
|
||||
|
||||
|
||||
# class SamlMetadataSerializer(serializers.Serializer):
|
||||
# class JSONAPIMeta:
|
||||
# resource_name = "saml-meta"
|
||||
|
||||
|
||||
# class SAMLConfigurationSerializer(RLSSerializer):
|
||||
# class Meta:
|
||||
# model = SAMLConfiguration
|
||||
# fields = ["id", "email_domain", "metadata_xml", "created_at", "updated_at"]
|
||||
# read_only_fields = ["id", "created_at", "updated_at"]
|
||||
|
||||
|
||||
class LighthouseConfigSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the LighthouseConfig model.
|
||||
"""
|
||||
|
||||
api_key = serializers.CharField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = LighthouseConfiguration
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"api_key",
|
||||
"model",
|
||||
"temperature",
|
||||
"max_tokens",
|
||||
"business_context",
|
||||
"is_active",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"url",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"is_active": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
}
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
# Check if api_key is specifically requested in fields param
|
||||
fields_param = self.context.get("request", None) and self.context[
|
||||
"request"
|
||||
].query_params.get("fields[lighthouse-config]", "")
|
||||
if fields_param == "api_key":
|
||||
# Return decrypted key if specifically requested
|
||||
data["api_key"] = instance.api_key_decoded if instance.api_key else None
|
||||
else:
|
||||
# Return masked key for general requests
|
||||
data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
|
||||
return data
|
||||
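The masking above means a plain read returns asterisks, while explicitly requesting the sparse fieldset returns the decrypted key. A hedged sketch of the two call shapes (the URL prefix is assumed; the query parameter comes from the code above):

```python
# Hedged sketch of the two response shapes produced by to_representation() above.
# GET /api/v1/lighthouse-configurations/<id>
#   -> {"api_key": "************", ...}   # masked
# GET /api/v1/lighthouse-configurations/<id>?fields[lighthouse-config]=api_key
#   -> {"api_key": "sk-...", ...}         # decrypted, only when explicitly requested
```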
|
||||
|
||||
class LighthouseConfigCreateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"""Serializer for creating new Lighthouse configurations."""
|
||||
|
||||
api_key = serializers.CharField(write_only=True, required=True)
|
||||
|
||||
class Meta:
|
||||
model = LighthouseConfiguration
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"api_key",
|
||||
"model",
|
||||
"temperature",
|
||||
"max_tokens",
|
||||
"business_context",
|
||||
"is_active",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"is_active": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
tenant_id = self.context.get("request").tenant_id
|
||||
if LighthouseConfiguration.objects.filter(tenant_id=tenant_id).exists():
|
||||
raise serializers.ValidationError(
|
||||
{
|
||||
"tenant_id": "Lighthouse configuration already exists for this tenant."
|
||||
}
|
||||
)
|
||||
return super().validate(attrs)
|
||||
|
||||
def create(self, validated_data):
|
||||
api_key = validated_data.pop("api_key")
|
||||
instance = super().create(validated_data)
|
||||
instance.api_key_decoded = api_key
|
||||
instance.save()
|
||||
return instance
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
# Always mask the API key in the response
|
||||
data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
|
||||
return data
|
||||
|
||||
|
||||
class LighthouseConfigUpdateSerializer(BaseWriteSerializer):
|
||||
"""
|
||||
Serializer for updating LighthouseConfig instances.
|
||||
"""
|
||||
|
||||
api_key = serializers.CharField(write_only=True, required=False)
|
||||
|
||||
class Meta:
|
||||
model = LighthouseConfiguration
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"api_key",
|
||||
"model",
|
||||
"temperature",
|
||||
"max_tokens",
|
||||
"business_context",
|
||||
"is_active",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"is_active": {"read_only": True},
|
||||
"name": {"required": False},
|
||||
"model": {"required": False},
|
||||
"temperature": {"required": False},
|
||||
"max_tokens": {"required": False},
|
||||
}
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
api_key = validated_data.pop("api_key", None)
|
||||
instance = super().update(instance, validated_data)
|
||||
if api_key:
|
||||
instance.api_key_decoded = api_key
|
||||
instance.save()
|
||||
return instance
|
||||
|
||||
@@ -10,10 +10,8 @@ from api.v1.views import (
|
||||
FindingViewSet,
|
||||
GithubSocialLoginView,
|
||||
GoogleSocialLoginView,
|
||||
IntegrationViewSet,
|
||||
InvitationAcceptViewSet,
|
||||
InvitationViewSet,
|
||||
LighthouseConfigViewSet,
|
||||
MembershipViewSet,
|
||||
OverviewViewSet,
|
||||
ProviderGroupProvidersRelationshipView,
|
||||
@@ -49,13 +47,6 @@ router.register(
|
||||
)
|
||||
router.register(r"overviews", OverviewViewSet, basename="overview")
|
||||
router.register(r"schedules", ScheduleViewSet, basename="schedule")
|
||||
router.register(r"integrations", IntegrationViewSet, basename="integration")
|
||||
# router.register(r"saml-config", SAMLConfigurationViewSet, basename="saml-config")
|
||||
router.register(
|
||||
r"lighthouse-configurations",
|
||||
LighthouseConfigViewSet,
|
||||
basename="lighthouseconfiguration",
|
||||
)
|
||||
|
||||
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
|
||||
tenants_router.register(
|
||||
@@ -119,24 +110,6 @@ urlpatterns = [
|
||||
),
|
||||
name="provider_group-providers-relationship",
|
||||
),
|
||||
# API endpoint to start SAML SSO flow (WIP)
|
||||
# path(
|
||||
# "auth/saml/initiate/", SAMLInitiateAPIView.as_view(), name="api_saml_initiate"
|
||||
# ),
|
||||
# # Custom SAML endpoints (must come before allauth.urls) (WIP)
|
||||
# path(
|
||||
# "accounts/saml/<organization_slug>/login/",
|
||||
# CustomSAMLLoginView.as_view(),
|
||||
# name="saml_login",
|
||||
# ),
|
||||
# path(
|
||||
# "accounts/saml/<organization_slug>/acs/finish/",
|
||||
# TenantFinishACSView.as_view(),
|
||||
# name="saml_finish_acs",
|
||||
# ),
|
||||
# Allauth SAML endpoints for tenants (WIP)
|
||||
# path("accounts/", include("allauth.urls")),
|
||||
# path("tokens/saml", SAMLTokenValidateView.as_view(), name="token-saml"),
|
||||
path("tokens/google", GoogleSocialLoginView.as_view(), name="token-google"),
|
||||
path("tokens/github", GithubSocialLoginView.as_view(), name="token-github"),
|
||||
path("", include(router.urls)),
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,13 +1,6 @@
|
||||
import warnings
|
||||
|
||||
from celery import Celery, Task
|
||||
from config.env import env
|
||||
|
||||
# Suppress specific warnings from django-rest-auth: https://github.com/iMerica/dj-rest-auth/issues/684
|
||||
warnings.filterwarnings(
|
||||
"ignore", category=UserWarning, module="dj_rest_auth.registration.serializers"
|
||||
)
|
||||
|
||||
BROKER_VISIBILITY_TIMEOUT = env.int("DJANGO_BROKER_VISIBILITY_TIMEOUT", default=86400)
|
||||
|
||||
celery_app = Celery("tasks")
|
||||
|
||||
@@ -2,9 +2,10 @@ import json
|
||||
import logging
|
||||
from enum import StrEnum
|
||||
|
||||
from config.env import env
|
||||
from django_guid.log_filters import CorrelationId
|
||||
|
||||
from config.env import env
|
||||
|
||||
|
||||
class BackendLogger(StrEnum):
|
||||
GUNICORN = "gunicorn"
|
||||
@@ -38,9 +39,9 @@ class NDJSONFormatter(logging.Formatter):
|
||||
"funcName": record.funcName,
|
||||
"process": record.process,
|
||||
"thread": record.thread,
|
||||
"transaction_id": (
|
||||
record.transaction_id if hasattr(record, "transaction_id") else None
|
||||
),
|
||||
"transaction_id": record.transaction_id
|
||||
if hasattr(record, "transaction_id")
|
||||
else None,
|
||||
}
|
||||
|
||||
# Add REST API extra fields
|
||||
|
||||
@@ -10,7 +10,6 @@ from config.settings.social_login import * # noqa
|
||||
SECRET_KEY = env("SECRET_KEY", default="secret")
|
||||
DEBUG = env.bool("DJANGO_DEBUG", default=False)
|
||||
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
|
||||
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
|
||||
|
||||
# Application definition
|
||||
|
||||
@@ -27,19 +26,16 @@ INSTALLED_APPS = [
|
||||
"rest_framework",
|
||||
"corsheaders",
|
||||
"drf_spectacular",
|
||||
"drf_spectacular_jsonapi",
|
||||
"django_guid",
|
||||
"rest_framework_json_api",
|
||||
"django_celery_results",
|
||||
"django_celery_beat",
|
||||
"rest_framework_simplejwt.token_blacklist",
|
||||
"allauth",
|
||||
"django.contrib.sites",
|
||||
"allauth.account",
|
||||
"allauth.socialaccount",
|
||||
"allauth.socialaccount.providers.google",
|
||||
"allauth.socialaccount.providers.github",
|
||||
"allauth.socialaccount.providers.saml",
|
||||
"dj_rest_auth.registration",
|
||||
"rest_framework.authtoken",
|
||||
]
|
||||
@@ -115,7 +111,6 @@ SPECTACULAR_SETTINGS = {
|
||||
"PREPROCESSING_HOOKS": [
|
||||
"drf_spectacular_jsonapi.hooks.fix_nested_path_parameters",
|
||||
],
|
||||
"TITLE": "API Reference - Prowler",
|
||||
}
|
||||
|
||||
WSGI_APPLICATION = "config.wsgi.application"
|
||||
@@ -242,9 +237,4 @@ DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY = env.str(
|
||||
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN = env.str("DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN", "")
|
||||
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION = env.str("DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION", "")
|
||||
|
||||
# HTTP Security Headers
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = True
|
||||
X_FRAME_OPTIONS = "DENY"
|
||||
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"
|
||||
|
||||
DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)
|
||||
|
||||
@@ -4,7 +4,6 @@ from config.env import env
|
||||
IGNORED_EXCEPTIONS = [
|
||||
# Provider is not connected due to credentials errors
|
||||
"is not connected",
|
||||
"ProviderConnectionError",
|
||||
# Authentication Errors from AWS
|
||||
"InvalidToken",
|
||||
"AccessDeniedException",
|
||||
@@ -13,11 +12,9 @@ IGNORED_EXCEPTIONS = [
|
||||
"UnauthorizedOperation",
|
||||
"AuthFailure",
|
||||
"InvalidClientTokenId",
|
||||
"AWSInvalidProviderIdError",
|
||||
"InternalServerErrorException",
|
||||
"AccessDenied",
|
||||
"No Shodan API Key", # Shodan Check
|
||||
"RequestLimitExceeded", # For now, we don't want to log the RequestLimitExceeded errors
|
||||
"RequestLimitExceeded", # For now we don't want to log the RequestLimitExceeded errors
|
||||
"ThrottlingException",
|
||||
"Rate exceeded",
|
||||
"SubscriptionRequiredException",
|
||||
@@ -36,16 +33,7 @@ IGNORED_EXCEPTIONS = [
|
||||
"ValidationException",
|
||||
"AWSSecretAccessKeyInvalidError",
|
||||
"InvalidAction",
|
||||
"InvalidRequestException",
|
||||
"RequestExpired",
|
||||
"ConnectionClosedError",
|
||||
"MaxRetryError",
|
||||
"AWSAccessKeyIDInvalidError",
|
||||
"AWSSessionTokenExpiredError",
|
||||
"EndpointConnectionError", # AWS Service is not available in a region
|
||||
# The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an
|
||||
# unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
|
||||
"Pool is closed",
|
||||
"Pool is closed", # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
|
||||
# Authentication Errors from GCP
|
||||
"ClientAuthenticationError",
|
||||
"AuthorizationFailed",
|
||||
@@ -53,8 +41,6 @@ IGNORED_EXCEPTIONS = [
|
||||
"Permission denied to get service",
|
||||
"API has not been used in project",
|
||||
"HttpError 404 when requesting",
|
||||
"HttpError 403 when requesting",
|
||||
"HttpError 400 when requesting",
|
||||
"GCPNoAccesibleProjectsError",
|
||||
# Authentication Errors from Azure
|
||||
"ClientAuthenticationError",
|
||||
@@ -63,18 +49,19 @@ IGNORED_EXCEPTIONS = [
|
||||
"AzureNotValidClientIdError",
|
||||
"AzureNotValidClientSecretError",
|
||||
"AzureNotValidTenantIdError",
|
||||
"AzureInvalidProviderIdError",
|
||||
"AzureTenantIdAndClientSecretNotBelongingToClientIdError",
|
||||
"AzureTenantIdAndClientIdNotBelongingToClientSecretError",
|
||||
"AzureClientIdAndClientSecretNotBelongingToTenantIdError",
|
||||
"AzureHTTPResponseError",
|
||||
"Error with credentials provided",
|
||||
# AWS Service is not available in a region
|
||||
"EndpointConnectionError",
|
||||
]
|
||||
|
||||
|
||||
def before_send(event, hint):
|
||||
"""
|
||||
before_send handles the Sentry events in order to send them or not
|
||||
before_send handles the Sentry events in order to sent them or not
|
||||
"""
|
||||
# Ignore logs with the ignored_exceptions
|
||||
# https://docs.python.org/3/library/logging.html#logrecord-objects
|
||||
@@ -82,16 +69,9 @@ def before_send(event, hint):
|
||||
log_msg = hint["log_record"].msg
|
||||
log_lvl = hint["log_record"].levelno
|
||||
|
||||
# Handle Error and Critical events and discard the rest
|
||||
if log_lvl <= 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
|
||||
return None # Explicitly return None to drop the event
|
||||
|
||||
# Ignore exceptions with the ignored_exceptions
|
||||
if "exc_info" in hint and hint["exc_info"]:
|
||||
exc_value = str(hint["exc_info"][1])
|
||||
if any(ignored in exc_value for ignored in IGNORED_EXCEPTIONS):
|
||||
return None # Explicitly return None to drop the event
|
||||
|
||||
# Handle Error events and discard the rest
|
||||
if log_lvl == 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
|
||||
return
|
||||
return event
|
||||
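A quick worked example of the log-record filter above: an ERROR-level record whose message contains one of the ignored substrings is dropped, while one that matches nothing falls through this check. A minimal sketch under those assumptions:

```python
# Hedged sketch of how the log-record filter in before_send() treats two records.
import logging

noisy = logging.LogRecord("api", logging.ERROR, __file__, 0,
                          "Provider foo is not connected", None, None)
clean = logging.LogRecord("api", logging.ERROR, __file__, 0,
                          "Unexpected database failure", None, None)

before_send({"id": 1}, {"log_record": noisy})  # message matches "is not connected" -> dropped
before_send({"id": 2}, {"log_record": clean})  # no ignored substring -> passes this filter
```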
|
||||
|
||||
@@ -107,6 +87,4 @@ sentry_sdk.init(
|
||||
# possible.
|
||||
"continuous_profiling_auto_start": True,
|
||||
},
|
||||
attach_stacktrace=True,
|
||||
ignore_errors=IGNORED_EXCEPTIONS,
|
||||
)
|
||||
|
||||
@@ -1,17 +1,18 @@
|
||||
from config.env import env
|
||||
|
||||
# Provider Oauth settings
|
||||
GOOGLE_OAUTH_CLIENT_ID = env("SOCIAL_GOOGLE_OAUTH_CLIENT_ID", default="")
|
||||
GOOGLE_OAUTH_CLIENT_SECRET = env("SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET", default="")
|
||||
GOOGLE_OAUTH_CALLBACK_URL = env("SOCIAL_GOOGLE_OAUTH_CALLBACK_URL", default="")
|
||||
# Google Oauth settings
|
||||
GOOGLE_OAUTH_CLIENT_ID = env("DJANGO_GOOGLE_OAUTH_CLIENT_ID", default="")
|
||||
GOOGLE_OAUTH_CLIENT_SECRET = env("DJANGO_GOOGLE_OAUTH_CLIENT_SECRET", default="")
|
||||
GOOGLE_OAUTH_CALLBACK_URL = env("DJANGO_GOOGLE_OAUTH_CALLBACK_URL", default="")
|
||||
|
||||
GITHUB_OAUTH_CLIENT_ID = env("SOCIAL_GITHUB_OAUTH_CLIENT_ID", default="")
|
||||
GITHUB_OAUTH_CLIENT_SECRET = env("SOCIAL_GITHUB_OAUTH_CLIENT_SECRET", default="")
|
||||
GITHUB_OAUTH_CALLBACK_URL = env("SOCIAL_GITHUB_OAUTH_CALLBACK_URL", default="")
|
||||
GITHUB_OAUTH_CLIENT_ID = env("DJANGO_GITHUB_OAUTH_CLIENT_ID", default="")
|
||||
GITHUB_OAUTH_CLIENT_SECRET = env("DJANGO_GITHUB_OAUTH_CLIENT_SECRET", default="")
|
||||
GITHUB_OAUTH_CALLBACK_URL = env("DJANGO_GITHUB_OAUTH_CALLBACK_URL", default="")
|
||||
|
||||
# Allauth settings
|
||||
ACCOUNT_LOGIN_METHODS = {"email"} # Use Email / Password authentication
|
||||
ACCOUNT_SIGNUP_FIELDS = ["email*", "password1*", "password2*"]
|
||||
ACCOUNT_USERNAME_REQUIRED = False
|
||||
ACCOUNT_EMAIL_REQUIRED = True
|
||||
ACCOUNT_EMAIL_VERIFICATION = "none" # Do not require email confirmation
|
||||
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
|
||||
REST_AUTH = {
|
||||
@@ -24,11 +25,6 @@ SOCIALACCOUNT_EMAIL_AUTHENTICATION = True
|
||||
# Connect local account and social account if local account with that email address already exists
|
||||
SOCIALACCOUNT_EMAIL_AUTHENTICATION_AUTO_CONNECT = True
|
||||
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
|
||||
|
||||
# SAML keys (TODO: Validate certificates)
|
||||
# SAML_PUBLIC_CERT = env("SAML_PUBLIC_CERT", default="")
|
||||
# SAML_PRIVATE_KEY = env("SAML_PRIVATE_KEY", default="")
|
||||
|
||||
SOCIALACCOUNT_PROVIDERS = {
|
||||
"google": {
|
||||
"APP": {
|
||||
@@ -54,23 +50,4 @@ SOCIALACCOUNT_PROVIDERS = {
|
||||
"read:org",
|
||||
],
|
||||
},
|
||||
"saml": {
|
||||
"use_nameid_for_email": True,
|
||||
"sp": {
|
||||
"entity_id": "urn:prowler.com:sp",
|
||||
},
|
||||
"advanced": {
|
||||
# "x509cert": SAML_PUBLIC_CERT,
|
||||
# "private_key": SAML_PRIVATE_KEY,
|
||||
# "authn_request_signed": True,
|
||||
# "want_assertion_signed": True,
|
||||
# "want_message_signed": True,
|
||||
"name_id_format": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
|
||||
"authn_request_signed": False,
|
||||
"logout_request_signed": False,
|
||||
"logout_response_signed": False,
|
||||
"want_assertion_encrypted": False,
|
||||
"want_name_id_encrypted": False,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -10,17 +10,12 @@ from django.urls import reverse
|
||||
from django_celery_results.models import TaskResult
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APIClient
|
||||
from tasks.jobs.backfill import backfill_resource_scan_summaries
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
ComplianceRequirementOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
IntegrationProviderRelationship,
|
||||
Invitation,
|
||||
LighthouseConfiguration,
|
||||
Membership,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
@@ -31,7 +26,6 @@ from api.models import (
|
||||
Scan,
|
||||
ScanSummary,
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
@@ -378,14 +372,8 @@ def providers_fixture(tenants_fixture):
|
||||
tenant_id=tenant.id,
|
||||
scanner_args={"key1": "value1", "key2": {"key21": "value21"}},
|
||||
)
|
||||
provider6 = Provider.objects.create(
|
||||
provider="m365",
|
||||
uid="m365.test.com",
|
||||
alias="m365_testing",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
return provider1, provider2, provider3, provider4, provider5, provider6
|
||||
return provider1, provider2, provider3, provider4, provider5
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -665,7 +653,6 @@ def findings_fixture(scans_fixture, resources_fixture):
|
||||
"Description": "test description orange juice",
|
||||
},
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
muted=True,
|
||||
)
|
||||
|
||||
finding2.add_resources([resource2])
|
||||
@@ -786,131 +773,6 @@ def compliance_overviews_fixture(scans_fixture, tenants_fixture):
|
||||
return compliance_overview1, compliance_overview2
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def compliance_requirements_overviews_fixture(scans_fixture, tenants_fixture):
|
||||
"""Fixture for ComplianceRequirementOverview objects used by the new ComplianceOverviewViewSet."""
|
||||
tenant = tenants_fixture[0]
|
||||
scan1, scan2, scan3 = scans_fixture
|
||||
|
||||
# Create ComplianceRequirementOverview objects for scan1
|
||||
requirement_overview1 = ComplianceRequirementOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="aws_account_security_onboarding_aws",
|
||||
framework="AWS-Account-Security-Onboarding",
|
||||
version="1.0",
|
||||
description="Description for AWS Account Security Onboarding",
|
||||
region="eu-west-1",
|
||||
requirement_id="requirement1",
|
||||
requirement_status=StatusChoices.PASS,
|
||||
passed_checks=2,
|
||||
failed_checks=0,
|
||||
total_checks=2,
|
||||
)
|
||||
|
||||
requirement_overview2 = ComplianceRequirementOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="aws_account_security_onboarding_aws",
|
||||
framework="AWS-Account-Security-Onboarding",
|
||||
version="1.0",
|
||||
description="Description for AWS Account Security Onboarding",
|
||||
region="eu-west-1",
|
||||
requirement_id="requirement2",
|
||||
requirement_status=StatusChoices.PASS,
|
||||
passed_checks=2,
|
||||
failed_checks=0,
|
||||
total_checks=2,
|
||||
)
|
||||
|
||||
requirement_overview3 = ComplianceRequirementOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="aws_account_security_onboarding_aws",
|
||||
framework="AWS-Account-Security-Onboarding",
|
||||
version="1.0",
|
||||
description="Description for AWS Account Security Onboarding",
|
||||
region="eu-west-2",
|
||||
requirement_id="requirement1",
|
||||
requirement_status=StatusChoices.PASS,
|
||||
passed_checks=2,
|
||||
failed_checks=0,
|
||||
total_checks=2,
|
||||
)
|
||||
|
||||
requirement_overview4 = ComplianceRequirementOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="aws_account_security_onboarding_aws",
|
||||
framework="AWS-Account-Security-Onboarding",
|
||||
version="1.0",
|
||||
description="Description for AWS Account Security Onboarding",
|
||||
region="eu-west-2",
|
||||
requirement_id="requirement2",
|
||||
requirement_status=StatusChoices.FAIL,
|
||||
passed_checks=1,
|
||||
failed_checks=1,
|
||||
total_checks=2,
|
||||
)
|
||||
|
||||
requirement_overview5 = ComplianceRequirementOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="aws_account_security_onboarding_aws",
|
||||
framework="AWS-Account-Security-Onboarding",
|
||||
version="1.0",
|
||||
description="Description for AWS Account Security Onboarding (MANUAL)",
|
||||
region="eu-west-2",
|
||||
requirement_id="requirement3",
|
||||
requirement_status=StatusChoices.MANUAL,
|
||||
passed_checks=0,
|
||||
failed_checks=0,
|
||||
total_checks=0,
|
||||
)
|
||||
|
||||
# Create a different compliance framework for testing
|
||||
requirement_overview6 = ComplianceRequirementOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="cis_1.4_aws",
|
||||
framework="CIS-1.4-AWS",
|
||||
version="1.4",
|
||||
description="CIS AWS Foundations Benchmark v1.4.0",
|
||||
region="eu-west-1",
|
||||
requirement_id="cis_requirement1",
|
||||
requirement_status=StatusChoices.FAIL,
|
||||
passed_checks=0,
|
||||
failed_checks=3,
|
||||
total_checks=3,
|
||||
)
|
||||
|
||||
# Create another compliance framework for testing MITRE ATT&CK
|
||||
requirement_overview7 = ComplianceRequirementOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="mitre_attack_aws",
|
||||
framework="MITRE-ATTACK",
|
||||
version="1.0",
|
||||
description="MITRE ATT&CK",
|
||||
region="eu-west-1",
|
||||
requirement_id="mitre_requirement1",
|
||||
requirement_status=StatusChoices.FAIL,
|
||||
passed_checks=0,
|
||||
failed_checks=0,
|
||||
total_checks=0,
|
||||
)
|
||||
|
||||
return (
|
||||
requirement_overview1,
|
||||
requirement_overview2,
|
||||
requirement_overview3,
|
||||
requirement_overview4,
|
||||
requirement_overview5,
|
||||
requirement_overview6,
|
||||
requirement_overview7,
|
||||
)
|
||||
|
||||
|
||||
def get_api_tokens(
|
||||
api_client, user_email: str, user_password: str, tenant_id: str = None
|
||||
) -> tuple[str, str]:
|
||||
@@ -1015,167 +877,6 @@ def scan_summaries_fixture(tenants_fixture, providers_fixture):
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def integrations_fixture(providers_fixture):
|
||||
provider1, provider2, *_ = providers_fixture
|
||||
tenant_id = provider1.tenant_id
|
||||
integration1 = Integration.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
enabled=True,
|
||||
connected=True,
|
||||
integration_type="amazon_s3",
|
||||
configuration={"key": "value"},
|
||||
credentials={"psswd": "1234"},
|
||||
)
|
||||
IntegrationProviderRelationship.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
integration=integration1,
|
||||
provider=provider1,
|
||||
)
|
||||
|
||||
integration2 = Integration.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
enabled=True,
|
||||
connected=True,
|
||||
integration_type="amazon_s3",
|
||||
configuration={"key": "value"},
|
||||
credentials={"psswd": "1234"},
|
||||
)
|
||||
IntegrationProviderRelationship.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
integration=integration2,
|
||||
provider=provider1,
|
||||
)
|
||||
IntegrationProviderRelationship.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
integration=integration2,
|
||||
provider=provider2,
|
||||
)
|
||||
|
||||
return integration1, integration2
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def backfill_scan_metadata_fixture(scans_fixture, findings_fixture):
|
||||
for scan_instance in scans_fixture:
|
||||
tenant_id = scan_instance.tenant_id
|
||||
scan_id = scan_instance.id
|
||||
backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def lighthouse_config_fixture(authenticated_client, tenants_fixture):
|
||||
return LighthouseConfiguration.objects.create(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
name="OpenAI",
|
||||
api_key_decoded="sk-test1234567890T3BlbkFJtest1234567890",
|
||||
model="gpt-4o",
|
||||
temperature=0,
|
||||
max_tokens=4000,
|
||||
business_context="Test business context",
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def latest_scan_finding(authenticated_client, providers_fixture, resources_fixture):
|
||||
provider = providers_fixture[0]
|
||||
tenant_id = str(providers_fixture[0].tenant_id)
|
||||
resource = resources_fixture[0]
|
||||
scan = Scan.objects.create(
|
||||
name="latest completed scan",
|
||||
provider=provider,
|
||||
trigger=Scan.TriggerChoices.MANUAL,
|
||||
state=StateChoices.COMPLETED,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
finding = Finding.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
uid="test_finding_uid_1",
|
||||
scan=scan,
|
||||
delta="new",
|
||||
status=Status.FAIL,
|
||||
status_extended="test status extended ",
|
||||
impact=Severity.critical,
|
||||
impact_extended="test impact extended one",
|
||||
severity=Severity.critical,
|
||||
raw_result={
|
||||
"status": Status.FAIL,
|
||||
"impact": Severity.critical,
|
||||
"severity": Severity.critical,
|
||||
},
|
||||
tags={"test": "dev-qa"},
|
||||
check_id="test_check_id",
|
||||
check_metadata={
|
||||
"CheckId": "test_check_id",
|
||||
"Description": "test description apple sauce",
|
||||
},
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
|
||||
finding.add_resources([resource])
|
||||
backfill_resource_scan_summaries(tenant_id, str(scan.id))
|
||||
return finding
|
||||
|
||||
|
||||
# @pytest.fixture
|
||||
# def saml_setup(tenants_fixture):
|
||||
# tenant_id = tenants_fixture[0].id
|
||||
# domain = "example.com"
|
||||
|
||||
# SAMLDomainIndex.objects.create(email_domain=domain, tenant_id=tenant_id)
|
||||
|
||||
# metadata_xml = """<?xml version='1.0' encoding='UTF-8'?>
|
||||
# <md:EntityDescriptor entityID='TEST' xmlns:md='urn:oasis:names:tc:SAML:2.0:metadata'>
|
||||
# <md:IDPSSODescriptor WantAuthnRequestsSigned='false' protocolSupportEnumeration='urn:oasis:names:tc:SAML:2.0:protocol'>
|
||||
# <md:KeyDescriptor use='signing'>
|
||||
# <ds:KeyInfo xmlns:ds='http://www.w3.org/2000/09/xmldsig#'>
|
||||
# <ds:X509Data>
|
||||
# <ds:X509Certificate>TEST</ds:X509Certificate>
|
||||
# </ds:X509Data>
|
||||
# </ds:KeyInfo>
|
||||
# </md:KeyDescriptor>
|
||||
# <md:NameIDFormat>urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress</md:NameIDFormat>
|
||||
# <md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST' Location='https://TEST/sso/saml'/>
|
||||
# <md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect' Location='https://TEST/sso/saml'/>
|
||||
# </md:IDPSSODescriptor>
|
||||
# </md:EntityDescriptor>
|
||||
# """
|
||||
# SAMLConfiguration.objects.create(
|
||||
# tenant_id=str(tenant_id),
|
||||
# email_domain=domain,
|
||||
# metadata_xml=metadata_xml,
|
||||
# )
|
||||
|
||||
# return {
|
||||
# "email": f"user@{domain}",
|
||||
# "domain": domain,
|
||||
# "tenant_id": tenant_id,
|
||||
# }
|
||||
|
||||
|
||||
# @pytest.fixture
|
||||
# def saml_sociallogin(users_fixture):
|
||||
# user = users_fixture[0]
|
||||
# user.email = "samlsso@acme.com"
|
||||
# extra_data = {
|
||||
# "firstName": ["Test"],
|
||||
# "lastName": ["User"],
|
||||
# "organization": ["Prowler"],
|
||||
# "userType": ["member"],
|
||||
# }
|
||||
|
||||
# account = MagicMock()
|
||||
# account.provider = "saml"
|
||||
# account.extra_data = extra_data
|
||||
|
||||
# sociallogin = MagicMock(spec=SocialLogin)
|
||||
# sociallogin.account = account
|
||||
# sociallogin.user = user
|
||||
|
||||
# return sociallogin
|
||||
|
||||
|
||||
def get_authorization_header(access_token: str) -> dict:
|
||||
return {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
|
||||
@@ -3,12 +3,6 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# Suppress specific warnings from django-rest-auth: https://github.com/iMerica/dj-rest-auth/issues/684
|
||||
warnings.filterwarnings(
|
||||
"ignore", category=UserWarning, module="dj_rest_auth.registration.serializers"
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
Resource,
|
||||
ResourceFindingMapping,
|
||||
ResourceScanSummary,
|
||||
Scan,
|
||||
StateChoices,
|
||||
)
|
||||
|
||||
|
||||
def backfill_resource_scan_summaries(tenant_id: str, scan_id: str):
|
||||
with rls_transaction(tenant_id):
|
||||
if ResourceScanSummary.objects.filter(
|
||||
tenant_id=tenant_id, scan_id=scan_id
|
||||
).exists():
|
||||
return {"status": "already backfilled"}
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
if not Scan.objects.filter(
|
||||
tenant_id=tenant_id,
|
||||
id=scan_id,
|
||||
state__in=(StateChoices.COMPLETED, StateChoices.FAILED),
|
||||
).exists():
|
||||
return {"status": "scan is not completed"}
|
||||
|
||||
resource_ids_qs = (
|
||||
ResourceFindingMapping.objects.filter(
|
||||
tenant_id=tenant_id, finding__scan_id=scan_id
|
||||
)
|
||||
.values_list("resource_id", flat=True)
|
||||
.distinct()
|
||||
)
|
||||
|
||||
resource_ids = list(resource_ids_qs)
|
||||
|
||||
if not resource_ids:
|
||||
return {"status": "no resources to backfill"}
|
||||
|
||||
resources_qs = Resource.objects.filter(
|
||||
tenant_id=tenant_id, id__in=resource_ids
|
||||
).only("id", "service", "region", "type")
|
||||
|
||||
summaries = []
|
||||
for resource in resources_qs.iterator():
|
||||
summaries.append(
|
||||
ResourceScanSummary(
|
||||
tenant_id=tenant_id,
|
||||
scan_id=scan_id,
|
||||
resource_id=str(resource.id),
|
||||
service=resource.service,
|
||||
region=resource.region,
|
||||
resource_type=resource.type,
|
||||
)
|
||||
)
|
||||
|
||||
for i in range(0, len(summaries), 500):
|
||||
ResourceScanSummary.objects.bulk_create(
|
||||
summaries[i : i + 500], ignore_conflicts=True
|
||||
)
|
||||
|
||||
return {"status": "backfilled", "inserted": len(summaries)}
|
||||
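For orientation, the test fixtures elsewhere in this changeset call this helper to materialise `ResourceScanSummary` rows for completed scans. A hedged usage sketch; the tenant and scan IDs are placeholders:

```python
# Hedged usage sketch; the UUIDs are placeholders.
from tasks.jobs.backfill import backfill_resource_scan_summaries

result = backfill_resource_scan_summaries(
    tenant_id="00000000-0000-0000-0000-000000000000",
    scan_id="11111111-1111-1111-1111-111111111111",
)
# result is one of:
#   {"status": "already backfilled"}
#   {"status": "scan is not completed"}
#   {"status": "no resources to backfill"}
#   {"status": "backfilled", "inserted": <count>}
```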
@@ -1,9 +1,8 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import openai
|
||||
from celery.utils.log import get_task_logger
|
||||
|
||||
from api.models import LighthouseConfiguration, Provider
|
||||
from api.models import Provider
|
||||
from api.utils import prowler_provider_connection_test
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
@@ -40,46 +39,3 @@ def check_provider_connection(provider_id: str):
|
||||
|
||||
connection_error = f"{connection_result.error}" if connection_result.error else None
|
||||
return {"connected": connection_result.is_connected, "error": connection_error}
|
||||
|
||||
|
||||
def check_lighthouse_connection(lighthouse_config_id: str):
|
||||
"""
|
||||
Business logic to check the connection status of a Lighthouse configuration.
|
||||
|
||||
Args:
|
||||
lighthouse_config_id (str): The primary key of the LighthouseConfiguration instance to check.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing:
|
||||
- 'connected' (bool): Indicates whether the connection is successful.
|
||||
- 'error' (str or None): The error message if the connection failed, otherwise `None`.
|
||||
- 'available_models' (list): List of available models if connection is successful.
|
||||
|
||||
Raises:
|
||||
Model.DoesNotExist: If the lighthouse configuration does not exist.
|
||||
"""
|
||||
lighthouse_config = LighthouseConfiguration.objects.get(pk=lighthouse_config_id)
|
||||
|
||||
if not lighthouse_config.api_key_decoded:
|
||||
lighthouse_config.is_active = False
|
||||
lighthouse_config.save()
|
||||
return {
|
||||
"connected": False,
|
||||
"error": "API key is invalid or missing.",
|
||||
"available_models": [],
|
||||
}
|
||||
|
||||
try:
|
||||
client = openai.OpenAI(api_key=lighthouse_config.api_key_decoded)
|
||||
models = client.models.list()
|
||||
lighthouse_config.is_active = True
|
||||
lighthouse_config.save()
|
||||
return {
|
||||
"connected": True,
|
||||
"error": None,
|
||||
"available_models": [model.id for model in models.data],
|
||||
}
|
||||
except Exception as e:
|
||||
lighthouse_config.is_active = False
|
||||
lighthouse_config.save()
|
||||
return {"connected": False, "error": str(e), "available_models": []}
|
||||
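A hedged usage sketch for the connection check above; the configuration ID is a placeholder:

```python
# Hedged usage sketch; the UUID is a placeholder for an existing LighthouseConfiguration pk.
result = check_lighthouse_connection("3fa85f64-5717-4562-b3fc-2c963f66afa6")
if result["connected"]:
    print("Available models:", result["available_models"])
else:
    print("Lighthouse connection failed:", result["error"])
```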
|
||||
@@ -1,5 +1,4 @@
|
||||
import os
|
||||
import re
|
||||
import zipfile
|
||||
|
||||
import boto3
|
||||
@@ -14,42 +13,6 @@ from prowler.config.config import (
|
||||
json_ocsf_file_suffix,
|
||||
output_file_timestamp,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
|
||||
AWSWellArchitected,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
|
||||
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
|
||||
from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS
|
||||
from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_kubernetes import (
|
||||
KubernetesISO27001,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_m365 import M365ISO27001
|
||||
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
|
||||
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
|
||||
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
|
||||
AzureMitreAttack,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_gcp import GCPMitreAttack
|
||||
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_aws import (
|
||||
ProwlerThreatScoreAWS,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_azure import (
|
||||
ProwlerThreatScoreAzure,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_gcp import (
|
||||
ProwlerThreatScoreGCP,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_m365 import (
|
||||
ProwlerThreatScoreM365,
|
||||
)
|
||||
from prowler.lib.outputs.csv.csv import CSV
|
||||
from prowler.lib.outputs.html.html import HTML
|
||||
from prowler.lib.outputs.ocsf.ocsf import OCSF
|
||||
@@ -57,45 +20,6 @@ from prowler.lib.outputs.ocsf.ocsf import OCSF
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
COMPLIANCE_CLASS_MAP = {
|
||||
"aws": [
|
||||
(lambda name: name.startswith("cis_"), AWSCIS),
|
||||
(lambda name: name == "mitre_attack_aws", AWSMitreAttack),
|
||||
(lambda name: name.startswith("ens_"), AWSENS),
|
||||
(
|
||||
lambda name: name.startswith("aws_well_architected_framework"),
|
||||
AWSWellArchitected,
|
||||
),
|
||||
(lambda name: name.startswith("iso27001_"), AWSISO27001),
|
||||
(lambda name: name.startswith("kisa"), AWSKISAISMSP),
|
||||
(lambda name: name == "prowler_threatscore_aws", ProwlerThreatScoreAWS),
|
||||
],
|
||||
"azure": [
|
||||
(lambda name: name.startswith("cis_"), AzureCIS),
|
||||
(lambda name: name == "mitre_attack_azure", AzureMitreAttack),
|
||||
(lambda name: name.startswith("ens_"), AzureENS),
|
||||
(lambda name: name.startswith("iso27001_"), AzureISO27001),
|
||||
(lambda name: name == "prowler_threatscore_azure", ProwlerThreatScoreAzure),
|
||||
],
|
||||
"gcp": [
|
||||
(lambda name: name.startswith("cis_"), GCPCIS),
|
||||
(lambda name: name == "mitre_attack_gcp", GCPMitreAttack),
|
||||
(lambda name: name.startswith("ens_"), GCPENS),
|
||||
(lambda name: name.startswith("iso27001_"), GCPISO27001),
|
||||
(lambda name: name == "prowler_threatscore_gcp", ProwlerThreatScoreGCP),
|
||||
],
|
||||
"kubernetes": [
|
||||
(lambda name: name.startswith("cis_"), KubernetesCIS),
|
||||
(lambda name: name.startswith("iso27001_"), KubernetesISO27001),
|
||||
],
|
||||
"m365": [
|
||||
(lambda name: name.startswith("cis_"), M365CIS),
|
||||
(lambda name: name == "prowler_threatscore_m365", ProwlerThreatScoreM365),
|
||||
(lambda name: name.startswith("iso27001_"), M365ISO27001),
|
||||
],
|
||||
}
|
||||
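The map above pairs a predicate on the compliance name with the output class to instantiate for each provider. A small hedged helper showing how it would be consulted; `find_compliance_class` is a hypothetical name, not part of the module:

```python
# Hedged sketch; find_compliance_class is hypothetical and not defined in the module above.
def find_compliance_class(provider_type: str, compliance_name: str):
    for matches, compliance_class in COMPLIANCE_CLASS_MAP.get(provider_type, []):
        if matches(compliance_name):
            return compliance_class
    return None


# e.g. find_compliance_class("aws", "cis_1.4_aws") would resolve to AWSCIS.
```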
|
||||
|
||||
# Predefined mapping for output formats and their configurations
|
||||
OUTPUT_FORMATS_MAPPING = {
|
||||
"csv": {
|
||||
@@ -119,17 +43,13 @@ def _compress_output_files(output_directory: str) -> str:
|
||||
str: The full path to the newly created ZIP archive.
|
||||
"""
|
||||
zip_path = f"{output_directory}.zip"
|
||||
parent_dir = os.path.dirname(output_directory)
|
||||
zip_path_abs = os.path.abspath(zip_path)
|
||||
|
||||
with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
|
||||
for foldername, _, filenames in os.walk(parent_dir):
|
||||
for filename in filenames:
|
||||
file_path = os.path.join(foldername, filename)
|
||||
if os.path.abspath(file_path) == zip_path_abs:
|
||||
continue
|
||||
arcname = os.path.relpath(file_path, start=parent_dir)
|
||||
zipf.write(file_path, arcname)
|
||||
for suffix in [config["suffix"] for config in OUTPUT_FORMATS_MAPPING.values()]:
|
||||
zipf.write(
|
||||
f"{output_directory}{suffix}",
|
||||
f"output/{output_directory.split('/')[-1]}{suffix}",
|
||||
)
|
||||
|
||||
return zip_path
|
||||
|
||||
@@ -182,38 +102,25 @@ def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str:
|
||||
Raises:
|
||||
botocore.exceptions.ClientError: If the upload attempt to S3 fails for any reason.
|
||||
"""
|
||||
bucket = base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET
|
||||
if not bucket:
|
||||
return None
|
||||
if not base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET:
|
||||
return
|
||||
|
||||
try:
|
||||
s3 = get_s3_client()
|
||||
|
||||
# Upload the ZIP file (outputs) to the S3 bucket
|
||||
zip_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
|
||||
s3_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
|
||||
s3.upload_file(
|
||||
Filename=zip_path,
|
||||
Bucket=bucket,
|
||||
Key=zip_key,
|
||||
Bucket=base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET,
|
||||
Key=s3_key,
|
||||
)
|
||||
|
||||
# Upload the compliance directory to the S3 bucket
|
||||
compliance_dir = os.path.join(os.path.dirname(zip_path), "compliance")
|
||||
for filename in os.listdir(compliance_dir):
|
||||
local_path = os.path.join(compliance_dir, filename)
|
||||
if not os.path.isfile(local_path):
|
||||
continue
|
||||
file_key = f"{tenant_id}/{scan_id}/compliance/{filename}"
|
||||
s3.upload_file(Filename=local_path, Bucket=bucket, Key=file_key)
|
||||
|
||||
return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{zip_key}"
|
||||
return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{s3_key}"
|
||||
except (ClientError, NoCredentialsError, ParamValidationError, ValueError) as e:
|
||||
logger.error(f"S3 upload failed: {str(e)}")
|
||||
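For reference, the uploads above namespace every object key by tenant and scan; a hedged sketch of the resulting layout (bucket name, IDs and timestamp are placeholders):

```python
# Hedged sketch of the S3 key layout produced by _upload_to_s3 (placeholders throughout).
# s3://<DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET>/<tenant_id>/<scan_id>/prowler-output-<provider>-<timestamp>.zip
# s3://<DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET>/<tenant_id>/<scan_id>/compliance/<compliance-file>
```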
|
||||
|
||||
def _generate_output_directory(
|
||||
output_directory, prowler_provider: object, tenant_id: str, scan_id: str
|
||||
) -> tuple[str, str]:
|
||||
) -> str:
|
||||
"""
|
||||
Generate a file system path for the output directory of a prowler scan.
|
||||
|
||||
@@ -238,22 +145,12 @@ def _generate_output_directory(
|
||||
|
||||
Example:
|
||||
>>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
|
||||
'/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56',
|
||||
'/tmp/tenant-1234/aws/scan-5678/compliance/prowler-output-2023-02-15T12:34:56'
|
||||
'/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56'
|
||||
"""
|
||||
# Sanitize the prowler provider name to ensure it is a valid directory name
|
||||
prowler_provider_sanitized = re.sub(r"[^\w\-]", "-", prowler_provider)
|
||||
|
||||
path = (
|
||||
f"{output_directory}/{tenant_id}/{scan_id}/prowler-output-"
|
||||
f"{prowler_provider_sanitized}-{output_file_timestamp}"
|
||||
f"{prowler_provider}-{output_file_timestamp}"
|
||||
)
|
||||
os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)
|
||||
|
||||
compliance_path = (
|
||||
f"{output_directory}/{tenant_id}/{scan_id}/compliance/prowler-output-"
|
||||
f"{prowler_provider_sanitized}-{output_file_timestamp}"
|
||||
)
|
||||
os.makedirs("/".join(compliance_path.split("/")[:-1]), exist_ok=True)
|
||||
|
||||
return path, compliance_path
|
||||
return path
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import json
|
||||
import time
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
@@ -7,27 +6,24 @@ from celery.utils.log import get_task_logger
|
||||
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
|
||||
from django.db import IntegrityError, OperationalError
|
||||
from django.db.models import Case, Count, IntegerField, Sum, When
|
||||
from tasks.utils import CustomEncoder
|
||||
|
||||
from api.compliance import (
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
|
||||
generate_scan_compliance,
|
||||
)
|
||||
from api.db_utils import create_objects_in_batches, rls_transaction
|
||||
from api.exceptions import ProviderConnectionError
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
ComplianceRequirementOverview,
|
||||
ComplianceOverview,
|
||||
Finding,
|
||||
Provider,
|
||||
Resource,
|
||||
ResourceScanSummary,
|
||||
ResourceTag,
|
||||
Scan,
|
||||
ScanSummary,
|
||||
StateChoices,
|
||||
)
|
||||
from api.models import StatusChoices as FindingStatus
|
||||
from api.utils import initialize_prowler_provider, return_prowler_provider
|
||||
from api.utils import initialize_prowler_provider
|
||||
from api.v1.serializers import ScanTaskSerializer
|
||||
from prowler.lib.outputs.finding import Finding as ProwlerFinding
|
||||
from prowler.lib.scan.scan import Scan as ProwlerScan
|
||||
@@ -120,11 +116,10 @@ def perform_prowler_scan(
|
||||
ValueError: If the provider cannot be connected.
|
||||
|
||||
"""
|
||||
check_status_by_region = {}
|
||||
exception = None
|
||||
unique_resources = set()
|
||||
scan_resource_cache: set[tuple[str, str, str, str]] = set()
|
||||
start_time = time.time()
|
||||
exc = None
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
provider_instance = Provider.objects.get(pk=provider_id)
|
||||
@@ -140,7 +135,7 @@ def perform_prowler_scan(
|
||||
provider_instance.connected = True
|
||||
except Exception as e:
|
||||
provider_instance.connected = False
|
||||
exc = ProviderConnectionError(
|
||||
raise ValueError(
|
||||
f"Provider {provider_instance.provider} is not connected: {e}"
|
||||
)
|
||||
finally:
|
||||
@@ -149,12 +144,6 @@ def perform_prowler_scan(
|
||||
)
|
||||
provider_instance.save()
|
||||
|
||||
# If the provider is not connected, raise an exception outside the transaction.
|
||||
# If raised within the transaction, the transaction will be rolled back and the provider will not be marked
|
||||
# as not connected.
|
||||
if exc:
|
||||
raise exc
|
||||
|
||||
prowler_scan = ProwlerScan(provider=prowler_provider, checks=checks_to_execute)
|
||||
|
||||
resource_cache = {}
|
||||
@@ -202,17 +191,6 @@ def perform_prowler_scan(
|
||||
if resource_instance.type != finding.resource_type:
|
||||
resource_instance.type = finding.resource_type
|
||||
updated_fields.append("type")
|
||||
if resource_instance.metadata != finding.resource_metadata:
|
||||
resource_instance.metadata = json.dumps(
|
||||
finding.resource_metadata, cls=CustomEncoder
|
||||
)
|
||||
updated_fields.append("metadata")
|
||||
if resource_instance.details != finding.resource_details:
|
||||
resource_instance.details = finding.resource_details
|
||||
updated_fields.append("details")
|
||||
if resource_instance.partition != finding.partition:
|
||||
resource_instance.partition = finding.partition
|
||||
updated_fields.append("partition")
|
||||
if updated_fields:
|
||||
with rls_transaction(tenant_id):
|
||||
resource_instance.save(update_fields=updated_fields)
|
||||
@@ -289,20 +267,18 @@ def perform_prowler_scan(
|
||||
check_id=finding.check_id,
|
||||
scan=scan_instance,
|
||||
first_seen_at=last_first_seen_at,
|
||||
muted=finding.muted,
|
||||
compliance=finding.compliance,
|
||||
)
|
||||
finding_instance.add_resources([resource_instance])
|
||||
|
||||
# Update scan resource summaries
|
||||
scan_resource_cache.add(
|
||||
(
|
||||
str(resource_instance.id),
|
||||
resource_instance.service,
|
||||
resource_instance.region,
|
||||
resource_instance.type,
|
||||
)
|
||||
)
|
||||
# Update compliance data if applicable
|
||||
if finding.status.value == "MUTED":
|
||||
continue
|
||||
|
||||
region_dict = check_status_by_region.setdefault(finding.region, {})
|
||||
current_status = region_dict.get(finding.check_id)
|
||||
if current_status == "FAIL":
|
||||
continue
|
||||
region_dict[finding.check_id] = finding.status.value
|
||||
|
||||
# Update scan progress
|
||||
with rls_transaction(tenant_id):
|
||||
@@ -323,33 +299,66 @@ def perform_prowler_scan(
scan_instance.unique_resource_count = len(unique_resources)
scan_instance.save()

if exception is None:
try:
regions = prowler_provider.get_regions()
except AttributeError:
regions = set()

compliance_template = PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE[
provider_instance.provider
]
compliance_overview_by_region = {
region: deepcopy(compliance_template) for region in regions
}

for region, check_status in check_status_by_region.items():
compliance_data = compliance_overview_by_region.setdefault(
region, deepcopy(compliance_template)
)
for check_name, status in check_status.items():
generate_scan_compliance(
compliance_data,
provider_instance.provider,
check_name,
status,
)

# Prepare compliance overview objects
compliance_overview_objects = []
for region, compliance_data in compliance_overview_by_region.items():
for compliance_id, compliance in compliance_data.items():
compliance_overview_objects.append(
ComplianceOverview(
tenant_id=tenant_id,
scan=scan_instance,
region=region,
compliance_id=compliance_id,
framework=compliance["framework"],
version=compliance["version"],
description=compliance["description"],
requirements=compliance["requirements"],
requirements_passed=compliance["requirements_status"]["passed"],
requirements_failed=compliance["requirements_status"]["failed"],
requirements_manual=compliance["requirements_status"]["manual"],
total_requirements=compliance["total_requirements"],
)
)
try:
with rls_transaction(tenant_id):
ComplianceOverview.objects.bulk_create(
compliance_overview_objects, batch_size=100
)
except Exception as overview_exception:
import sentry_sdk

sentry_sdk.capture_exception(overview_exception)
logger.error(
f"Error storing compliance overview for scan {scan_id}: {overview_exception}"
)
if exception is not None:
raise exception

try:
resource_scan_summaries = [
ResourceScanSummary(
tenant_id=tenant_id,
scan_id=scan_id,
resource_id=resource_id,
service=service,
region=region,
resource_type=resource_type,
)
for resource_id, service, region, resource_type in scan_resource_cache
]
with rls_transaction(tenant_id):
ResourceScanSummary.objects.bulk_create(
resource_scan_summaries, batch_size=500, ignore_conflicts=True
)
except Exception as filter_exception:
import sentry_sdk

sentry_sdk.capture_exception(filter_exception)
logger.error(
f"Error storing filter values for scan {scan_id}: {filter_exception}"
)

serializer = ScanTaskSerializer(instance=scan_instance)
return serializer.data
@@ -393,21 +402,21 @@ def aggregate_findings(tenant_id: str, scan_id: str):
).annotate(
fail=Sum(
Case(
When(status="FAIL", muted=False, then=1),
When(status="FAIL", then=1),
default=0,
output_field=IntegerField(),
)
),
_pass=Sum(
Case(
When(status="PASS", muted=False, then=1),
When(status="PASS", then=1),
default=0,
output_field=IntegerField(),
)
),
muted_count=Sum(
muted=Sum(
Case(
When(muted=True, then=1),
When(status="MUTED", then=1),
default=0,
output_field=IntegerField(),
)
@@ -415,63 +424,63 @@ def aggregate_findings(tenant_id: str, scan_id: str):
total=Count("id"),
new=Sum(
Case(
When(delta="new", muted=False, then=1),
When(delta="new", then=1),
default=0,
output_field=IntegerField(),
)
),
changed=Sum(
Case(
When(delta="changed", muted=False, then=1),
When(delta="changed", then=1),
default=0,
output_field=IntegerField(),
)
),
unchanged=Sum(
Case(
When(delta__isnull=True, muted=False, then=1),
When(delta__isnull=True, then=1),
default=0,
output_field=IntegerField(),
)
),
fail_new=Sum(
Case(
When(delta="new", status="FAIL", muted=False, then=1),
When(delta="new", status="FAIL", then=1),
default=0,
output_field=IntegerField(),
)
),
fail_changed=Sum(
Case(
When(delta="changed", status="FAIL", muted=False, then=1),
When(delta="changed", status="FAIL", then=1),
default=0,
output_field=IntegerField(),
)
),
pass_new=Sum(
Case(
When(delta="new", status="PASS", muted=False, then=1),
When(delta="new", status="PASS", then=1),
default=0,
output_field=IntegerField(),
)
),
pass_changed=Sum(
Case(
When(delta="changed", status="PASS", muted=False, then=1),
When(delta="changed", status="PASS", then=1),
default=0,
output_field=IntegerField(),
)
),
muted_new=Sum(
Case(
When(delta="new", muted=True, then=1),
When(delta="new", status="MUTED", then=1),
default=0,
output_field=IntegerField(),
)
),
muted_changed=Sum(
Case(
When(delta="changed", muted=True, then=1),
When(delta="changed", status="MUTED", then=1),
default=0,
output_field=IntegerField(),
)
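These hunks switch the conditional counters from filtering on the muted flag to counting findings whose status is "MUTED". A minimal sketch of this style of conditional aggregation with the Django ORM, assuming a queryset over a model with status and delta fields (not the project's exact query):

    # Sketch: one aggregate query that counts rows per condition via Case/When.
    from django.db.models import Case, Count, IntegerField, Sum, When

    def status_counts(findings_qs):
        return findings_qs.aggregate(
            total=Count("id"),
            fail=Sum(Case(When(status="FAIL", then=1), default=0, output_field=IntegerField())),
            muted=Sum(Case(When(status="MUTED", then=1), default=0, output_field=IntegerField())),
            fail_new=Sum(Case(When(delta="new", status="FAIL", then=1), default=0, output_field=IntegerField())),
        )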
@@ -489,7 +498,7 @@ def aggregate_findings(tenant_id: str, scan_id: str):
region=agg["resources__region"],
fail=agg["fail"],
_pass=agg["_pass"],
muted=agg["muted_count"],
muted=agg["muted"],
total=agg["total"],
new=agg["new"],
changed=agg["changed"],
@@ -504,114 +513,3 @@ def aggregate_findings(tenant_id: str, scan_id: str):
for agg in aggregation
}
ScanSummary.objects.bulk_create(scan_aggregations, batch_size=3000)

def create_compliance_requirements(tenant_id: str, scan_id: str):
"""
Create detailed compliance requirement overview records for a scan.

This function processes the compliance data collected during a scan and creates
individual records for each compliance requirement in each region. These detailed
records provide a granular view of compliance status.

Args:
tenant_id (str): The ID of the tenant for which to create records.
scan_id (str): The ID of the scan for which to create records.

Returns:
dict: A dictionary containing the number of requirements created and the regions processed.

Raises:
ValidationError: If tenant_id is not a valid UUID.
"""
try:
with rls_transaction(tenant_id):
scan_instance = Scan.objects.get(pk=scan_id)
provider_instance = scan_instance.provider
prowler_provider = return_prowler_provider(provider_instance)

# Get check status data by region from findings
check_status_by_region = {}
with rls_transaction(tenant_id):
findings = Finding.objects.filter(scan_id=scan_id, muted=False)
for finding in findings:
# Get region from resources
for resource in finding.resources.all():
region = resource.region
region_dict = check_status_by_region.setdefault(region, {})
current_status = region_dict.get(finding.check_id)
if current_status == "FAIL":
continue
region_dict[finding.check_id] = finding.status

try:
# Try to get regions from provider
regions = prowler_provider.get_regions()
except (AttributeError, Exception):
# If not available, use regions from findings
regions = set(check_status_by_region.keys())

# Get compliance template for the provider
compliance_template = PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE[
provider_instance.provider
]

# Create compliance data by region
compliance_overview_by_region = {
region: deepcopy(compliance_template) for region in regions
}

# Apply check statuses to compliance data
for region, check_status in check_status_by_region.items():
compliance_data = compliance_overview_by_region.setdefault(
region, deepcopy(compliance_template)
)
for check_name, status in check_status.items():
generate_scan_compliance(
compliance_data,
provider_instance.provider,
check_name,
status,
)

# Prepare compliance requirement objects
compliance_requirement_objects = []
for region, compliance_data in compliance_overview_by_region.items():
for compliance_id, compliance in compliance_data.items():
# Create an overview record for each requirement within each compliance framework
for requirement_id, requirement in compliance["requirements"].items():
compliance_requirement_objects.append(
ComplianceRequirementOverview(
tenant_id=tenant_id,
scan=scan_instance,
region=region,
compliance_id=compliance_id,
framework=compliance["framework"],
version=compliance["version"],
requirement_id=requirement_id,
description=requirement["description"],
passed_checks=requirement["checks_status"]["pass"],
failed_checks=requirement["checks_status"]["fail"],
total_checks=requirement["checks_status"]["total"],
requirement_status=requirement["status"],
)
)

# Bulk create requirement records
create_objects_in_batches(
tenant_id, ComplianceRequirementOverview, compliance_requirement_objects
)

return {
"requirements_created": len(compliance_requirement_objects),
"regions_processed": list(regions),
"compliance_frameworks": (
list(compliance_overview_by_region.get(list(regions)[0], {}).keys())
if regions
else []
),
}

except Exception as e:
logger.error(f"Error creating compliance requirements for scan {scan_id}: {e}")
raise e
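For context, a sketch of how the dictionary returned above might be consumed by a caller; the UUIDs are placeholders and the logging is illustrative, not the project's actual code:

    # Sketch: consuming the summary dict returned by create_compliance_requirements.
    result = create_compliance_requirements(
        tenant_id="11111111-1111-1111-1111-111111111111",  # placeholder UUID
        scan_id="22222222-2222-2222-2222-222222222222",    # placeholder UUID
    )
    logger.info(
        "Created %d compliance requirement rows across regions: %s",
        result["requirements_created"],
        ", ".join(result["regions_processed"]),
    )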
@@ -7,56 +7,27 @@ from celery.utils.log import get_task_logger
from config.celery import RLSTask
from config.django.base import DJANGO_FINDINGS_BATCH_SIZE, DJANGO_TMP_OUTPUT_DIRECTORY
from django_celery_beat.models import PeriodicTask
from tasks.jobs.backfill import backfill_resource_scan_summaries
from tasks.jobs.connection import check_lighthouse_connection, check_provider_connection
from tasks.jobs.connection import check_provider_connection
from tasks.jobs.deletion import delete_provider, delete_tenant
from tasks.jobs.export import (
COMPLIANCE_CLASS_MAP,
OUTPUT_FORMATS_MAPPING,
_compress_output_files,
_generate_output_directory,
_upload_to_s3,
)
from tasks.jobs.scan import (
aggregate_findings,
create_compliance_requirements,
perform_prowler_scan,
)
from tasks.jobs.scan import aggregate_findings, perform_prowler_scan
from tasks.utils import batched, get_next_execution_datetime

from api.compliance import get_compliance_frameworks
from api.db_utils import rls_transaction
from api.decorators import set_tenant
from api.models import Finding, Provider, Scan, ScanSummary, StateChoices
from api.utils import initialize_prowler_provider
from api.v1.serializers import ScanTaskSerializer
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.finding import Finding as FindingOutput

logger = get_task_logger(__name__)

def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str):
"""
Helper function to perform tasks after a scan is completed.

Args:
tenant_id (str): The tenant ID under which the scan was performed.
scan_id (str): The ID of the scan that was performed.
provider_id (str): The primary key of the Provider instance that was scanned.
"""
create_compliance_requirements_task.apply_async(
kwargs={"tenant_id": tenant_id, "scan_id": scan_id}
)
chain(
perform_scan_summary_task.si(tenant_id=tenant_id, scan_id=scan_id),
generate_outputs_task.si(
scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
),
).apply_async()

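The helper above first fires the compliance-requirements task on its own, then chains the scan summary and output generation so they run in order. A minimal Celery sketch of the same chaining pattern with immutable .si() signatures, using hypothetical task names rather than the project's tasks:

    # Sketch: .si() builds an immutable signature, so the second task does not
    # receive the first task's return value; chain() enforces the ordering.
    from celery import chain, shared_task

    @shared_task
    def summarize(scan_id: str) -> str:
        return f"summary for {scan_id}"

    @shared_task
    def export(scan_id: str) -> str:
        return f"outputs for {scan_id}"

    # summarize runs first; export starts only after it has finished.
    chain(summarize.si("scan-123"), export.si("scan-123")).apply_async()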
@shared_task(base=RLSTask, name="provider-connection-check")
|
||||
@set_tenant
|
||||
def check_provider_connection_task(provider_id: str):
|
||||
@@ -123,7 +94,12 @@ def perform_scan_task(
|
||||
checks_to_execute=checks_to_execute,
|
||||
)
|
||||
|
||||
_perform_scan_complete_tasks(tenant_id, scan_id, provider_id)
|
||||
chain(
|
||||
perform_scan_summary_task.si(tenant_id, scan_id),
|
||||
generate_outputs.si(
|
||||
scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
|
||||
),
|
||||
).apply_async()
|
||||
|
||||
return result
|
||||
|
||||
@@ -228,12 +204,17 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
|
||||
scheduler_task_id=periodic_task_instance.id,
|
||||
)
|
||||
|
||||
_perform_scan_complete_tasks(tenant_id, str(scan_instance.id), provider_id)
|
||||
chain(
|
||||
perform_scan_summary_task.si(tenant_id, scan_instance.id),
|
||||
generate_outputs.si(
|
||||
scan_id=str(scan_instance.id), provider_id=provider_id, tenant_id=tenant_id
|
||||
),
|
||||
).apply_async()
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@shared_task(name="scan-summary", queue="overview")
|
||||
@shared_task(name="scan-summary")
|
||||
def perform_scan_summary_task(tenant_id: str, scan_id: str):
|
||||
return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
@@ -249,7 +230,7 @@ def delete_tenant_task(tenant_id: str):
queue="scan-reports",
)
@set_tenant(keep_tenant=True)
def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
def generate_outputs(scan_id: str, provider_id: str, tenant_id: str):
"""
Process findings in batches and generate output files in multiple formats.

@@ -265,158 +246,84 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
scan_id (str): The scan identifier.
provider_id (str): The provider_id id to be used in generating outputs.
"""
# Check if the scan has findings
if not ScanSummary.objects.filter(scan_id=scan_id).exists():
logger.info(f"No findings found for scan {scan_id}")
return {"upload": False}
# Initialize the prowler provider
prowler_provider = initialize_prowler_provider(Provider.objects.get(id=provider_id))

provider_obj = Provider.objects.get(id=provider_id)
prowler_provider = initialize_prowler_provider(provider_obj)
provider_uid = provider_obj.uid
provider_type = provider_obj.provider
# Get the provider UID
provider_uid = Provider.objects.get(id=provider_id).uid

frameworks_bulk = Compliance.get_bulk(provider_type)
frameworks_avail = get_compliance_frameworks(provider_type)
out_dir, comp_dir = _generate_output_directory(
# Generate and ensure the output directory exists
output_directory = _generate_output_directory(
DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
)

def get_writer(writer_map, name, factory, is_last):
"""
Return existing writer_map[name] or create via factory().
In both cases set `.close_file = is_last`.
"""
initialization = False
if name not in writer_map:
writer_map[name] = factory()
initialization = True
w = writer_map[name]
w.close_file = is_last

return w, initialization

# Define auxiliary variables
output_writers = {}
compliance_writers = {}

scan_summary = FindingOutput._transform_findings_stats(
ScanSummary.objects.filter(scan_id=scan_id)
)

qs = Finding.all_objects.filter(scan_id=scan_id).order_by("uid").iterator()
for batch, is_last in batched(qs, DJANGO_FINDINGS_BATCH_SIZE):
fos = [FindingOutput.transform_api_finding(f, prowler_provider) for f in batch]
# Retrieve findings queryset
findings_qs = Finding.all_objects.filter(scan_id=scan_id).order_by("uid")

# Outputs
for mode, cfg in OUTPUT_FORMATS_MAPPING.items():
cls = cfg["class"]
suffix = cfg["suffix"]
extra = cfg.get("kwargs", {}).copy()
# Process findings in batches
for batch, is_last_batch in batched(
findings_qs.iterator(), DJANGO_FINDINGS_BATCH_SIZE
):
finding_outputs = [
FindingOutput.transform_api_finding(finding, prowler_provider)
for finding in batch
]

# Generate output files
for mode, config in OUTPUT_FORMATS_MAPPING.items():
kwargs = dict(config.get("kwargs", {}))
if mode == "html":
extra.update(provider=prowler_provider, stats=scan_summary)
kwargs["provider"] = prowler_provider
kwargs["stats"] = scan_summary

writer, initialization = get_writer(
output_writers,
cls,
lambda cls=cls, fos=fos, suffix=suffix: cls(
findings=fos,
file_path=out_dir,
file_extension=suffix,
writer_class = config["class"]
if writer_class in output_writers:
writer = output_writers[writer_class]
writer.transform(finding_outputs)
writer.close_file = is_last_batch
else:
writer = writer_class(
findings=finding_outputs,
file_path=output_directory,
file_extension=config["suffix"],
from_cli=False,
),
is_last,
)
if not initialization:
writer.transform(fos)
writer.batch_write_data_to_file(**extra)
writer._data.clear()
)
writer.close_file = is_last_batch
output_writers[writer_class] = writer

# Compliance CSVs
for name in frameworks_avail:
compliance_obj = frameworks_bulk[name]
# Write the current batch using the writer
writer.batch_write_data_to_file(**kwargs)

klass = GenericCompliance
for condition, cls in COMPLIANCE_CLASS_MAP.get(provider_type, []):
if condition(name):
klass = cls
break
# TODO: Refactor the output classes to avoid this manual reset
writer._data = []

filename = f"{comp_dir}_{name}.csv"
# Compress output files
output_directory = _compress_output_files(output_directory)

writer, initialization = get_writer(
compliance_writers,
name,
lambda klass=klass, fos=fos: klass(
findings=fos,
compliance=compliance_obj,
file_path=filename,
from_cli=False,
),
is_last,
)
if not initialization:
writer.transform(fos, compliance_obj, name)
writer.batch_write_data_to_file()
writer._data.clear()
# Save to configured storage
uploaded = _upload_to_s3(tenant_id, output_directory, scan_id)

compressed = _compress_output_files(out_dir)
upload_uri = _upload_to_s3(tenant_id, compressed, scan_id)

if upload_uri:
if uploaded:
# Remove the local files after upload
try:
rmtree(Path(compressed).parent, ignore_errors=True)
except Exception as e:
rmtree(Path(output_directory).parent, ignore_errors=True)
except FileNotFoundError as e:
logger.error(f"Error deleting output files: {e}")
final_location, did_upload = upload_uri, True

output_directory = uploaded
uploaded = True
else:
final_location, did_upload = compressed, False
uploaded = False

Scan.all_objects.filter(id=scan_id).update(output_location=final_location)
logger.info(f"Scan outputs at {final_location}")
return {"upload": did_upload}
# Update the scan instance with the output path
Scan.all_objects.filter(id=scan_id).update(output_location=output_directory)

logger.info(f"Scan output files generated, output location: {output_directory}")

@shared_task(name="backfill-scan-resource-summaries", queue="backfill")
def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):
"""
Tries to backfill the resource scan summaries table for a given scan.

Args:
tenant_id (str): The tenant identifier.
scan_id (str): The scan identifier.
"""
return backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)

@shared_task(base=RLSTask, name="scan-compliance-overviews", queue="overview")
def create_compliance_requirements_task(tenant_id: str, scan_id: str):
"""
Creates detailed compliance requirement records for a scan.

This task processes the compliance data collected during a scan and creates
individual records for each compliance requirement in each region. These detailed
records provide a granular view of compliance status.

Args:
tenant_id (str): The tenant ID for which to create records.
scan_id (str): The ID of the scan for which to create records.
"""
return create_compliance_requirements(tenant_id=tenant_id, scan_id=scan_id)

@shared_task(base=RLSTask, name="lighthouse-connection-check")
@set_tenant
def check_lighthouse_connection_task(lighthouse_config_id: str, tenant_id: str = None):
"""
Task to check the connection status of a Lighthouse configuration.

Args:
lighthouse_config_id (str): The primary key of the LighthouseConfiguration instance to check.
tenant_id (str): The tenant ID for the task.

Returns:
dict: A dictionary containing:
- 'connected' (bool): Indicates whether the connection is successful.
- 'error' (str or None): The error message if the connection failed, otherwise `None`.
- 'available_models' (list): List of available models if connection is successful.
"""
return check_lighthouse_connection(lighthouse_config_id=lighthouse_config_id)
return {"upload": uploaded}
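The batch loop above relies on batched() yielding each batch together with a flag marking the final one, which is what drives writer.close_file. A pure-Python sketch of that (batch, is_last) contract; the real tasks.utils.batched may be implemented differently:

    # Sketch: a batching generator that flags the last batch.
    from itertools import islice

    def batched_sketch(iterable, size):
        iterator = iter(iterable)
        batch = list(islice(iterator, size))
        while batch:
            next_batch = list(islice(iterator, size))
            yield batch, not next_batch  # True only for the final batch
            batch = next_batch

    for batch, is_last in batched_sketch(range(7), 3):
        print(batch, is_last)
    # [0, 1, 2] False / [3, 4, 5] False / [6] True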
@@ -1,79 +0,0 @@
from uuid import uuid4

import pytest
from tasks.jobs.backfill import backfill_resource_scan_summaries

from api.models import ResourceScanSummary, Scan, StateChoices

@pytest.mark.django_db
class TestBackfillResourceScanSummaries:
@pytest.fixture(scope="function")
def resource_scan_summary_data(self, scans_fixture):
scan = scans_fixture[0]
return ResourceScanSummary.objects.create(
tenant_id=scan.tenant_id,
scan_id=scan.id,
resource_id=str(uuid4()),
service="aws",
region="us-east-1",
resource_type="instance",
)

@pytest.fixture(scope="function")
def get_not_completed_scans(self, providers_fixture):
provider_id = providers_fixture[0].id
tenant_id = providers_fixture[0].tenant_id
scan_1 = Scan.objects.create(
tenant_id=tenant_id,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.EXECUTING,
provider_id=provider_id,
)
scan_2 = Scan.objects.create(
tenant_id=tenant_id,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.AVAILABLE,
provider_id=provider_id,
)
return scan_1, scan_2

def test_already_backfilled(self, resource_scan_summary_data):
tenant_id = resource_scan_summary_data.tenant_id
scan_id = resource_scan_summary_data.scan_id

result = backfill_resource_scan_summaries(tenant_id, scan_id)

assert result == {"status": "already backfilled"}

def test_not_completed_scan(self, get_not_completed_scans):
for scan_instance in get_not_completed_scans:
tenant_id = scan_instance.tenant_id
scan_id = scan_instance.id
result = backfill_resource_scan_summaries(tenant_id, scan_id)

assert result == {"status": "scan is not completed"}

def test_successful_backfill_inserts_one_summary(
self, resources_fixture, findings_fixture
):
tenant_id = findings_fixture[0].tenant_id
scan_id = findings_fixture[0].scan_id

# This scan affects the first two resources
resources = resources_fixture[:2]

result = backfill_resource_scan_summaries(tenant_id, scan_id)
assert result == {"status": "backfilled", "inserted": len(resources)}

# Verify correct values
summaries = ResourceScanSummary.objects.filter(
tenant_id=tenant_id, scan_id=scan_id
)
assert summaries.count() == len(resources)
for resource in resources:
summary = summaries.get(resource_id=resource.id)
assert summary.resource_id == resource.id
assert summary.service == resource.service
assert summary.region == resource.region
assert summary.resource_type == resource.type
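The deleted tests pin down three return shapes for backfill_resource_scan_summaries. A rough sketch of the control flow those assertions imply, with stubbed helpers standing in for the real queries (not the actual implementation in tasks/jobs/backfill.py):

    # Sketch of the decision flow implied by the assertions above; the helpers
    # below are stubs, not real project code.
    def summaries_already_exist(tenant_id, scan_id):
        return False

    def scan_is_completed(tenant_id, scan_id):
        return True

    def insert_summaries_from_findings(tenant_id, scan_id):
        return 2

    def backfill_sketch(tenant_id, scan_id):
        if summaries_already_exist(tenant_id, scan_id):
            return {"status": "already backfilled"}
        if not scan_is_completed(tenant_id, scan_id):
            return {"status": "scan is not completed"}
        inserted = insert_summaries_from_findings(tenant_id, scan_id)
        return {"status": "backfilled", "inserted": inserted}

    print(backfill_sketch("tenant", "scan"))  # {'status': 'backfilled', 'inserted': 2}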
@@ -55,22 +55,3 @@ class TestScheduleProviderScan:
assert "There is already a scheduled scan for this provider." in str(
exc_info.value
)

def test_remove_periodic_task(self, providers_fixture):
provider_instance = providers_fixture[0]

assert Scan.objects.count() == 0
with patch("tasks.tasks.perform_scheduled_scan_task.apply_async"):
schedule_provider_scan(provider_instance)

assert Scan.objects.count() == 1
scan = Scan.objects.first()
periodic_task = scan.scheduler_task
assert periodic_task is not None

periodic_task.delete()

scan.refresh_from_db()
# Assert the scan still exists but its scheduler_task is set to None
# Otherwise, Scan.DoesNotExist would be raised
assert Scan.objects.get(id=scan.id).scheduler_task is None
@@ -1,10 +1,10 @@
from datetime import datetime, timezone
from unittest.mock import MagicMock, patch
from unittest.mock import patch, MagicMock

import pytest
from tasks.jobs.connection import check_lighthouse_connection, check_provider_connection

from api.models import LighthouseConfiguration, Provider
from api.models import Provider
from tasks.jobs.connection import check_provider_connection

@pytest.mark.parametrize(
@@ -70,60 +70,3 @@ def test_check_provider_connection_exception(

mock_provider_instance.save.assert_called_once()
assert mock_provider_instance.connected is False

@pytest.mark.parametrize(
"lighthouse_data",
[
{
"name": "OpenAI",
"api_key_decoded": "sk-test1234567890T3BlbkFJtest1234567890",
"model": "gpt-4o",
"temperature": 0,
"max_tokens": 4000,
"business_context": "Test business context",
"is_active": True,
},
],
)
@patch("tasks.jobs.connection.openai.OpenAI")
@pytest.mark.django_db
def test_check_lighthouse_connection(
mock_openai_client, tenants_fixture, lighthouse_data
):
lighthouse_config = LighthouseConfiguration.objects.create(
**lighthouse_data, tenant_id=tenants_fixture[0].id
)

mock_models = MagicMock()
mock_models.data = [MagicMock(id="gpt-4o"), MagicMock(id="gpt-4o-mini")]
mock_openai_client.return_value.models.list.return_value = mock_models

result = check_lighthouse_connection(
lighthouse_config_id=str(lighthouse_config.id),
)
lighthouse_config.refresh_from_db()

mock_openai_client.assert_called_once_with(
api_key=lighthouse_data["api_key_decoded"]
)
assert lighthouse_config.is_active is True
assert result["connected"] is True
assert result["error"] is None
assert result["available_models"] == ["gpt-4o", "gpt-4o-mini"]

@patch("tasks.jobs.connection.LighthouseConfiguration.objects.get")
@pytest.mark.django_db
def test_check_lighthouse_connection_missing_api_key(mock_lighthouse_get):
mock_lighthouse_instance = MagicMock()
mock_lighthouse_instance.api_key_decoded = None
mock_lighthouse_get.return_value = mock_lighthouse_instance

result = check_lighthouse_connection("lighthouse_config_id")

assert result["connected"] is False
assert result["error"] == "API key is invalid or missing."
assert result["available_models"] == []
assert mock_lighthouse_instance.is_active is False
mock_lighthouse_instance.save.assert_called_once()
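The removed tests mock tasks.jobs.connection.openai.OpenAI and assert the exact shape of the returned dict. A hedged sketch of such a probe against the OpenAI client (openai>=1.0 style); the real check_lighthouse_connection may differ:

    # Sketch of the connection probe the tests above mock out.
    import openai

    def probe_openai(api_key):
        if not api_key:
            return {"connected": False, "error": "API key is invalid or missing.", "available_models": []}
        try:
            client = openai.OpenAI(api_key=api_key)
            models = [model.id for model in client.models.list().data]
            return {"connected": True, "error": None, "available_models": models}
        except Exception as error:
            return {"connected": False, "error": str(error), "available_models": []}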
@@ -1,166 +0,0 @@
import os
import zipfile
from pathlib import Path
from unittest.mock import MagicMock, patch

import pytest
from botocore.exceptions import ClientError
from tasks.jobs.export import (
_compress_output_files,
_generate_output_directory,
_upload_to_s3,
get_s3_client,
)

@pytest.mark.django_db
class TestOutputs:
def test_compress_output_files_creates_zip(self, tmpdir):
base_tmp = Path(str(tmpdir.mkdir("compress_output")))
output_dir = base_tmp / "output"
output_dir.mkdir()
file_path = output_dir / "result.csv"
file_path.write_text("data")

zip_path = _compress_output_files(str(output_dir))

assert zip_path.endswith(".zip")
assert os.path.exists(zip_path)
with zipfile.ZipFile(zip_path, "r") as zipf:
assert "output/result.csv" in zipf.namelist()

@patch("tasks.jobs.export.boto3.client")
@patch("tasks.jobs.export.settings")
def test_get_s3_client_success(self, mock_settings, mock_boto_client):
mock_settings.DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID = "test"
mock_settings.DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY = "test"
mock_settings.DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN = "token"
mock_settings.DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION = "eu-west-1"

client_mock = MagicMock()
mock_boto_client.return_value = client_mock

client = get_s3_client()
assert client is not None
client_mock.list_buckets.assert_called()

@patch("tasks.jobs.export.boto3.client")
@patch("tasks.jobs.export.settings")
def test_get_s3_client_fallback(self, mock_settings, mock_boto_client):
mock_boto_client.side_effect = [
ClientError({"Error": {"Code": "403"}}, "ListBuckets"),
MagicMock(),
]
client = get_s3_client()
assert client is not None

@patch("tasks.jobs.export.get_s3_client")
@patch("tasks.jobs.export.base")
def test_upload_to_s3_success(self, mock_base, mock_get_client, tmpdir):
mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = "test-bucket"

base_tmp = Path(str(tmpdir.mkdir("upload_success")))
zip_path = base_tmp / "outputs.zip"
zip_path.write_bytes(b"dummy")

compliance_dir = base_tmp / "compliance"
compliance_dir.mkdir()
(compliance_dir / "report.csv").write_text("ok")

client_mock = MagicMock()
mock_get_client.return_value = client_mock

result = _upload_to_s3("tenant-id", str(zip_path), "scan-id")

expected_uri = "s3://test-bucket/tenant-id/scan-id/outputs.zip"
assert result == expected_uri
assert client_mock.upload_file.call_count == 2

@patch("tasks.jobs.export.get_s3_client")
@patch("tasks.jobs.export.base")
def test_upload_to_s3_missing_bucket(self, mock_base, mock_get_client):
mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = ""
result = _upload_to_s3("tenant", "/tmp/fake.zip", "scan")
assert result is None

@patch("tasks.jobs.export.get_s3_client")
@patch("tasks.jobs.export.base")
def test_upload_to_s3_skips_non_files(self, mock_base, mock_get_client, tmpdir):
mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = "test-bucket"
base_tmp = Path(str(tmpdir.mkdir("upload_skips_non_files")))

zip_path = base_tmp / "results.zip"
zip_path.write_bytes(b"zip")

compliance_dir = base_tmp / "compliance"
compliance_dir.mkdir()
(compliance_dir / "subdir").mkdir()

client_mock = MagicMock()
mock_get_client.return_value = client_mock

result = _upload_to_s3("tenant", str(zip_path), "scan")

expected_uri = "s3://test-bucket/tenant/scan/results.zip"
assert result == expected_uri
client_mock.upload_file.assert_called_once()

@patch(
"tasks.jobs.export.get_s3_client",
side_effect=ClientError({"Error": {}}, "Upload"),
)
@patch("tasks.jobs.export.base")
@patch("tasks.jobs.export.logger.error")
def test_upload_to_s3_failure_logs_error(
self, mock_logger, mock_base, mock_get_client, tmpdir
):
mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = "bucket"

base_tmp = Path(str(tmpdir.mkdir("upload_failure_logs")))
zip_path = base_tmp / "zipfile.zip"
zip_path.write_bytes(b"zip")

compliance_dir = base_tmp / "compliance"
compliance_dir.mkdir()
(compliance_dir / "report.csv").write_text("csv")

_upload_to_s3("tenant", str(zip_path), "scan")
mock_logger.assert_called()

def test_generate_output_directory_creates_paths(self, tmpdir):
from prowler.config.config import output_file_timestamp

base_tmp = Path(str(tmpdir.mkdir("generate_output")))
base_dir = str(base_tmp)
tenant_id = "t1"
scan_id = "s1"
provider = "aws"

path, compliance = _generate_output_directory(
base_dir, provider, tenant_id, scan_id
)

assert os.path.isdir(os.path.dirname(path))
assert os.path.isdir(os.path.dirname(compliance))

assert path.endswith(f"{provider}-{output_file_timestamp}")
assert compliance.endswith(f"{provider}-{output_file_timestamp}")

def test_generate_output_directory_invalid_character(self, tmpdir):
from prowler.config.config import output_file_timestamp

base_tmp = Path(str(tmpdir.mkdir("generate_output")))
base_dir = str(base_tmp)
tenant_id = "t1"
scan_id = "s1"
provider = "aws/test@check"

path, compliance = _generate_output_directory(
base_dir, provider, tenant_id, scan_id
)

assert os.path.isdir(os.path.dirname(path))
assert os.path.isdir(os.path.dirname(compliance))

assert path.endswith(f"aws-test-check-{output_file_timestamp}")
assert compliance.endswith(f"aws-test-check-{output_file_timestamp}")
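The removed export tests encode the expected S3 layout: the zip ends up at s3://<bucket>/<tenant_id>/<scan_id>/<zip name> and only regular files are uploaded from the compliance directory. A boto3 sketch of that layout; the key names for the compliance files are an assumption, and the real _upload_to_s3 may differ:

    # Sketch of the upload layout asserted by the tests above.
    import os
    import boto3

    def upload_outputs_sketch(bucket, tenant_id, scan_id, zip_path, compliance_dir):
        s3 = boto3.client("s3")
        prefix = f"{tenant_id}/{scan_id}"
        zip_key = f"{prefix}/{os.path.basename(zip_path)}"
        s3.upload_file(zip_path, bucket, zip_key)
        for entry in os.listdir(compliance_dir):
            path = os.path.join(compliance_dir, entry)
            if os.path.isfile(path):  # directories are skipped, as the tests expect
                s3.upload_file(path, bucket, f"{prefix}/compliance/{entry}")  # key layout assumed
        return f"s3://{bucket}/{zip_key}"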
Some files were not shown because too many files have changed in this diff.