mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-03-31 21:27:28 +00:00
Compare commits
318 Commits
fix/update
...
PRWLR-6707
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fcfd47ec2f | ||
|
|
349e05b1d5 | ||
|
|
505b4907d3 | ||
|
|
3ae15b30eb | ||
|
|
ce3097a051 | ||
|
|
150d62f48d | ||
|
|
b1341adc7c | ||
|
|
bc8e82adbd | ||
|
|
5a407ee4c2 | ||
|
|
3c7d7d7a69 | ||
|
|
03d24a0b99 | ||
|
|
7830ed8b9f | ||
|
|
d4e66c4a6f | ||
|
|
1cfe610d47 | ||
|
|
d9a9236ab7 | ||
|
|
285aea3458 | ||
|
|
e0258bab85 | ||
|
|
b051aeeb64 | ||
|
|
b99dce6a43 | ||
|
|
4819e7821f | ||
|
|
6555f335b2 | ||
|
|
04749c1da1 | ||
|
|
44d70f8467 | ||
|
|
95791a9909 | ||
|
|
ad0b8a4208 | ||
|
|
5669a42039 | ||
|
|
83b328ea92 | ||
|
|
a6c88c0d9e | ||
|
|
922f9d2f91 | ||
|
|
a69d0d16c0 | ||
|
|
676cc44fe2 | ||
|
|
3840e40870 | ||
|
|
ab2d57554a | ||
|
|
cbb5b21e6c | ||
|
|
1efd5668ce | ||
|
|
ca86aeb1d7 | ||
|
|
4f2a8b71bb | ||
|
|
3b0cb3db85 | ||
|
|
00c527ff79 | ||
|
|
ab348d5752 | ||
|
|
dd713351dc | ||
|
|
f352e11788 | ||
|
|
06e062a35e | ||
|
|
fa722f1dc7 | ||
|
|
b0cc3978d0 | ||
|
|
aa843b823c | ||
|
|
5f044f0d56 | ||
|
|
020edc0d1d | ||
|
|
036da81bbd | ||
|
|
4428bcb2c0 | ||
|
|
21de9a2f6f | ||
|
|
231d933b9e | ||
|
|
2ad360a7f9 | ||
|
|
51b67f00d6 | ||
|
|
ab378684ae | ||
|
|
e89df617ef | ||
|
|
8496a6b045 | ||
|
|
28f3cf363b | ||
|
|
eb3d4b25e3 | ||
|
|
1211fe706e | ||
|
|
c4a9280ebb | ||
|
|
0f12fb92ed | ||
|
|
4787d300f0 | ||
|
|
ee974a6316 | ||
|
|
d004a0c931 | ||
|
|
087e01cc4f | ||
|
|
74940e1fc4 | ||
|
|
19e35bf9a8 | ||
|
|
7213187e6c | ||
|
|
4b104e92f0 | ||
|
|
7179119b0e | ||
|
|
cf2738810a | ||
|
|
389216570a | ||
|
|
2becf45f33 | ||
|
|
c32ce7eb97 | ||
|
|
94e66a91a6 | ||
|
|
1ac4417f74 | ||
|
|
57c5f7c12d | ||
|
|
19203f92b3 | ||
|
|
c5b1bf3e52 | ||
|
|
f845176494 | ||
|
|
f0ed866946 | ||
|
|
289de9376f | ||
|
|
6874d318fa | ||
|
|
834a7d3b69 | ||
|
|
24a50c6ac2 | ||
|
|
ec8afd773f | ||
|
|
a09be4c0ba | ||
|
|
4b62fdcf53 | ||
|
|
bf0013dae3 | ||
|
|
c82cd5288c | ||
|
|
ad31a6b3f5 | ||
|
|
20c7c9f8de | ||
|
|
0cfe41e452 | ||
|
|
1b254feadc | ||
|
|
15954d8a01 | ||
|
|
ff122c9779 | ||
|
|
a012397e55 | ||
|
|
7da6d7b5dd | ||
|
|
db6a27d1f5 | ||
|
|
e07c833cab | ||
|
|
728fc9d6ff | ||
|
|
cf9ff78605 | ||
|
|
27a9b2d494 | ||
|
|
a2faf548af | ||
|
|
8bcec4926b | ||
|
|
a4e96f809b | ||
|
|
fa27255dd7 | ||
|
|
05360e469f | ||
|
|
9d405ddcbd | ||
|
|
430f831543 | ||
|
|
da9d7199b7 | ||
|
|
d63a383ec6 | ||
|
|
55c226029e | ||
|
|
8d2f6aa30c | ||
|
|
a319f80701 | ||
|
|
15a8671f0d | ||
|
|
d34e709d91 | ||
|
|
ddc53c3c6d | ||
|
|
a3aef18cfe | ||
|
|
49ca3ca325 | ||
|
|
89c67079a3 | ||
|
|
2de8075d87 | ||
|
|
e124275dbf | ||
|
|
760d28e752 | ||
|
|
3fb0733887 | ||
|
|
7de9a37edb | ||
|
|
fe00b788cc | ||
|
|
4c50f4d811 | ||
|
|
c0c736bffe | ||
|
|
a3aa7d0a63 | ||
|
|
3ceb86c4d9 | ||
|
|
3628e7b3e8 | ||
|
|
f29c2ac9f0 | ||
|
|
b4927c3ad1 | ||
|
|
19f3c1d310 | ||
|
|
cd97e57521 | ||
|
|
b38207507a | ||
|
|
ab96e0aac0 | ||
|
|
4477cecc59 | ||
|
|
641d671312 | ||
|
|
e7c2fa0699 | ||
|
|
7eb08b0f14 | ||
|
|
149cd3e535 | ||
|
|
6f3112f754 | ||
|
|
d185d7bac6 | ||
|
|
f5ecae6da1 | ||
|
|
1c75f6b804 | ||
|
|
91b64d8572 | ||
|
|
233ae74560 | ||
|
|
fac97f9785 | ||
|
|
e81c7a3893 | ||
|
|
d6f26df2e8 | ||
|
|
ece74e15fd | ||
|
|
eea6d07259 | ||
|
|
4a6d7a5be2 | ||
|
|
883c5d4e56 | ||
|
|
f1f998c2fa | ||
|
|
5276e38f1d | ||
|
|
ad98a4747f | ||
|
|
5798321dc6 | ||
|
|
878f4857af | ||
|
|
bf58728d29 | ||
|
|
fcea3b6570 | ||
|
|
965111245a | ||
|
|
f78a29206c | ||
|
|
c719d705e0 | ||
|
|
8948ee6868 | ||
|
|
24fb31e98f | ||
|
|
c8b193e658 | ||
|
|
6d27738c4d | ||
|
|
17b7becfdf | ||
|
|
cfa7f271d2 | ||
|
|
e61a97cb65 | ||
|
|
cd4a1ad8a7 | ||
|
|
e650d19a30 | ||
|
|
f930739a3d | ||
|
|
89fc698a0e | ||
|
|
76e058f28f | ||
|
|
6acb6bbf8e | ||
|
|
971424f822 | ||
|
|
9ba1ae1ced | ||
|
|
062db4cc70 | ||
|
|
dc4db10c41 | ||
|
|
68a542ef64 | ||
|
|
32f3787e18 | ||
|
|
6792bea319 | ||
|
|
ae4b43c137 | ||
|
|
d576c4f1c4 | ||
|
|
ddc0596aa2 | ||
|
|
636bdb6d0a | ||
|
|
4a839b0146 | ||
|
|
73e244dce5 | ||
|
|
6c86ff9d24 | ||
|
|
e152848a59 | ||
|
|
ab534fbe82 | ||
|
|
98d9d3993f | ||
|
|
b75d24ee08 | ||
|
|
d8ed70236b | ||
|
|
f7533de7dd | ||
|
|
f8b79ec791 | ||
|
|
bcc96ab4f2 | ||
|
|
fd53a8c9d0 | ||
|
|
7b58d1dd56 | ||
|
|
7858c147f7 | ||
|
|
8e635b3bd4 | ||
|
|
2e97e37316 | ||
|
|
cd804836a1 | ||
|
|
d102ee2fd5 | ||
|
|
325e5739a2 | ||
|
|
98da3059b4 | ||
|
|
c419d3ed6f | ||
|
|
80fd5d1ba6 | ||
|
|
85242c7909 | ||
|
|
ea6ab406c8 | ||
|
|
cbf2a28bac | ||
|
|
5b1e7bb7f9 | ||
|
|
e108b2caed | ||
|
|
df1abb2152 | ||
|
|
e0465f2aa2 | ||
|
|
51467767cd | ||
|
|
bc71e7fb3b | ||
|
|
6a331c05e8 | ||
|
|
7ab503a096 | ||
|
|
b368190c9f | ||
|
|
8915fdff18 | ||
|
|
9bf108e9cc | ||
|
|
87708e39cf | ||
|
|
44927c44e9 | ||
|
|
71aa29cf24 | ||
|
|
aa14daf0db | ||
|
|
eb5dbab86e | ||
|
|
223aab8ece | ||
|
|
3ec57340a0 | ||
|
|
80d73cc05b | ||
|
|
a9ca61202b | ||
|
|
94f02df11e | ||
|
|
c454ceb296 | ||
|
|
76ec13a1d6 | ||
|
|
783b6ea982 | ||
|
|
6b7b700a98 | ||
|
|
b3f2a1c532 | ||
|
|
c4e1bd3ed2 | ||
|
|
d0d4e0d483 | ||
|
|
14a9f0e765 | ||
|
|
b572575c8d | ||
|
|
a626e41162 | ||
|
|
22343faa1e | ||
|
|
c5b37887ef | ||
|
|
f9aed36d0b | ||
|
|
facc0627d7 | ||
|
|
76f0d890e9 | ||
|
|
7de7122c3b | ||
|
|
1b73ab2fe4 | ||
|
|
cc8f6131e6 | ||
|
|
dfd5c9aee7 | ||
|
|
3986bf3f42 | ||
|
|
c45ef1e286 | ||
|
|
8d8f498dc2 | ||
|
|
c4bd9122d4 | ||
|
|
644cdc81b9 | ||
|
|
e5584f21b3 | ||
|
|
b868d39bef | ||
|
|
ef9809f61f | ||
|
|
9a04ca3611 | ||
|
|
1c9b3a1394 | ||
|
|
5ee7bd6459 | ||
|
|
05d2b86ba8 | ||
|
|
84c30af6f8 | ||
|
|
e8a829b75e | ||
|
|
a0d169470d | ||
|
|
1fd6046511 | ||
|
|
524455b0f3 | ||
|
|
e6e1e37c1e | ||
|
|
2914510735 | ||
|
|
7e43c7797f | ||
|
|
6954ef880e | ||
|
|
5f5e7015a9 | ||
|
|
bfafa518b1 | ||
|
|
e4a78e509c | ||
|
|
e34e59ff2d | ||
|
|
7f80d2db46 | ||
|
|
4a2a3921da | ||
|
|
b54cc040b0 | ||
|
|
f1649f4958 | ||
|
|
1725264a80 | ||
|
|
f37d281025 | ||
|
|
6638aa09e9 | ||
|
|
e26b2e6527 | ||
|
|
954814c1d7 | ||
|
|
113224cbd9 | ||
|
|
f5f1fce779 | ||
|
|
0ba9383202 | ||
|
|
8e9a9797c7 | ||
|
|
2b4e6bffae | ||
|
|
74f7a86c2b | ||
|
|
e218435b2f | ||
|
|
5ec34ad5e7 | ||
|
|
c4b0859efd | ||
|
|
1241a490f9 | ||
|
|
4ec498a612 | ||
|
|
119c5e80a9 | ||
|
|
d393bc48a2 | ||
|
|
e09e3855b1 | ||
|
|
8751615faa | ||
|
|
e7c17ab0b3 | ||
|
|
f05d3eb334 | ||
|
|
cf449d4607 | ||
|
|
b338ac9add | ||
|
|
366d2b392a | ||
|
|
41fc536b44 | ||
|
|
6fd5217615 | ||
|
|
e042445ecf | ||
|
|
da9f4ea6e6 | ||
|
|
c17129afe3 | ||
|
|
7aacb0e1f1 | ||
|
|
f8cd91a0ce | ||
|
|
d2e29a337f |
15
.env
15
.env
@@ -6,11 +6,15 @@
|
||||
PROWLER_UI_VERSION="stable"
|
||||
AUTH_URL=http://localhost:3000
|
||||
API_BASE_URL=http://prowler-api:8080/api/v1
|
||||
NEXT_PUBLIC_API_BASE_URL=${API_BASE_URL}
|
||||
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
|
||||
AUTH_TRUST_HOST=true
|
||||
UI_PORT=3000
|
||||
# openssl rand -base64 32
|
||||
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
|
||||
# Google Tag Manager ID
|
||||
NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=""
|
||||
|
||||
|
||||
#### Prowler API Configuration ####
|
||||
PROWLER_API_VERSION="stable"
|
||||
@@ -127,7 +131,7 @@ SENTRY_ENVIRONMENT=local
|
||||
SENTRY_RELEASE=local
|
||||
|
||||
#### Prowler release version ####
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.6.0
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.7.5
|
||||
|
||||
# Social login credentials
|
||||
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
|
||||
@@ -139,5 +143,10 @@ SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
|
||||
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""
|
||||
|
||||
# Single Sign-On (SSO)
|
||||
SAML_PUBLIC_CERT=""
|
||||
SAML_PRIVATE_KEY=""
|
||||
SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"
|
||||
|
||||
# Lighthouse tracing
|
||||
LANGSMITH_TRACING=false
|
||||
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
|
||||
LANGSMITH_API_KEY=""
|
||||
LANGCHAIN_PROJECT=""
|
||||
|
||||
5
.github/labeler.yml
vendored
5
.github/labeler.yml
vendored
@@ -27,6 +27,11 @@ provider/github:
|
||||
- any-glob-to-any-file: "prowler/providers/github/**"
|
||||
- any-glob-to-any-file: "tests/providers/github/**"
|
||||
|
||||
provider/iac:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/iac/**"
|
||||
- any-glob-to-any-file: "tests/providers/iac/**"
|
||||
|
||||
github_actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: ".github/workflows/*"
|
||||
|
||||
@@ -6,6 +6,7 @@ on:
|
||||
- "master"
|
||||
paths:
|
||||
- "api/**"
|
||||
- "prowler/**"
|
||||
- ".github/workflows/api-build-lint-push-containers.yml"
|
||||
|
||||
# Uncomment the code below to test this action on PRs
|
||||
@@ -76,12 +77,12 @@ jobs:
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
# Comment the following line for testing
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
# Set push: false for testing
|
||||
@@ -94,7 +95,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
|
||||
4
.github/workflows/api-codeql.yml
vendored
4
.github/workflows/api-codeql.yml
vendored
@@ -48,12 +48,12 @@ jobs:
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
|
||||
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/api-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
|
||||
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
35
.github/workflows/api-pull-request.yml
vendored
35
.github/workflows/api-pull-request.yml
vendored
@@ -28,6 +28,10 @@ env:
|
||||
VALKEY_DB: 0
|
||||
API_WORKING_DIR: ./api
|
||||
IMAGE_NAME: prowler-api
|
||||
IGNORE_FILES: |
|
||||
api/docs/**
|
||||
api/README.md
|
||||
api/CHANGELOG.md
|
||||
|
||||
jobs:
|
||||
test:
|
||||
@@ -78,12 +82,7 @@ jobs:
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: |
|
||||
api/.github/**
|
||||
api/docs/**
|
||||
api/permissions/**
|
||||
api/README.md
|
||||
api/mkdocs.yml
|
||||
files_ignore: ${{ env.IGNORE_FILES }}
|
||||
|
||||
- name: Replace @master with current branch in pyproject.toml
|
||||
working-directory: ./api
|
||||
@@ -137,12 +136,6 @@ jobs:
|
||||
run: |
|
||||
poetry check --lock
|
||||
|
||||
- name: Prevents known compatibility error between lxml and libxml2/libxmlsec versions - https://github.com/xmlsec/python-xmlsec/issues/320
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pip install --force-reinstall --no-binary lxml lxml
|
||||
|
||||
- name: Lint with ruff
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
@@ -170,8 +163,9 @@ jobs:
|
||||
- name: Safety
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
|
||||
run: |
|
||||
poetry run safety check --ignore 70612,66963,74429
|
||||
poetry run safety check --ignore 70612,66963,74429,76352,76353,77323
|
||||
|
||||
- name: Vulture
|
||||
working-directory: ./api
|
||||
@@ -193,7 +187,7 @@ jobs:
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -202,10 +196,19 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: ${{ env.IGNORE_FILES }}
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
- name: Build Container
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.API_WORKING_DIR }}
|
||||
push: false
|
||||
|
||||
2
.github/workflows/find-secrets.yml
vendored
2
.github/workflows/find-secrets.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: TruffleHog OSS
|
||||
uses: trufflesecurity/trufflehog@b06f6d72a3791308bb7ba59c2b8cb7a083bd17e4 # v3.88.26
|
||||
uses: trufflesecurity/trufflehog@6641d4ba5b684fffe195b9820345de1bf19f3181 # v3.89.2
|
||||
with:
|
||||
path: ./
|
||||
base: ${{ github.event.repository.default_branch }}
|
||||
|
||||
257
.github/workflows/prowler-release-preparation.yml
vendored
Normal file
257
.github/workflows/prowler-release-preparation.yml
vendored
Normal file
@@ -0,0 +1,257 @@
|
||||
name: Prowler Release Preparation
|
||||
|
||||
run-name: Prowler Release Preparation for ${{ inputs.prowler_version }}
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
prowler_version:
|
||||
description: 'Prowler version to release (e.g., 5.9.0)'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
env:
|
||||
PROWLER_VERSION: ${{ github.event.inputs.prowler_version }}
|
||||
|
||||
jobs:
|
||||
prepare-release:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
python3 -m pip install --user poetry
|
||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Parse version and determine branch
|
||||
run: |
|
||||
# Validate version format (reusing pattern from sdk-bump-version.yml)
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
MAJOR_VERSION=${BASH_REMATCH[1]}
|
||||
MINOR_VERSION=${BASH_REMATCH[2]}
|
||||
PATCH_VERSION=${BASH_REMATCH[3]}
|
||||
|
||||
# Export version components to environment
|
||||
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "PATCH_VERSION=${PATCH_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Determine branch name (format: v5.9)
|
||||
BRANCH_NAME="v${MAJOR_VERSION}.${MINOR_VERSION}"
|
||||
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Calculate UI version (1.X.X format - matches Prowler minor version)
|
||||
UI_VERSION="1.${MINOR_VERSION}.${PATCH_VERSION}"
|
||||
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Calculate API version (1.X.X format - one minor version ahead)
|
||||
API_MINOR_VERSION=$((MINOR_VERSION + 1))
|
||||
API_VERSION="1.${API_MINOR_VERSION}.${PATCH_VERSION}"
|
||||
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Prowler version: $PROWLER_VERSION"
|
||||
echo "Branch name: $BRANCH_NAME"
|
||||
echo "UI version: $UI_VERSION"
|
||||
echo "API version: $API_VERSION"
|
||||
echo "Is minor release: $([ $PATCH_VERSION -eq 0 ] && echo 'true' || echo 'false')"
|
||||
else
|
||||
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Checkout existing branch for patch release
|
||||
if: ${{ env.PATCH_VERSION != '0' }}
|
||||
run: |
|
||||
echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
|
||||
if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME"; then
|
||||
echo "Branch $BRANCH_NAME exists locally, checking out..."
|
||||
git checkout "$BRANCH_NAME"
|
||||
elif git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
|
||||
echo "Branch $BRANCH_NAME exists remotely, checking out..."
|
||||
git checkout -b "$BRANCH_NAME" "origin/$BRANCH_NAME"
|
||||
else
|
||||
echo "ERROR: Branch $BRANCH_NAME should exist for patch release $PROWLER_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Verify version in pyproject.toml
|
||||
run: |
|
||||
CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
|
||||
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
|
||||
echo "ERROR: Version mismatch in pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ pyproject.toml version: $CURRENT_VERSION"
|
||||
|
||||
- name: Verify version in prowler/config/config.py
|
||||
run: |
|
||||
CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
|
||||
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
|
||||
echo "ERROR: Version mismatch in prowler/config/config.py (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"
|
||||
|
||||
- name: Verify version in api/pyproject.toml
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
|
||||
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
|
||||
echo "ERROR: API version mismatch in api/pyproject.toml (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
|
||||
|
||||
- name: Verify prowler dependency in api/pyproject.toml
|
||||
if: ${{ env.PATCH_VERSION != '0' }}
|
||||
run: |
|
||||
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
|
||||
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$BRANCH_NAME_TRIMMED', found: '$CURRENT_PROWLER_REF')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
|
||||
|
||||
- name: Verify version in api/src/backend/api/v1/views.py
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
|
||||
echo "ERROR: API version mismatch in views.py (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"
|
||||
|
||||
- name: Create release branch for minor release
|
||||
if: ${{ env.PATCH_VERSION == '0' }}
|
||||
run: |
|
||||
echo "Minor release detected (patch = 0), creating new branch $BRANCH_NAME..."
|
||||
if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME" || git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
|
||||
echo "ERROR: Branch $BRANCH_NAME already exists for minor release $PROWLER_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
git checkout -b "$BRANCH_NAME"
|
||||
|
||||
- name: Update prowler dependency in api/pyproject.toml
|
||||
if: ${{ env.PATCH_VERSION == '0' }}
|
||||
run: |
|
||||
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
|
||||
|
||||
# Minor release: update the dependency to use the new branch
|
||||
echo "Minor release detected - updating prowler dependency from '$CURRENT_PROWLER_REF' to '$BRANCH_NAME_TRIMMED'"
|
||||
sed -i "s|prowler @ git+https://github.com/prowler-cloud/prowler.git@[^\"]*\"|prowler @ git+https://github.com/prowler-cloud/prowler.git@$BRANCH_NAME_TRIMMED\"|" api/pyproject.toml
|
||||
|
||||
# Verify the change was made
|
||||
UPDATED_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
if [ "$UPDATED_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
|
||||
echo "ERROR: Failed to update prowler dependency in api/pyproject.toml"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Update poetry lock file
|
||||
echo "Updating poetry.lock file..."
|
||||
cd api
|
||||
poetry lock --no-update
|
||||
cd ..
|
||||
|
||||
# Commit and push the changes
|
||||
git add api/pyproject.toml api/poetry.lock
|
||||
git commit -m "chore(api): update prowler dependency to $BRANCH_NAME_TRIMMED for release $PROWLER_VERSION"
|
||||
git push origin "$BRANCH_NAME"
|
||||
|
||||
echo "✓ api/pyproject.toml prowler dependency updated to: $UPDATED_PROWLER_REF"
|
||||
|
||||
- name: Extract changelog entries
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Function to extract changelog for a specific version
|
||||
extract_changelog() {
|
||||
local file="$1"
|
||||
local version="$2"
|
||||
local output_file="$3"
|
||||
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "Warning: $file not found, skipping..."
|
||||
touch "$output_file"
|
||||
return
|
||||
fi
|
||||
|
||||
# Extract changelog section for this version
|
||||
awk -v version="$version" '
|
||||
/^## \[v?'"$version"'\]/ { found=1; next }
|
||||
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
|
||||
found && !/^## \[v?'"$version"'\]/ { print }
|
||||
' "$file" > "$output_file"
|
||||
|
||||
# Remove --- separators
|
||||
sed -i '/^---$/d' "$output_file"
|
||||
|
||||
# Remove trailing empty lines
|
||||
sed -i '/^$/d' "$output_file"
|
||||
}
|
||||
|
||||
# Extract changelogs
|
||||
echo "Extracting changelog entries..."
|
||||
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
|
||||
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
|
||||
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
|
||||
|
||||
# Combine changelogs in order: UI, API, SDK
|
||||
> combined_changelog.md
|
||||
|
||||
if [ -s "ui_changelog.md" ]; then
|
||||
echo "## UI" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat ui_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ -s "api_changelog.md" ]; then
|
||||
echo "## API" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat api_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ -s "prowler_changelog.md" ]; then
|
||||
echo "## SDK" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat prowler_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
echo "Combined changelog preview:"
|
||||
cat combined_changelog.md
|
||||
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
|
||||
with:
|
||||
tag_name: ${{ env.PROWLER_VERSION }}
|
||||
name: Prowler ${{ env.PROWLER_VERSION }}
|
||||
body_path: combined_changelog.md
|
||||
draft: true
|
||||
target_commitish: ${{ env.PATCH_VERSION == '0' && 'master' || env.BRANCH_NAME }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Clean up temporary files
|
||||
run: |
|
||||
rm -f prowler_changelog.md api_changelog.md ui_changelog.md combined_changelog.md
|
||||
86
.github/workflows/pull-request-check-changelog.yml
vendored
Normal file
86
.github/workflows/pull-request-check-changelog.yml
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
name: Check Changelog
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, labeled, unlabeled]
|
||||
|
||||
jobs:
|
||||
check-changelog:
|
||||
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
pull-requests: write
|
||||
env:
|
||||
MONITORED_FOLDERS: "api ui prowler"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get list of changed files
|
||||
id: changed_files
|
||||
run: |
|
||||
git fetch origin ${{ github.base_ref }}
|
||||
git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
|
||||
cat changed_files.txt
|
||||
|
||||
- name: Check for folder changes and changelog presence
|
||||
id: check_folders
|
||||
run: |
|
||||
missing_changelogs=""
|
||||
|
||||
for folder in $MONITORED_FOLDERS; do
|
||||
if grep -q "^${folder}/" changed_files.txt; then
|
||||
echo "Detected changes in ${folder}/"
|
||||
if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
|
||||
echo "No changelog update found for ${folder}/"
|
||||
missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
|
||||
echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find existing changelog comment
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository
|
||||
id: find_comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: '<!-- changelog-check -->'
|
||||
|
||||
- name: Comment on PR if changelog is missing
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs != ''
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.find_comment.outputs.comment-id }}
|
||||
body: |
|
||||
<!-- changelog-check -->
|
||||
⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
|
||||
|
||||
${{ steps.check_folders.outputs.missing_changelogs }}
|
||||
|
||||
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.
|
||||
|
||||
- name: Comment on PR if all changelogs are present
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs == ''
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.find_comment.outputs.comment-id }}
|
||||
body: |
|
||||
<!-- changelog-check -->
|
||||
✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉
|
||||
|
||||
- name: Fail if changelog is missing
|
||||
if: steps.check_folders.outputs.missing_changelogs != ''
|
||||
run: |
|
||||
echo "ERROR: Missing changelog updates in some folders."
|
||||
exit 1
|
||||
@@ -123,11 +123,11 @@ jobs:
|
||||
AWS_REGION: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
push: true
|
||||
tags: |
|
||||
@@ -140,7 +140,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
# Use local context to get changes
|
||||
# https://github.com/docker/build-push-action#path-context
|
||||
|
||||
2
.github/workflows/sdk-bump-version.yml
vendored
2
.github/workflows/sdk-bump-version.yml
vendored
@@ -97,6 +97,7 @@ jobs:
|
||||
commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
|
||||
branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
|
||||
title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
|
||||
labels: no-changelog
|
||||
body: |
|
||||
### Description
|
||||
|
||||
@@ -135,6 +136,7 @@ jobs:
|
||||
commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
|
||||
branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
|
||||
title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
|
||||
labels: no-changelog
|
||||
body: |
|
||||
### Description
|
||||
|
||||
|
||||
4
.github/workflows/sdk-codeql.yml
vendored
4
.github/workflows/sdk-codeql.yml
vendored
@@ -56,12 +56,12 @@ jobs:
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
|
||||
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/sdk-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
|
||||
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
26
.github/workflows/sdk-pull-request.yml
vendored
26
.github/workflows/sdk-pull-request.yml
vendored
@@ -102,8 +102,15 @@ jobs:
|
||||
run: |
|
||||
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
|
||||
|
||||
- name: Dockerfile - Check if Dockerfile has changed
|
||||
id: dockerfile-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
Dockerfile
|
||||
|
||||
- name: Hadolint
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
/tmp/hadolint Dockerfile --ignore=DL3013
|
||||
|
||||
@@ -212,6 +219,21 @@ jobs:
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
|
||||
|
||||
# Test IaC
|
||||
- name: IaC - Check if any file has changed
|
||||
id: iac-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/iac/**
|
||||
./tests/providers/iac/**
|
||||
.poetry.lock
|
||||
|
||||
- name: IaC - Test
|
||||
if: steps.iac-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
|
||||
|
||||
# Common Tests
|
||||
- name: Lib - Test
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
@@ -226,7 +248,7 @@ jobs:
|
||||
# Codecov
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
pip install boto3
|
||||
|
||||
- name: Configure AWS Credentials -- DEV
|
||||
uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0
|
||||
uses: aws-actions/configure-aws-credentials@b47578312673ae6fa5b5096b330d9fbac3d116df # v4.2.1
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_DEV }}
|
||||
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
|
||||
|
||||
@@ -30,6 +30,7 @@ env:
|
||||
# Container Registries
|
||||
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
|
||||
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
|
||||
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1
|
||||
|
||||
jobs:
|
||||
repository-check:
|
||||
@@ -76,16 +77,17 @@ jobs:
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
# Comment the following line for testing
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
|
||||
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
|
||||
# Set push: false for testing
|
||||
push: true
|
||||
tags: |
|
||||
@@ -96,11 +98,12 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
|
||||
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
|
||||
|
||||
4
.github/workflows/ui-codeql.yml
vendored
4
.github/workflows/ui-codeql.yml
vendored
@@ -48,12 +48,12 @@ jobs:
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
|
||||
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/ui-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
|
||||
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
107
.github/workflows/ui-e2e-tests.yml
vendored
Normal file
107
.github/workflows/ui-e2e-tests.yml
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
name: UI - E2E Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- "v5.*"
|
||||
paths:
|
||||
- '.github/workflows/ui-e2e-tests.yml'
|
||||
- 'ui/**'
|
||||
|
||||
jobs:
|
||||
e2e-tests:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
AUTH_SECRET: 'fallback-ci-secret-for-testing'
|
||||
AUTH_TRUST_HOST: true
|
||||
NEXTAUTH_URL: 'http://localhost:3000'
|
||||
NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- name: Start API services
|
||||
run: |
|
||||
docker compose up -d api worker worker-beat
|
||||
- name: Wait for API to be ready
|
||||
run: |
|
||||
echo "Waiting for prowler-api..."
|
||||
timeout=150 # 5 minutes max
|
||||
elapsed=0
|
||||
while [ $elapsed -lt $timeout ]; do
|
||||
if curl -s ${NEXT_PUBLIC_API_BASE_URL}/docs >/dev/null 2>&1; then
|
||||
echo "Prowler API is ready!"
|
||||
exit 0
|
||||
fi
|
||||
echo "Waiting for prowler-api... (${elapsed}s elapsed)"
|
||||
sleep 5
|
||||
elapsed=$((elapsed + 5))
|
||||
done
|
||||
echo "Timeout waiting for prowler-api to start"
|
||||
exit 1
|
||||
- name: Run database migrations
|
||||
run: |
|
||||
echo "Running Django migrations..."
|
||||
docker compose exec -T api sh -c '
|
||||
poetry run python manage.py migrate --database admin
|
||||
'
|
||||
echo "Database migrations completed!"
|
||||
# TODO: Delete this step once API image is built with new fixtures (Prowler 5.10.0)
|
||||
- name: Copy local fixtures into API container
|
||||
run: |
|
||||
# Get the actual container name dynamically
|
||||
CONTAINER_NAME=$(docker compose ps -q api)
|
||||
docker cp ./api/src/backend/api/fixtures/dev/. ${CONTAINER_NAME}:/home/prowler/backend/api/fixtures/dev
|
||||
- name: Load database fixtures for E2E tests
|
||||
run: |
|
||||
docker compose exec -T api sh -c '
|
||||
echo "Loading all fixtures from api/fixtures/dev/..."
|
||||
for fixture in api/fixtures/dev/*.json; do
|
||||
if [ -f "$fixture" ]; then
|
||||
echo "Loading $fixture"
|
||||
poetry run python manage.py loaddata "$fixture" --database admin
|
||||
fi
|
||||
done
|
||||
echo "All database fixtures loaded successfully!"
|
||||
'
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Install UI dependencies
|
||||
working-directory: ./ui
|
||||
run: npm ci
|
||||
- name: Build UI application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-playwright-
|
||||
- name: Install Playwright browsers
|
||||
working-directory: ./ui
|
||||
if: steps.playwright-cache.outputs.cache-hit != 'true'
|
||||
run: npm run test:e2e:install
|
||||
- name: Run E2E tests
|
||||
working-directory: ./ui
|
||||
run: npm run test:e2e
|
||||
- name: Upload test reports
|
||||
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
|
||||
if: failure()
|
||||
with:
|
||||
name: playwright-report
|
||||
path: ui/playwright-report/
|
||||
retention-days: 30
|
||||
- name: Cleanup services
|
||||
if: always()
|
||||
run: |
|
||||
echo "Shutting down services..."
|
||||
docker compose down -v || true
|
||||
echo "Cleanup completed"
|
||||
9
.github/workflows/ui-pull-request.yml
vendored
9
.github/workflows/ui-pull-request.yml
vendored
@@ -34,23 +34,26 @@ jobs:
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Install dependencies
|
||||
working-directory: ./ui
|
||||
run: npm install
|
||||
run: npm ci
|
||||
- name: Run Healthcheck
|
||||
working-directory: ./ui
|
||||
run: npm run healthcheck
|
||||
- name: Build the application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
- name: Build Container
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.UI_WORKING_DIR }}
|
||||
# Always build using `prod` target
|
||||
|
||||
13
.gitignore
vendored
13
.gitignore
vendored
@@ -44,6 +44,16 @@ junit-reports/
|
||||
|
||||
# Cursor files
|
||||
.cursorignore
|
||||
.cursor/
|
||||
|
||||
# RooCode files
|
||||
.roo/
|
||||
.rooignore
|
||||
.roomodes
|
||||
|
||||
# Cline files
|
||||
.cline/
|
||||
.clineignore
|
||||
|
||||
# Terraform
|
||||
.terraform*
|
||||
@@ -65,3 +75,6 @@ node_modules
|
||||
|
||||
# Persistent data
|
||||
_data/
|
||||
|
||||
# Compliance report
|
||||
*.pdf
|
||||
|
||||
@@ -115,7 +115,7 @@ repos:
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
entry: bash -c 'safety check --ignore 70612,66963,74429'
|
||||
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353'
|
||||
language: system
|
||||
|
||||
- id: vulture
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM python:3.12.10-slim-bookworm AS build
|
||||
FROM python:3.12.11-slim-bookworm AS build
|
||||
|
||||
LABEL maintainer="https://github.com/prowler-cloud/prowler"
|
||||
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
|
||||
@@ -6,7 +6,8 @@ LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
|
||||
ARG POWERSHELL_VERSION=7.5.0
|
||||
|
||||
# hadolint ignore=DL3008
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install PowerShell
|
||||
@@ -46,10 +47,6 @@ ENV PATH="${HOME}/.local/bin:${PATH}"
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
|
||||
# By default poetry does not compile Python source files to bytecode during installation.
|
||||
# This speeds up the installation process, but the first execution may take a little more
|
||||
# time because Python then compiles source files to bytecode automatically. If you want to
|
||||
# compile source files to bytecode during installation, you can use the --compile option
|
||||
RUN poetry install --compile && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
|
||||
14
README.md
14
README.md
@@ -87,11 +87,11 @@ prowler dashboard
|
||||
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|
||||
|---|---|---|---|---|
|
||||
| AWS | 567 | 82 | 36 | 10 |
|
||||
| GCP | 79 | 13 | 9 | 3 |
|
||||
| Azure | 142 | 18 | 10 | 3 |
|
||||
| GCP | 79 | 13 | 10 | 3 |
|
||||
| Azure | 142 | 18 | 11 | 3 |
|
||||
| Kubernetes | 83 | 7 | 5 | 7 |
|
||||
| GitHub | 16 | 2 | 1 | 0 |
|
||||
| M365 | 69 | 7 | 2 | 2 |
|
||||
| M365 | 69 | 7 | 3 | 2 |
|
||||
| NHN (Unofficial) | 6 | 2 | 1 | 0 |
|
||||
|
||||
> [!Note]
|
||||
@@ -136,6 +136,14 @@ If your workstation's architecture is incompatible, you can resolve this by:
|
||||
|
||||
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
|
||||
|
||||
### Common Issues with Docker Pull Installation
|
||||
|
||||
> [!Note]
|
||||
If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.md) section for more details and examples.
|
||||
|
||||
You can find more information in the [Troubleshooting](./docs/troubleshooting.md) section.
|
||||
|
||||
|
||||
### From GitHub
|
||||
|
||||
**Requirements**
|
||||
|
||||
168
api/.gitignore
vendored
168
api/.gitignore
vendored
@@ -1,168 +0,0 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
/_data/
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
||||
.pdm.toml
|
||||
.pdm-python
|
||||
.pdm-build/
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
*.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
.idea/
|
||||
|
||||
# VSCode
|
||||
.vscode/
|
||||
@@ -1,91 +0,0 @@
|
||||
repos:
|
||||
## GENERAL
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
- id: check-yaml
|
||||
args: ["--unsafe"]
|
||||
- id: check-json
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- id: no-commit-to-branch
|
||||
- id: pretty-format-json
|
||||
args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
|
||||
exclude: 'src/backend/api/fixtures/dev/.*\.json$'
|
||||
|
||||
## TOML
|
||||
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
|
||||
rev: v2.13.0
|
||||
hooks:
|
||||
- id: pretty-format-toml
|
||||
args: [--autofix]
|
||||
files: pyproject.toml
|
||||
|
||||
## BASH
|
||||
- repo: https://github.com/koalaman/shellcheck-precommit
|
||||
rev: v0.10.0
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
exclude: contrib
|
||||
## PYTHON
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.5.0
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
args: [ --fix ]
|
||||
# Run the formatter.
|
||||
- id: ruff-format
|
||||
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 1.8.0
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
args: ["--directory=src"]
|
||||
- id: poetry-lock
|
||||
args: ["--no-update", "--directory=src"]
|
||||
|
||||
- repo: https://github.com/hadolint/hadolint
|
||||
rev: v2.13.0-beta
|
||||
hooks:
|
||||
- id: hadolint
|
||||
args: ["--ignore=DL3013", "Dockerfile"]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: trufflehog
|
||||
name: TruffleHog
|
||||
description: Detect secrets in your data.
|
||||
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
|
||||
# For running trufflehog in docker, use the following entry instead:
|
||||
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
|
||||
language: system
|
||||
stages: ["commit", "push"]
|
||||
|
||||
- id: bandit
|
||||
name: bandit
|
||||
description: "Bandit is a tool for finding common security issues in Python code"
|
||||
entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
entry: bash -c 'poetry run safety check --ignore 70612,66963,74429'
|
||||
language: system
|
||||
|
||||
- id: vulture
|
||||
name: vulture
|
||||
description: "Vulture finds unused code in Python programs."
|
||||
entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
163
api/CHANGELOG.md
163
api/CHANGELOG.md
@@ -2,60 +2,140 @@
|
||||
|
||||
All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
## [v1.9.0] (Prowler UNRELEASED)
|
||||
## [1.11.0] (Prowler UNRELEASED)
|
||||
|
||||
### Added
|
||||
- Github provider support [(#8271)](https://github.com/prowler-cloud/prowler/pull/8271)
|
||||
- PDF reporting for Prowler TheatScore [(#8018)](https://github.com/prowler-cloud/prowler/pull/8018)
|
||||
|
||||
- Added SSO with SAML support [(#7822)](https://github.com/prowler-cloud/prowler/pull/7822).
|
||||
- Support GCP Service Account key. [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
|
||||
- Added new `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877).
|
||||
---
|
||||
|
||||
## [1.10.2] (Prowler v5.9.2)
|
||||
|
||||
### Changed
|
||||
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877).
|
||||
- Optimized queries for resources views [(#8336)](https://github.com/prowler-cloud/prowler/pull/8336)
|
||||
|
||||
---
|
||||
|
||||
## [v1.10.1] (Prowler v5.9.1)
|
||||
|
||||
### Fixed
|
||||
- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)
|
||||
|
||||
---
|
||||
|
||||
## [v1.10.0] (Prowler v5.9.0)
|
||||
|
||||
### Added
|
||||
- SSO with SAML support [(#8175)](https://github.com/prowler-cloud/prowler/pull/8175)
|
||||
- `GET /resources/metadata`, `GET /resources/metadata/latest` and `GET /resources/latest` to expose resource metadata and latest scan results [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
|
||||
### Changed
|
||||
- `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported to allow to mute findings.
|
||||
- Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
- Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
|
||||
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
|
||||
|
||||
### Fixed
|
||||
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
- RBAC is now applied to `GET /overviews/providers` [(#8277)](https://github.com/prowler-cloud/prowler/pull/8277)
|
||||
|
||||
### Changed
|
||||
- `POST /schedules/daily` returns a `409 CONFLICT` if already created [(#8258)](https://github.com/prowler-cloud/prowler/pull/8258)
|
||||
|
||||
### Security
|
||||
- Enhanced password validation to enforce 12+ character passwords with special characters, uppercase, lowercase, and numbers [(#8225)](https://github.com/prowler-cloud/prowler/pull/8225)
|
||||
|
||||
---
|
||||
|
||||
## [v1.9.1] (Prowler v5.8.1)
|
||||
|
||||
### Added
|
||||
- Custom exception for provider connection errors during scans [(#8234)](https://github.com/prowler-cloud/prowler/pull/8234)
|
||||
|
||||
### Changed
|
||||
- Summary and overview tasks now use a dedicated queue and no longer propagate errors to compliance tasks [(#8214)](https://github.com/prowler-cloud/prowler/pull/8214)
|
||||
|
||||
### Fixed
|
||||
- Scan with no resources will not trigger legacy code for findings metadata [(#8183)](https://github.com/prowler-cloud/prowler/pull/8183)
|
||||
- Invitation email comparison case-insensitive [(#8206)](https://github.com/prowler-cloud/prowler/pull/8206)
|
||||
|
||||
### Removed
|
||||
- Validation of the provider's secret type during updates [(#8197)](https://github.com/prowler-cloud/prowler/pull/8197)
|
||||
|
||||
---
|
||||
|
||||
## [v1.9.0] (Prowler v5.8.0)
|
||||
|
||||
### Added
|
||||
- Support GCP Service Account key [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
|
||||
- `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
|
||||
- Lighthouse configuration support [(#7848)](https://github.com/prowler-cloud/prowler/pull/7848)
|
||||
|
||||
### Changed
|
||||
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
|
||||
- Optional `user` and `password` for M365 provider [(#7992)](https://github.com/prowler-cloud/prowler/pull/7992)
|
||||
|
||||
### Fixed
|
||||
- Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.5] (Prowler v5.7.5)
|
||||
|
||||
### Fixed
|
||||
- Normalize provider UID to ensure safe and unique export directory paths [(#8007)](https://github.com/prowler-cloud/prowler/pull/8007).
|
||||
- Blank resource types in `/metadata` endpoints [(#8027)](https://github.com/prowler-cloud/prowler/pull/8027)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.4] (Prowler v5.7.4)
|
||||
|
||||
### Removed
|
||||
- Reverted RLS transaction handling and DB custom backend [(#7994)](https://github.com/prowler-cloud/prowler/pull/7994)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.3] (Prowler v5.7.3)
|
||||
|
||||
### Added
|
||||
- Database backend to handle already closed connections [(#7935)](https://github.com/prowler-cloud/prowler/pull/7935).
|
||||
- Database backend to handle already closed connections [(#7935)](https://github.com/prowler-cloud/prowler/pull/7935)
|
||||
|
||||
### Changed
|
||||
- Renamed field encrypted_password to password for M365 provider [(#7784)](https://github.com/prowler-cloud/prowler/pull/7784)
|
||||
|
||||
### Fixed
|
||||
- Fixed transaction persistence with RLS operations [(#7916)](https://github.com/prowler-cloud/prowler/pull/7916).
|
||||
- Reverted the change `get_with_retry` to use the original `get` method for retrieving tasks [(#7932)](https://github.com/prowler-cloud/prowler/pull/7932).
|
||||
|
||||
- Transaction persistence with RLS operations [(#7916)](https://github.com/prowler-cloud/prowler/pull/7916)
|
||||
- Reverted the change `get_with_retry` to use the original `get` method for retrieving tasks [(#7932)](https://github.com/prowler-cloud/prowler/pull/7932)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.2] (Prowler v5.7.2)
|
||||
|
||||
### Fixed
|
||||
- Fixed task lookup to use task_kwargs instead of task_args for scan report resolution. [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
|
||||
- Fixed Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
|
||||
- Fixed the connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
|
||||
- Fixed a race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876).
|
||||
- Fixed an error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890).
|
||||
- Task lookup to use task_kwargs instead of task_args for scan report resolution [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
|
||||
- Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
|
||||
- Connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
|
||||
- Race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876)
|
||||
- Error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.1] (Prowler v5.7.1)
|
||||
|
||||
### Fixed
|
||||
- Added database index to improve performance on finding lookup [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800).
|
||||
- Added database index to improve performance on finding lookup [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.0] (Prowler v5.7.0)
|
||||
|
||||
### Added
|
||||
- Added huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
|
||||
- Added improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
|
||||
- Added new queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
|
||||
- Added new endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743).
|
||||
- Added export support for Prowler ThreatScore in M365 [(7783)](https://github.com/prowler-cloud/prowler/pull/7783)
|
||||
- Huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- Improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- Queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- New endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743)
|
||||
- Export support for Prowler ThreatScore in M365 [(7783)](https://github.com/prowler-cloud/prowler/pull/7783)
|
||||
|
||||
---
|
||||
|
||||
@@ -63,9 +143,9 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
### Added
|
||||
|
||||
- Added M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563).
|
||||
- Added a `compliance/` folder and ZIP‐export functionality for all compliance reports.[(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).
|
||||
- Added a new API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).
|
||||
- M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563)
|
||||
- `compliance/` folder and ZIP‐export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
|
||||
- API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
|
||||
|
||||
---
|
||||
|
||||
@@ -73,43 +153,42 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
### Added
|
||||
|
||||
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).
|
||||
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289).
|
||||
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333).
|
||||
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378).
|
||||
- Added missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318).
|
||||
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167)
|
||||
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289)
|
||||
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333)
|
||||
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378)
|
||||
- Missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.4] (Prowler v5.4.4)
|
||||
|
||||
### Fixed
|
||||
- Fixed a bug with periodic tasks when trying to delete a provider ([#7466])(https://github.com/prowler-cloud/prowler/pull/7466).
|
||||
- Bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.3] (Prowler v5.4.3)
|
||||
|
||||
### Fixed
|
||||
- Added duplicated scheduled scans handling ([#7401])(https://github.com/prowler-cloud/prowler/pull/7401).
|
||||
- Added environment variable to configure the deletion task batch size ([#7423])(https://github.com/prowler-cloud/prowler/pull/7423).
|
||||
- Duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401)
|
||||
- Environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.2] (Prowler v5.4.2)
|
||||
|
||||
### Changed
|
||||
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349).
|
||||
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.1] (Prowler v5.4.1)
|
||||
|
||||
### Fixed
|
||||
- Added a handled response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183).
|
||||
- Fixed a race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172).
|
||||
- Handled exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283).
|
||||
|
||||
- Handle response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183)
|
||||
- Race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172)
|
||||
- Handle exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283)
|
||||
|
||||
---
|
||||
|
||||
@@ -117,20 +196,20 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
### Added
|
||||
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
|
||||
- Add API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878).
|
||||
- API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878)
|
||||
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)
|
||||
|
||||
### Changed
|
||||
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019).
|
||||
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019)
|
||||
|
||||
---
|
||||
|
||||
## [v1.4.0] (Prowler v5.3.0)
|
||||
|
||||
### Changed
|
||||
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
|
||||
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
|
||||
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
|
||||
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
|
||||
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700)
|
||||
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800)
|
||||
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863)
|
||||
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869)
|
||||
|
||||
---
|
||||
|
||||
@@ -57,10 +57,6 @@ RUN poetry install --no-root && \
|
||||
|
||||
RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
|
||||
|
||||
# Prevents known compatibility error between lxml and libxml2/libxmlsec versions.
|
||||
# See: https://github.com/xmlsec/python-xmlsec/issues/320
|
||||
RUN poetry run pip install --force-reinstall --no-binary lxml lxml
|
||||
|
||||
COPY src/backend/ ./backend/
|
||||
COPY docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
|
||||
|
||||
@@ -257,7 +257,7 @@ cd src/backend
|
||||
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
|
||||
```
|
||||
|
||||
> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`
|
||||
> The default credentials are `dev@prowler.com:Thisisapassword123@` or `dev2@prowler.com:Thisisapassword123@`
|
||||
|
||||
## Run tests
|
||||
|
||||
|
||||
@@ -3,6 +3,10 @@
|
||||
|
||||
apply_migrations() {
|
||||
echo "Applying database migrations..."
|
||||
|
||||
# Fix Inconsistent migration history after adding sites app
|
||||
poetry run python manage.py check_and_fix_socialaccount_sites_migration --database admin
|
||||
|
||||
poetry run python manage.py migrate --database admin
|
||||
}
|
||||
|
||||
@@ -28,7 +32,7 @@ start_prod_server() {
|
||||
|
||||
start_worker() {
|
||||
echo "Starting the worker..."
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill -E --max-tasks-per-child 1
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview -E --max-tasks-per-child 1
|
||||
}
|
||||
|
||||
start_worker_beat() {
|
||||
|
||||
1212
api/poetry.lock
generated
1212
api/poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -23,11 +23,16 @@ dependencies = [
|
||||
"drf-spectacular==0.27.2",
|
||||
"drf-spectacular-jsonapi==0.5.1",
|
||||
"gunicorn==23.0.0",
|
||||
"lxml==5.3.2",
|
||||
"prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
|
||||
"psycopg2-binary==2.9.9",
|
||||
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
|
||||
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
|
||||
"uuid6==2024.7.10"
|
||||
"uuid6==2024.7.10",
|
||||
"openai (>=1.82.0,<2.0.0)",
|
||||
"xmlsec==1.3.14",
|
||||
"matplotlib>=3.8.0,<4.0.0",
|
||||
"reportlab>=4.0.0,<5.0.0"
|
||||
]
|
||||
description = "Prowler's API (Django/DRF)"
|
||||
license = "Apache-2.0"
|
||||
@@ -35,7 +40,7 @@ name = "prowler-api"
|
||||
package-mode = false
|
||||
# Needed for the SDK compatibility
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.9.0"
|
||||
version = "1.10.2"
|
||||
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
@@ -3,14 +3,7 @@ from django.db import transaction
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
Membership,
|
||||
Role,
|
||||
SAMLConfiguration,
|
||||
Tenant,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
from api.models import Membership, Role, Tenant, User, UserRoleRelationship
|
||||
|
||||
|
||||
class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
@@ -24,7 +17,7 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
def pre_social_login(self, request, sociallogin):
|
||||
# Link existing accounts with the same email address
|
||||
email = sociallogin.account.extra_data.get("email")
|
||||
if sociallogin.account.provider == "saml":
|
||||
if sociallogin.provider.id == "saml":
|
||||
email = sociallogin.user.email
|
||||
if email:
|
||||
existing_user = self.get_user_by_email(email)
|
||||
@@ -38,57 +31,10 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
"""
|
||||
with transaction.atomic(using=MainRouter.admin_db):
|
||||
user = super().save_user(request, sociallogin, form)
|
||||
provider = sociallogin.account.provider
|
||||
provider = sociallogin.provider.id
|
||||
extra = sociallogin.account.extra_data
|
||||
|
||||
if provider == "saml":
|
||||
# Handle SAML-specific logic
|
||||
user.first_name = extra.get("firstName", [""])[0]
|
||||
user.last_name = extra.get("lastName", [""])[0]
|
||||
user.company_name = extra.get("organization", [""])[0]
|
||||
user.name = f"{user.first_name} {user.last_name}".strip()
|
||||
user.save(using=MainRouter.admin_db)
|
||||
|
||||
email_domain = user.email.split("@")[-1]
|
||||
tenant = (
|
||||
SAMLConfiguration.objects.using(MainRouter.admin_db)
|
||||
.get(email_domain=email_domain)
|
||||
.tenant
|
||||
)
|
||||
|
||||
with rls_transaction(str(tenant.id)):
|
||||
role_name = extra.get("userType", ["saml_default_role"])[0].strip()
|
||||
|
||||
try:
|
||||
role = Role.objects.using(MainRouter.admin_db).get(
|
||||
name=role_name, tenant_id=tenant.id
|
||||
)
|
||||
except Role.DoesNotExist:
|
||||
role = Role.objects.using(MainRouter.admin_db).create(
|
||||
name=role_name,
|
||||
tenant_id=tenant.id,
|
||||
manage_users=False,
|
||||
manage_account=False,
|
||||
manage_billing=False,
|
||||
manage_providers=False,
|
||||
manage_integrations=False,
|
||||
manage_scans=False,
|
||||
unlimited_visibility=False,
|
||||
)
|
||||
|
||||
Membership.objects.using(MainRouter.admin_db).create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.MEMBER,
|
||||
)
|
||||
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
else:
|
||||
if provider != "saml":
|
||||
# Handle other providers (e.g., GitHub, Google)
|
||||
user.save(using=MainRouter.admin_db)
|
||||
social_account_name = extra.get("name")
|
||||
@@ -119,5 +65,7 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
else:
|
||||
request.session["saml_user_created"] = str(user.id)
|
||||
|
||||
return user
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db import transaction
|
||||
from rest_framework import permissions
|
||||
from rest_framework.exceptions import NotAuthenticated
|
||||
from rest_framework.filters import SearchFilter
|
||||
@@ -46,9 +47,11 @@ class BaseViewSet(ModelViewSet):
|
||||
|
||||
|
||||
class BaseRLSViewSet(BaseViewSet):
|
||||
def initial(self, request, *args, **kwargs):
|
||||
super().initial(request, *args, **kwargs)
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
# Ideally, this logic would be in the `.setup()` method but DRF view sets don't call it
|
||||
# https://docs.djangoproject.com/en/5.1/ref/class-based-views/base/#django.views.generic.base.View.setup
|
||||
if request.auth is None:
|
||||
@@ -58,19 +61,9 @@ class BaseRLSViewSet(BaseViewSet):
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
self.request.tenant_id = tenant_id
|
||||
|
||||
self._rls_cm = rls_transaction(tenant_id)
|
||||
self._rls_cm.__enter__()
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
response = super().finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
if hasattr(self, "_rls_cm"):
|
||||
self._rls_cm.__exit__(None, None, None)
|
||||
del self._rls_cm
|
||||
|
||||
return response
|
||||
with rls_transaction(tenant_id):
|
||||
self.request.tenant_id = tenant_id
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
@@ -80,7 +73,8 @@ class BaseRLSViewSet(BaseViewSet):
|
||||
|
||||
class BaseTenantViewset(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
tenant = super().dispatch(request, *args, **kwargs)
|
||||
with transaction.atomic():
|
||||
tenant = super().dispatch(request, *args, **kwargs)
|
||||
|
||||
try:
|
||||
# If the request is a POST, create the admin role
|
||||
@@ -115,8 +109,6 @@ class BaseTenantViewset(BaseViewSet):
|
||||
pass # Tenant might not exist, handle gracefully
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
super().initial(request, *args, **kwargs)
|
||||
|
||||
if request.auth is None:
|
||||
raise NotAuthenticated
|
||||
|
||||
@@ -125,27 +117,19 @@ class BaseTenantViewset(BaseViewSet):
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
user_id = str(request.user.id)
|
||||
|
||||
self._rls_cm = rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR)
|
||||
self._rls_cm.__enter__()
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
response = super().finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
if hasattr(self, "_rls_cm"):
|
||||
self._rls_cm.__exit__(None, None, None)
|
||||
del self._rls_cm
|
||||
|
||||
return response
|
||||
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
|
||||
class BaseUserViewset(BaseViewSet):
|
||||
def initial(self, request, *args, **kwargs):
|
||||
super().initial(request, *args, **kwargs)
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
# TODO refactor after improving RLS on users
|
||||
if request.stream is not None and request.stream.method == "POST":
|
||||
return
|
||||
return super().initial(request, *args, **kwargs)
|
||||
if request.auth is None:
|
||||
raise NotAuthenticated
|
||||
|
||||
@@ -153,16 +137,6 @@ class BaseUserViewset(BaseViewSet):
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
self.request.tenant_id = tenant_id
|
||||
|
||||
self._rls_cm = rls_transaction(tenant_id)
|
||||
self._rls_cm.__enter__()
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
response = super().finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
if hasattr(self, "_rls_cm"):
|
||||
self._rls_cm.__exit__(None, None, None)
|
||||
del self._rls_cm
|
||||
|
||||
return response
|
||||
with rls_transaction(tenant_id):
|
||||
self.request.tenant_id = tenant_id
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
@@ -196,6 +196,10 @@ def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
requirement_dict = {
|
||||
"name": requirement.Name or requirement.Id,
|
||||
"description": requirement.Description,
|
||||
"tactics": getattr(requirement, "Tactics", []),
|
||||
"subtechniques": getattr(requirement, "SubTechniques", []),
|
||||
"platforms": getattr(requirement, "Platforms", []),
|
||||
"technique_url": getattr(requirement, "TechniqueURL", ""),
|
||||
"attributes": [
|
||||
dict(attribute) for attribute in requirement.Attributes
|
||||
],
|
||||
|
||||
@@ -175,6 +175,29 @@ def create_objects_in_batches(
|
||||
model.objects.bulk_create(chunk, batch_size)
|
||||
|
||||
|
||||
def update_objects_in_batches(
|
||||
tenant_id: str, model, objects: list, fields: list, batch_size: int = 500
|
||||
):
|
||||
"""
|
||||
Bulk-update model instances in repeated, per-tenant RLS transactions.
|
||||
|
||||
All chunks execute in their own transaction, so no single transaction
|
||||
grows too large.
|
||||
|
||||
Args:
|
||||
tenant_id (str): UUID string of the tenant under which to set RLS.
|
||||
model: Django model class whose `.objects.bulk_update()` will be called.
|
||||
objects (list): List of model instances (saved) to bulk-update.
|
||||
fields (list): List of field names to update.
|
||||
batch_size (int): Maximum number of objects per bulk_update call.
|
||||
"""
|
||||
total = len(objects)
|
||||
for start in range(0, total, batch_size):
|
||||
chunk = objects[start : start + batch_size]
|
||||
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
|
||||
model.objects.bulk_update(chunk, fields, batch_size)
|
||||
|
||||
|
||||
# Postgres Enums
|
||||
|
||||
|
||||
@@ -529,3 +552,15 @@ class IntegrationTypeEnum(EnumType):
|
||||
class IntegrationTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("integration_type", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Processor type
|
||||
|
||||
|
||||
class ProcessorTypeEnum(EnumType):
|
||||
enum_type_name = "processor_type"
|
||||
|
||||
|
||||
class ProcessorTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("processor_type", *args, **kwargs)
|
||||
|
||||
@@ -57,6 +57,11 @@ class TaskInProgressException(TaskManagementError):
|
||||
super().__init__()
|
||||
|
||||
|
||||
# Provider connection errors
|
||||
class ProviderConnectionError(Exception):
|
||||
"""Base exception for provider connection errors."""
|
||||
|
||||
|
||||
def custom_exception_handler(exc, context):
|
||||
if isinstance(exc, django_validation_error):
|
||||
if hasattr(exc, "error_dict"):
|
||||
@@ -73,3 +78,21 @@ def custom_exception_handler(exc, context):
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
return exception_handler(exc, context)
|
||||
|
||||
|
||||
class ConflictException(APIException):
|
||||
status_code = status.HTTP_409_CONFLICT
|
||||
default_detail = "A conflict occurred. The resource already exists."
|
||||
default_code = "conflict"
|
||||
|
||||
def __init__(self, detail=None, code=None, pointer=None):
|
||||
error_detail = {
|
||||
"detail": detail or self.default_detail,
|
||||
"status": self.status_code,
|
||||
"code": self.default_code,
|
||||
}
|
||||
|
||||
if pointer:
|
||||
error_detail["source"] = {"pointer": pointer}
|
||||
|
||||
super().__init__(detail=[error_detail])
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
|
||||
from dateutil.parser import parse
|
||||
from django.conf import settings
|
||||
from django.db.models import Q
|
||||
from django_filters.rest_framework import (
|
||||
@@ -28,6 +29,7 @@ from api.models import (
|
||||
Invitation,
|
||||
Membership,
|
||||
PermissionChoices,
|
||||
Processor,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
ProviderSecret,
|
||||
@@ -338,6 +340,8 @@ class ResourceFilter(ProviderRelationshipFilterSet):
|
||||
tags = CharFilter(method="filter_tag")
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
scan = UUIDFilter(field_name="provider__scan", lookup_expr="exact")
|
||||
scan__in = UUIDInFilter(field_name="provider__scan", lookup_expr="in")
|
||||
|
||||
class Meta:
|
||||
model = Resource
|
||||
@@ -352,6 +356,82 @@ class ResourceFilter(ProviderRelationshipFilterSet):
|
||||
"updated_at": ["gte", "lte"],
|
||||
}
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
|
||||
self.data.get("updated_at")
|
||||
or self.data.get("updated_at__date")
|
||||
or self.data.get("updated_at__gte")
|
||||
or self.data.get("updated_at__lte")
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": "At least one date filter is required: filter[updated_at], filter[updated_at.gte], "
|
||||
"or filter[updated_at.lte].",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/updated_at"},
|
||||
"code": "required",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
gte_date = (
|
||||
parse(self.data.get("updated_at__gte")).date()
|
||||
if self.data.get("updated_at__gte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
lte_date = (
|
||||
parse(self.data.get("updated_at__lte")).date()
|
||||
if self.data.get("updated_at__lte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
|
||||
if abs(lte_date - gte_date) > timedelta(
|
||||
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/updated_at"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
def filter_tag_key(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
|
||||
|
||||
def filter_tag_value(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))
|
||||
|
||||
def filter_tag(self, queryset, name, value):
|
||||
# We won't know what the user wants to filter on just based on the value,
|
||||
# and we don't want to build special filtering logic for every possible
|
||||
# provider tag spec, so we'll just do a full text search
|
||||
return queryset.filter(tags__text_search=value)
|
||||
|
||||
|
||||
class LatestResourceFilter(ProviderRelationshipFilterSet):
|
||||
tag_key = CharFilter(method="filter_tag_key")
|
||||
tag_value = CharFilter(method="filter_tag_value")
|
||||
tag = CharFilter(method="filter_tag")
|
||||
tags = CharFilter(method="filter_tag")
|
||||
|
||||
class Meta:
|
||||
model = Resource
|
||||
fields = {
|
||||
"provider": ["exact", "in"],
|
||||
"uid": ["exact", "icontains"],
|
||||
"name": ["exact", "icontains"],
|
||||
"region": ["exact", "icontains", "in"],
|
||||
"service": ["exact", "icontains", "in"],
|
||||
"type": ["exact", "icontains", "in"],
|
||||
}
|
||||
|
||||
def filter_tag_key(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
|
||||
|
||||
@@ -704,3 +784,12 @@ class IntegrationFilter(FilterSet):
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
}
|
||||
|
||||
|
||||
class ProcessorFilter(FilterSet):
|
||||
processor_type = ChoiceFilter(choices=Processor.ProcessorChoices.choices)
|
||||
processor_type__in = ChoiceInFilter(
|
||||
choices=Processor.ProcessorChoices.choices,
|
||||
field_name="processor_type",
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"model": "api.user",
|
||||
"pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
|
||||
"fields": {
|
||||
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
|
||||
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
|
||||
"last_login": null,
|
||||
"name": "Devie Prowlerson",
|
||||
"email": "dev@prowler.com",
|
||||
@@ -16,7 +16,7 @@
|
||||
"model": "api.user",
|
||||
"pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
|
||||
"fields": {
|
||||
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
|
||||
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
|
||||
"last_login": null,
|
||||
"name": "Devietoo Prowlerson",
|
||||
"email": "dev2@prowler.com",
|
||||
@@ -24,5 +24,18 @@
|
||||
"is_active": true,
|
||||
"date_joined": "2024-09-18T09:04:20.850Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.user",
|
||||
"pk": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
|
||||
"fields": {
|
||||
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
|
||||
"last_login": null,
|
||||
"name": "E2E Test User",
|
||||
"email": "e2e@prowler.com",
|
||||
"company_name": "Prowler E2E Tests",
|
||||
"is_active": true,
|
||||
"date_joined": "2024-01-01T00:00:00.850Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -46,5 +46,24 @@
|
||||
"role": "member",
|
||||
"date_joined": "2024-09-19T11:03:59.712Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.tenant",
|
||||
"pk": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
|
||||
"fields": {
|
||||
"inserted_at": "2024-01-01T00:00:00Z",
|
||||
"updated_at": "2024-01-01T00:00:00Z",
|
||||
"name": "E2E Test Tenant"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.membership",
|
||||
"pk": "9b1a2c3d-4e5f-6789-abc1-23456789def0",
|
||||
"fields": {
|
||||
"user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
|
||||
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
|
||||
"role": "owner",
|
||||
"date_joined": "2024-01-01T00:00:00.000Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -149,5 +149,32 @@
|
||||
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
|
||||
"inserted_at": "2024-11-20T15:36:14.302Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.role",
|
||||
"pk": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
|
||||
"fields": {
|
||||
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
|
||||
"name": "e2e_admin",
|
||||
"manage_users": true,
|
||||
"manage_account": true,
|
||||
"manage_billing": true,
|
||||
"manage_providers": true,
|
||||
"manage_integrations": true,
|
||||
"manage_scans": true,
|
||||
"unlimited_visibility": true,
|
||||
"inserted_at": "2024-01-01T00:00:00.000Z",
|
||||
"updated_at": "2024-01-01T00:00:00.000Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.userrolerelationship",
|
||||
"pk": "f1e2d3c4-b5a6-9876-5432-10fedcba9876",
|
||||
"fields": {
|
||||
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
|
||||
"role": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
|
||||
"user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
|
||||
"inserted_at": "2024-01-01T00:00:00.000Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
from django.contrib.sites.models import Site
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
|
||||
from django.db.migrations.recorder import MigrationRecorder
|
||||
|
||||
|
||||
def table_exists(table_name):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM information_schema.tables
|
||||
WHERE table_name = %s
|
||||
)
|
||||
""",
|
||||
[table_name],
|
||||
)
|
||||
return cursor.fetchone()[0]
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Fix migration inconsistency between socialaccount and sites"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--database",
|
||||
default=DEFAULT_DB_ALIAS,
|
||||
help="Specifies the database to operate on.",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
db = options["database"]
|
||||
connection = connections[db]
|
||||
recorder = MigrationRecorder(connection)
|
||||
|
||||
applied = set(recorder.applied_migrations())
|
||||
|
||||
has_social = ("socialaccount", "0001_initial") in applied
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_name = 'django_site'
|
||||
);
|
||||
"""
|
||||
)
|
||||
site_table_exists = cursor.fetchone()[0]
|
||||
|
||||
if has_social and not site_table_exists:
|
||||
self.stdout.write(
|
||||
f"Detected inconsistency in '{db}'. Creating 'django_site' table manually..."
|
||||
)
|
||||
|
||||
with transaction.atomic(using=db):
|
||||
with connection.schema_editor() as schema_editor:
|
||||
schema_editor.create_model(Site)
|
||||
|
||||
recorder.record_applied("sites", "0001_initial")
|
||||
recorder.record_applied("sites", "0002_alter_domain_unique")
|
||||
|
||||
self.stdout.write(
|
||||
"Fixed: 'django_site' table created and migrations registered."
|
||||
)
|
||||
|
||||
# Ensure the relationship table also exists
|
||||
if not table_exists("socialaccount_socialapp_sites"):
|
||||
self.stdout.write(
|
||||
"Detected missing 'socialaccount_socialapp_sites' table. Creating manually..."
|
||||
)
|
||||
with connection.schema_editor() as schema_editor:
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
schema_editor.create_model(
|
||||
SocialApp._meta.get_field("sites").remote_field.through
|
||||
)
|
||||
self.stdout.write(
|
||||
"Fixed: 'socialaccount_socialapp_sites' table created."
|
||||
)
|
||||
107
api/src/backend/api/migrations/0030_lighthouseconfiguration.py
Normal file
107
api/src/backend/api/migrations/0030_lighthouseconfiguration.py
Normal file
@@ -0,0 +1,107 @@
|
||||
# Generated by Django 5.1.10 on 2025-06-12 12:45
|
||||
|
||||
import uuid
|
||||
|
||||
import django.core.validators
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0029_findings_check_index_parent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="LighthouseConfiguration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
help_text="Name of the configuration",
|
||||
max_length=100,
|
||||
validators=[django.core.validators.MinLengthValidator(3)],
|
||||
),
|
||||
),
|
||||
(
|
||||
"api_key",
|
||||
models.BinaryField(
|
||||
help_text="Encrypted API key for the LLM service"
|
||||
),
|
||||
),
|
||||
(
|
||||
"model",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("gpt-4o-2024-11-20", "GPT-4o v2024-11-20"),
|
||||
("gpt-4o-2024-08-06", "GPT-4o v2024-08-06"),
|
||||
("gpt-4o-2024-05-13", "GPT-4o v2024-05-13"),
|
||||
("gpt-4o", "GPT-4o Default"),
|
||||
("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
|
||||
("gpt-4o-mini", "GPT-4o Mini Default"),
|
||||
],
|
||||
default="gpt-4o-2024-08-06",
|
||||
help_text="Must be one of the supported model names",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"temperature",
|
||||
models.FloatField(default=0, help_text="Must be between 0 and 1"),
|
||||
),
|
||||
(
|
||||
"max_tokens",
|
||||
models.IntegerField(
|
||||
default=4000, help_text="Must be between 500 and 5000"
|
||||
),
|
||||
),
|
||||
(
|
||||
"business_context",
|
||||
models.TextField(
|
||||
blank=True,
|
||||
default="",
|
||||
help_text="Additional business context for this AI model configuration",
|
||||
),
|
||||
),
|
||||
("is_active", models.BooleanField(default=True)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "lighthouse_configurations",
|
||||
"abstract": False,
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id",),
|
||||
name="unique_lighthouse_config_per_tenant",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="lighthouseconfiguration",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_lighthouseconfiguration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 5.1.10 on 2025-06-23 10:04
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0030_lighthouseconfiguration"),
|
||||
("django_celery_beat", "0019_alter_periodictasks_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="scan",
|
||||
name="scheduler_task",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="django_celery_beat.periodictask",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,57 +1,61 @@
|
||||
# Generated by Django 5.1.8 on 2025-05-15 09:54
|
||||
# Generated by Django 5.1.10 on 2025-07-02 15:47
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0029_findings_check_index_parent"),
|
||||
("api", "0031_scan_disable_on_cascade_periodic_tasks"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="integration",
|
||||
name="integration_type",
|
||||
field=api.db_utils.IntegrationTypeEnumField(
|
||||
choices=[
|
||||
("amazon_s3", "Amazon S3"),
|
||||
("aws_security_hub", "AWS Security Hub"),
|
||||
("jira", "JIRA"),
|
||||
("slack", "Slack"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SAMLDomainIndex",
|
||||
name="SAMLToken",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("email_domain", models.CharField(max_length=254, unique=True)),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("expires_at", models.DateTimeField(editable=False)),
|
||||
("token", models.JSONField(unique=True)),
|
||||
(
|
||||
"tenant",
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "saml_domain_index",
|
||||
"db_table": "saml_tokens",
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="samldomainindex",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("email_domain", "tenant"),
|
||||
name="unique_resources_by_email_domain",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="samldomainindex",
|
||||
constraint=api.rls.BaseSecurityConstraint(
|
||||
name="statements_on_samldomainindex",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SAMLConfiguration",
|
||||
fields=[
|
||||
@@ -105,16 +109,42 @@ class Migration(migrations.Migration):
|
||||
fields=("tenant",), name="unique_samlconfig_per_tenant"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="integration",
|
||||
name="integration_type",
|
||||
field=api.db_utils.IntegrationTypeEnumField(
|
||||
choices=[
|
||||
("amazon_s3", "Amazon S3"),
|
||||
("aws_security_hub", "AWS Security Hub"),
|
||||
("jira", "JIRA"),
|
||||
("slack", "Slack"),
|
||||
]
|
||||
migrations.CreateModel(
|
||||
name="SAMLDomainIndex",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("email_domain", models.CharField(max_length=254, unique=True)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "saml_domain_index",
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="samldomainindex",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("email_domain", "tenant"),
|
||||
name="unique_resources_by_email_domain",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="samldomainindex",
|
||||
constraint=api.rls.BaseSecurityConstraint(
|
||||
name="statements_on_samldomainindex",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
34
api/src/backend/api/migrations/0033_processors_enum.py
Normal file
34
api/src/backend/api/migrations/0033_processors_enum.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import PostgresEnumMigration, ProcessorTypeEnum, register_enum
|
||||
from api.models import Processor
|
||||
|
||||
ProcessorTypeEnumMigration = PostgresEnumMigration(
|
||||
enum_name="processor_type",
|
||||
enum_values=tuple(
|
||||
processor_type[0] for processor_type in Processor.ProcessorChoices.choices
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0032_saml"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
ProcessorTypeEnumMigration.create_enum_type,
|
||||
reverse_code=ProcessorTypeEnumMigration.drop_enum_type,
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(register_enum, enum_class=ProcessorTypeEnum),
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
]
|
||||
88
api/src/backend/api/migrations/0034_processors.py
Normal file
88
api/src/backend/api/migrations/0034_processors.py
Normal file
@@ -0,0 +1,88 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-26 13:04
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0033_processors_enum"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Processor",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"processor_type",
|
||||
api.db_utils.ProcessorTypeEnumField(
|
||||
choices=[("mutelist", "Mutelist")]
|
||||
),
|
||||
),
|
||||
("configuration", models.JSONField(default=dict)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "processors",
|
||||
"abstract": False,
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["tenant_id", "id"], name="processor_tenant_id_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "processor_type"],
|
||||
name="processor_tenant_type_idx",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="processor",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id", "processor_type"),
|
||||
name="unique_processor_types_tenant",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="processor",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_processor",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="scan",
|
||||
name="processor",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
to="api.processor",
|
||||
),
|
||||
),
|
||||
]
|
||||
22
api/src/backend/api/migrations/0035_finding_muted_reason.py
Normal file
22
api/src/backend/api/migrations/0035_finding_muted_reason.py
Normal file
@@ -0,0 +1,22 @@
|
||||
import django.core.validators
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0034_processors"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="muted_reason",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
max_length=500,
|
||||
null=True,
|
||||
validators=[django.core.validators.MinLengthValidator(3)],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,30 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0035_finding_muted_reason"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="resource_finding_mappings",
|
||||
index_name="rfm_tenant_finding_idx",
|
||||
columns="tenant_id, finding_id",
|
||||
method="BTREE",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="resource_finding_mappings",
|
||||
index_name="rfm_tenant_finding_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0036_rfm_tenant_finding_index_partitions"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="resourcefindingmapping",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "finding_id"],
|
||||
name="rfm_tenant_finding_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,15 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0037_rfm_tenant_finding_index_parent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="failed_findings_count",
|
||||
field=models.IntegerField(default=0),
|
||||
)
|
||||
]
|
||||
@@ -0,0 +1,20 @@
|
||||
from django.contrib.postgres.operations import AddIndexConcurrently
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0038_resource_failed_findings_count"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
AddIndexConcurrently(
|
||||
model_name="resource",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "-failed_findings_count", "id"],
|
||||
name="resources_failed_findings_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,30 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0039_resource_resources_failed_findings_idx"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="resource_finding_mappings",
|
||||
index_name="rfm_tenant_resource_idx",
|
||||
columns="tenant_id, resource_id",
|
||||
method="BTREE",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="resource_finding_mappings",
|
||||
index_name="rfm_tenant_resource_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0040_rfm_tenant_resource_index_partitions"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="resourcefindingmapping",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "resource_id"],
|
||||
name="rfm_tenant_resource_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
from django.contrib.postgres.operations import AddIndexConcurrently
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0041_rfm_tenant_resource_parent_partitions"),
|
||||
("django_celery_beat", "0019_alter_periodictasks_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
AddIndexConcurrently(
|
||||
model_name="scan",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state", "completed")),
|
||||
fields=["tenant_id", "provider_id", "-inserted_at"],
|
||||
include=("id",),
|
||||
name="scans_prov_ins_desc_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
33
api/src/backend/api/migrations/0043_github_provider.py
Normal file
33
api/src/backend/api/migrations/0043_github_provider.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 5.1.7 on 2025-07-09 14:44
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
import api.db_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0042_scan_scans_prov_ins_desc_idx"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="provider",
|
||||
name="provider",
|
||||
field=api.db_utils.ProviderEnumField(
|
||||
choices=[
|
||||
("aws", "AWS"),
|
||||
("azure", "Azure"),
|
||||
("gcp", "GCP"),
|
||||
("kubernetes", "Kubernetes"),
|
||||
("m365", "M365"),
|
||||
("github", "GitHub"),
|
||||
],
|
||||
default="aws",
|
||||
),
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"ALTER TYPE provider ADD VALUE IF NOT EXISTS 'github';",
|
||||
reverse_sql=migrations.RunSQL.noop,
|
||||
),
|
||||
]
|
||||
@@ -1,11 +1,14 @@
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
from config.custom_logging import BackendLogger
|
||||
from config.settings.social_login import SOCIALACCOUNT_PROVIDERS
|
||||
from cryptography.fernet import Fernet
|
||||
from cryptography.fernet import Fernet, InvalidToken
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import AbstractBaseUser
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
@@ -31,6 +34,7 @@ from api.db_utils import (
|
||||
IntegrationTypeEnumField,
|
||||
InvitationStateEnumField,
|
||||
MemberRoleEnumField,
|
||||
ProcessorTypeEnumField,
|
||||
ProviderEnumField,
|
||||
ProviderSecretTypeEnumField,
|
||||
ScanTriggerEnumField,
|
||||
@@ -55,6 +59,8 @@ fernet = Fernet(settings.SECRETS_ENCRYPTION_KEY.encode())
|
||||
# Convert Prowler Severity enum to Django TextChoices
|
||||
SeverityChoices = enum_to_choices(Severity)
|
||||
|
||||
logger = logging.getLogger(BackendLogger.API)
|
||||
|
||||
|
||||
class StatusChoices(models.TextChoices):
|
||||
"""
|
||||
@@ -199,6 +205,7 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
GCP = "gcp", _("GCP")
|
||||
KUBERNETES = "kubernetes", _("Kubernetes")
|
||||
M365 = "m365", _("M365")
|
||||
GITHUB = "github", _("GitHub")
|
||||
|
||||
@staticmethod
|
||||
def validate_aws_uid(value):
|
||||
@@ -259,6 +266,16 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_github_uid(value):
|
||||
if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9-]{0,38}$", value):
|
||||
raise ModelValidationError(
|
||||
detail="GitHub provider ID must be a valid GitHub username or organization name (1-39 characters, "
|
||||
"starting with alphanumeric, containing only alphanumeric characters and hyphens).",
|
||||
code="github-uid",
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
@@ -404,20 +421,6 @@ class Scan(RowLevelSecurityProtectedModel):
|
||||
name = models.CharField(
|
||||
blank=True, null=True, max_length=100, validators=[MinLengthValidator(3)]
|
||||
)
|
||||
provider = models.ForeignKey(
|
||||
Provider,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
)
|
||||
task = models.ForeignKey(
|
||||
Task,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
trigger = ScanTriggerEnumField(
|
||||
choices=TriggerChoices.choices,
|
||||
)
|
||||
@@ -433,11 +436,31 @@ class Scan(RowLevelSecurityProtectedModel):
|
||||
completed_at = models.DateTimeField(null=True, blank=True)
|
||||
next_scan_at = models.DateTimeField(null=True, blank=True)
|
||||
scheduler_task = models.ForeignKey(
|
||||
PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
|
||||
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
|
||||
)
|
||||
output_location = models.CharField(blank=True, null=True, max_length=200)
|
||||
|
||||
# TODO: mutelist foreign key
|
||||
provider = models.ForeignKey(
|
||||
Provider,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
)
|
||||
task = models.ForeignKey(
|
||||
Task,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
processor = models.ForeignKey(
|
||||
"Processor",
|
||||
on_delete=models.SET_NULL,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "scans"
|
||||
@@ -464,6 +487,13 @@ class Scan(RowLevelSecurityProtectedModel):
|
||||
condition=Q(state=StateChoices.COMPLETED),
|
||||
name="scans_prov_state_ins_desc_idx",
|
||||
),
|
||||
# TODO This might replace `scans_prov_state_ins_desc_idx` completely. Review usage
|
||||
models.Index(
|
||||
fields=["tenant_id", "provider_id", "-inserted_at"],
|
||||
condition=Q(state=StateChoices.COMPLETED),
|
||||
include=["id"],
|
||||
name="scans_prov_ins_desc_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
@@ -549,6 +579,8 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
details = models.TextField(blank=True, null=True)
|
||||
partition = models.TextField(blank=True, null=True)
|
||||
|
||||
failed_findings_count = models.IntegerField(default=0)
|
||||
|
||||
# Relationships
|
||||
tags = models.ManyToManyField(
|
||||
ResourceTag,
|
||||
@@ -595,6 +627,10 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
fields=["tenant_id", "provider_id"],
|
||||
name="resources_tenant_provider_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "-failed_findings_count", "id"],
|
||||
name="resources_failed_findings_idx",
|
||||
),
|
||||
]
|
||||
|
||||
constraints = [
|
||||
@@ -693,6 +729,9 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
check_id = models.CharField(max_length=100, blank=False, null=False)
|
||||
check_metadata = models.JSONField(default=dict, null=False)
|
||||
muted = models.BooleanField(default=False, null=False)
|
||||
muted_reason = models.TextField(
|
||||
blank=True, null=True, validators=[MinLengthValidator(3)], max_length=500
|
||||
)
|
||||
compliance = models.JSONField(default=dict, null=True, blank=True)
|
||||
|
||||
# Denormalize resource data for performance
|
||||
@@ -834,6 +873,16 @@ class ResourceFindingMapping(PostgresPartitionedModel, RowLevelSecurityProtected
|
||||
# - tenant_id
|
||||
# - id
|
||||
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["tenant_id", "finding_id"],
|
||||
name="rfm_tenant_finding_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "resource_id"],
|
||||
name="rfm_tenant_resource_idx",
|
||||
),
|
||||
]
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "resource_id", "finding_id"),
|
||||
@@ -938,6 +987,11 @@ class Invitation(RowLevelSecurityProtectedModel):
|
||||
null=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if self.email:
|
||||
self.email = self.email.strip().lower()
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "invitations"
|
||||
|
||||
@@ -1366,6 +1420,26 @@ class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
|
||||
]
|
||||
|
||||
|
||||
class SAMLToken(models.Model):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
expires_at = models.DateTimeField(editable=False)
|
||||
token = models.JSONField(unique=True)
|
||||
user = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||
|
||||
class Meta:
|
||||
db_table = "saml_tokens"
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.expires_at:
|
||||
self.expires_at = datetime.now(timezone.utc) + timedelta(seconds=15)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def is_expired(self) -> bool:
|
||||
return datetime.now(timezone.utc) >= self.expires_at
|
||||
|
||||
|
||||
class SAMLDomainIndex(models.Model):
|
||||
"""
|
||||
Public index of SAML domains. No RLS. Used for fast lookup in SAML login flow.
|
||||
@@ -1443,7 +1517,7 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
|
||||
),
|
||||
]
|
||||
|
||||
def clean(self, old_email_domain=None):
|
||||
def clean(self, old_email_domain=None, is_create=False):
|
||||
# Domain must not contain @
|
||||
if "@" in self.email_domain:
|
||||
raise ValidationError({"email_domain": "Domain must not contain @"})
|
||||
@@ -1467,6 +1541,25 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
|
||||
{"tenant": "There is a problem with your email domain."}
|
||||
)
|
||||
|
||||
# The entityID must be unique in the system
|
||||
idp_settings = self._parsed_metadata
|
||||
entity_id = idp_settings.get("entity_id")
|
||||
|
||||
if entity_id:
|
||||
# Find any SocialApp with this entityID
|
||||
q = SocialApp.objects.filter(provider="saml", provider_id=entity_id)
|
||||
|
||||
# If updating, exclude our own SocialApp from the check
|
||||
if not is_create:
|
||||
q = q.exclude(client_id=old_email_domain)
|
||||
else:
|
||||
q = q.exclude(client_id=self.email_domain)
|
||||
|
||||
if q.exists():
|
||||
raise ValidationError(
|
||||
{"metadata_xml": "There is a problem with your metadata."}
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.email_domain = self.email_domain.strip().lower()
|
||||
is_create = not SAMLConfiguration.objects.filter(pk=self.pk).exists()
|
||||
@@ -1479,7 +1572,8 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
|
||||
old_email_domain = None
|
||||
old_metadata_xml = None
|
||||
|
||||
self.clean(old_email_domain)
|
||||
self._parsed_metadata = self._parse_metadata()
|
||||
self.clean(old_email_domain, is_create)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
if is_create or (
|
||||
@@ -1497,6 +1591,12 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
|
||||
email_domain=self.email_domain, defaults={"tenant": self.tenant}
|
||||
)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
super().delete(*args, **kwargs)
|
||||
|
||||
SocialApp.objects.filter(provider="saml", client_id=self.email_domain).delete()
|
||||
SAMLDomainIndex.objects.filter(email_domain=self.email_domain).delete()
|
||||
|
||||
def _parse_metadata(self):
|
||||
"""
|
||||
Parse the raw IdP metadata XML and extract:
|
||||
@@ -1516,6 +1616,8 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
|
||||
|
||||
# Entity ID
|
||||
entity_id = root.attrib.get("entityID")
|
||||
if not entity_id:
|
||||
raise ValidationError({"metadata_xml": "Missing entityID in metadata."})
|
||||
|
||||
# SSO endpoint (must exist)
|
||||
sso = root.find(".//md:IDPSSODescriptor/md:SingleSignOnService", ns)
|
||||
@@ -1554,9 +1656,8 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
|
||||
Create or update the corresponding SocialApp based on email_domain.
|
||||
If the domain changed, update the matching SocialApp.
|
||||
"""
|
||||
idp_settings = self._parse_metadata()
|
||||
settings_dict = SOCIALACCOUNT_PROVIDERS["saml"].copy()
|
||||
settings_dict["idp"] = idp_settings
|
||||
settings_dict["idp"] = self._parsed_metadata
|
||||
|
||||
current_site = Site.objects.get(id=settings.SITE_ID)
|
||||
|
||||
@@ -1564,19 +1665,24 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
|
||||
provider="saml", client_id=previous_email_domain or self.email_domain
|
||||
)
|
||||
|
||||
client_id = self.email_domain[:191]
|
||||
name = f"SAML-{self.email_domain}"[:40]
|
||||
|
||||
if social_app_qs.exists():
|
||||
social_app = social_app_qs.first()
|
||||
social_app.client_id = self.email_domain
|
||||
social_app.name = f"{self.tenant.name} SAML ({self.email_domain})"
|
||||
social_app.client_id = client_id
|
||||
social_app.name = name
|
||||
social_app.settings = settings_dict
|
||||
social_app.provider_id = self._parsed_metadata["entity_id"]
|
||||
social_app.save()
|
||||
social_app.sites.set([current_site])
|
||||
else:
|
||||
social_app = SocialApp.objects.create(
|
||||
provider="saml",
|
||||
client_id=self.email_domain,
|
||||
name=f"{self.tenant.name} SAML ({self.email_domain})",
|
||||
client_id=client_id,
|
||||
name=name,
|
||||
settings=settings_dict,
|
||||
provider_id=self._parsed_metadata["entity_id"],
|
||||
)
|
||||
social_app.sites.set([current_site])
|
||||
|
||||
@@ -1628,3 +1734,169 @@ class ResourceScanSummary(RowLevelSecurityProtectedModel):
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class LighthouseConfiguration(RowLevelSecurityProtectedModel):
|
||||
"""
|
||||
Stores configuration and API keys for LLM services.
|
||||
"""
|
||||
|
||||
class ModelChoices(models.TextChoices):
|
||||
GPT_4O_2024_11_20 = "gpt-4o-2024-11-20", _("GPT-4o v2024-11-20")
|
||||
GPT_4O_2024_08_06 = "gpt-4o-2024-08-06", _("GPT-4o v2024-08-06")
|
||||
GPT_4O_2024_05_13 = "gpt-4o-2024-05-13", _("GPT-4o v2024-05-13")
|
||||
GPT_4O = "gpt-4o", _("GPT-4o Default")
|
||||
GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18", _("GPT-4o Mini v2024-07-18")
|
||||
GPT_4O_MINI = "gpt-4o-mini", _("GPT-4o Mini Default")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
|
||||
name = models.CharField(
|
||||
max_length=100,
|
||||
validators=[MinLengthValidator(3)],
|
||||
blank=False,
|
||||
null=False,
|
||||
help_text="Name of the configuration",
|
||||
)
|
||||
api_key = models.BinaryField(
|
||||
blank=False, null=False, help_text="Encrypted API key for the LLM service"
|
||||
)
|
||||
model = models.CharField(
|
||||
max_length=50,
|
||||
choices=ModelChoices.choices,
|
||||
blank=False,
|
||||
null=False,
|
||||
default=ModelChoices.GPT_4O_2024_08_06,
|
||||
help_text="Must be one of the supported model names",
|
||||
)
|
||||
temperature = models.FloatField(default=0, help_text="Must be between 0 and 1")
|
||||
max_tokens = models.IntegerField(
|
||||
default=4000, help_text="Must be between 500 and 5000"
|
||||
)
|
||||
business_context = models.TextField(
|
||||
blank=True,
|
||||
null=False,
|
||||
default="",
|
||||
help_text="Additional business context for this AI model configuration",
|
||||
)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
|
||||
# Validate temperature
|
||||
if not 0 <= self.temperature <= 1:
|
||||
raise ModelValidationError(
|
||||
detail="Temperature must be between 0 and 1",
|
||||
code="invalid_temperature",
|
||||
pointer="/data/attributes/temperature",
|
||||
)
|
||||
|
||||
# Validate max_tokens
|
||||
if not 500 <= self.max_tokens <= 5000:
|
||||
raise ModelValidationError(
|
||||
detail="Max tokens must be between 500 and 5000",
|
||||
code="invalid_max_tokens",
|
||||
pointer="/data/attributes/max_tokens",
|
||||
)
|
||||
|
||||
@property
|
||||
def api_key_decoded(self):
|
||||
"""Return the decrypted API key, or None if unavailable or invalid."""
|
||||
if not self.api_key:
|
||||
return None
|
||||
|
||||
try:
|
||||
decrypted_key = fernet.decrypt(bytes(self.api_key))
|
||||
return decrypted_key.decode()
|
||||
|
||||
except InvalidToken:
|
||||
logger.warning("Invalid token while decrypting API key.")
|
||||
except Exception as e:
|
||||
logger.exception("Unexpected error while decrypting API key: %s", e)
|
||||
|
||||
@api_key_decoded.setter
|
||||
def api_key_decoded(self, value):
|
||||
"""Store the encrypted API key."""
|
||||
if not value:
|
||||
raise ModelValidationError(
|
||||
detail="API key is required",
|
||||
code="invalid_api_key",
|
||||
pointer="/data/attributes/api_key",
|
||||
)
|
||||
|
||||
# Validate OpenAI API key format
|
||||
openai_key_pattern = r"^sk-[\w-]+T3BlbkFJ[\w-]+$"
|
||||
if not re.match(openai_key_pattern, value):
|
||||
raise ModelValidationError(
|
||||
detail="Invalid OpenAI API key format.",
|
||||
code="invalid_api_key",
|
||||
pointer="/data/attributes/api_key",
|
||||
)
|
||||
self.api_key = fernet.encrypt(value.encode())
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.full_clean()
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "lighthouse_configurations"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
# Add unique constraint for name within a tenant
|
||||
models.UniqueConstraint(
|
||||
fields=["tenant_id"], name="unique_lighthouse_config_per_tenant"
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "lighthouse-configurations"
|
||||
|
||||
|
||||
class Processor(RowLevelSecurityProtectedModel):
|
||||
class ProcessorChoices(models.TextChoices):
|
||||
MUTELIST = "mutelist", _("Mutelist")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
processor_type = ProcessorTypeEnumField(choices=ProcessorChoices.choices)
|
||||
configuration = models.JSONField(default=dict)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "processors"
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "processor_type"),
|
||||
name="unique_processor_types_tenant",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["tenant_id", "id"],
|
||||
name="processor_tenant_id_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "processor_type"],
|
||||
name="processor_tenant_type_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "processors"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -11,7 +11,7 @@ def test_basic_authentication():
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "test_password"
|
||||
test_password = "Test_password@1"
|
||||
|
||||
# Check that a 401 is returned when no basic authentication is provided
|
||||
no_auth_response = client.get(reverse("provider-list"))
|
||||
@@ -108,7 +108,7 @@ def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fi
|
||||
user1_email = "user1@testing.com"
|
||||
user2_email = "user2@testing.com"
|
||||
|
||||
password = "thisisapassword123"
|
||||
password = "Thisisapassword123@"
|
||||
|
||||
user1_response = client.post(
|
||||
reverse("user-list"),
|
||||
@@ -187,7 +187,7 @@ class TestTokenSwitchTenant:
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "test_password"
|
||||
test_password = "Test_password1@"
|
||||
|
||||
# Check that we can create a new user without any kind of authentication
|
||||
user_creation_response = client.post(
|
||||
|
||||
@@ -17,7 +17,7 @@ def test_delete_provider_without_executing_task(
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "test_password"
|
||||
test_password = "Test_password1@"
|
||||
|
||||
prowler_task = tasks_fixture[0]
|
||||
task_mock = Mock()
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
from unittest.mock import MagicMock
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from allauth.socialaccount.models import SocialLogin
|
||||
from django.contrib.auth import get_user_model
|
||||
|
||||
from api.adapters import ProwlerSocialAccountAdapter
|
||||
from api.db_router import MainRouter
|
||||
from api.models import Membership, SAMLConfiguration, Tenant
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
@@ -27,7 +25,8 @@ class TestProwlerSocialAccountAdapter:
|
||||
|
||||
sociallogin = MagicMock(spec=SocialLogin)
|
||||
sociallogin.account = MagicMock()
|
||||
sociallogin.account.provider = "saml"
|
||||
sociallogin.provider = MagicMock()
|
||||
sociallogin.provider.id = "saml"
|
||||
sociallogin.account.extra_data = {}
|
||||
sociallogin.user = create_test_user
|
||||
sociallogin.connect = MagicMock()
|
||||
@@ -46,7 +45,9 @@ class TestProwlerSocialAccountAdapter:
|
||||
|
||||
sociallogin = MagicMock(spec=SocialLogin)
|
||||
sociallogin.account = MagicMock()
|
||||
sociallogin.account.provider = "github"
|
||||
sociallogin.provider = MagicMock()
|
||||
sociallogin.user = MagicMock()
|
||||
sociallogin.provider.id = "saml"
|
||||
sociallogin.account.extra_data = {}
|
||||
sociallogin.connect = MagicMock()
|
||||
|
||||
@@ -54,29 +55,23 @@ class TestProwlerSocialAccountAdapter:
|
||||
|
||||
sociallogin.connect.assert_not_called()
|
||||
|
||||
def test_save_user_saml_flow(
|
||||
self,
|
||||
rf,
|
||||
saml_setup,
|
||||
saml_sociallogin,
|
||||
):
|
||||
def test_save_user_saml_sets_session_flag(self, rf):
|
||||
adapter = ProwlerSocialAccountAdapter()
|
||||
request = rf.get("/")
|
||||
saml_sociallogin.user.email = saml_setup["email"]
|
||||
request.session = {}
|
||||
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).get(
|
||||
id=saml_setup["tenant_id"]
|
||||
)
|
||||
saml_config = SAMLConfiguration.objects.using(MainRouter.admin_db).get(
|
||||
tenant=tenant
|
||||
)
|
||||
assert saml_config.email_domain == saml_setup["domain"]
|
||||
sociallogin = MagicMock(spec=SocialLogin)
|
||||
sociallogin.provider = MagicMock()
|
||||
sociallogin.provider.id = "saml"
|
||||
sociallogin.account = MagicMock()
|
||||
sociallogin.account.extra_data = {}
|
||||
|
||||
user = adapter.save_user(request, saml_sociallogin)
|
||||
mock_user = MagicMock()
|
||||
mock_user.id = 123
|
||||
|
||||
assert user.email == saml_setup["email"]
|
||||
assert (
|
||||
Membership.objects.using(MainRouter.admin_db)
|
||||
.filter(user=user, tenant=tenant)
|
||||
.exists()
|
||||
)
|
||||
with patch("api.adapters.super") as mock_super:
|
||||
with patch("api.adapters.transaction"):
|
||||
with patch("api.adapters.MainRouter"):
|
||||
mock_super.return_value.save_user.return_value = mock_user
|
||||
adapter.save_user(request, sociallogin)
|
||||
assert request.session["saml_user_created"] == "123"
|
||||
|
||||
@@ -218,6 +218,10 @@ class TestCompliance:
|
||||
Description="Description of requirement 1",
|
||||
Attributes=[],
|
||||
Checks=["check1", "check2"],
|
||||
Tactics=["tactic1"],
|
||||
SubTechniques=["subtechnique1"],
|
||||
Platforms=["platform1"],
|
||||
TechniqueURL="https://example.com",
|
||||
)
|
||||
requirement2 = MagicMock(
|
||||
Id="requirement2",
|
||||
@@ -225,6 +229,10 @@ class TestCompliance:
|
||||
Description="Description of requirement 2",
|
||||
Attributes=[],
|
||||
Checks=[],
|
||||
Tactics=[],
|
||||
SubTechniques=[],
|
||||
Platforms=[],
|
||||
TechniqueURL="",
|
||||
)
|
||||
compliance1 = MagicMock(
|
||||
Requirements=[requirement1, requirement2],
|
||||
@@ -247,6 +255,10 @@ class TestCompliance:
|
||||
"requirement1": {
|
||||
"name": "Requirement 1",
|
||||
"description": "Description of requirement 1",
|
||||
"tactics": ["tactic1"],
|
||||
"subtechniques": ["subtechnique1"],
|
||||
"platforms": ["platform1"],
|
||||
"technique_url": "https://example.com",
|
||||
"attributes": [],
|
||||
"checks": {"check1": None, "check2": None},
|
||||
"checks_status": {
|
||||
@@ -260,6 +272,10 @@ class TestCompliance:
|
||||
"requirement2": {
|
||||
"name": "Requirement 2",
|
||||
"description": "Description of requirement 2",
|
||||
"tactics": [],
|
||||
"subtechniques": [],
|
||||
"platforms": [],
|
||||
"technique_url": "",
|
||||
"attributes": [],
|
||||
"checks": {},
|
||||
"checks_status": {
|
||||
|
||||
@@ -13,6 +13,7 @@ from api.db_utils import (
|
||||
enum_to_choices,
|
||||
generate_random_token,
|
||||
one_week_from_now,
|
||||
update_objects_in_batches,
|
||||
)
|
||||
from api.models import Provider
|
||||
|
||||
@@ -227,3 +228,88 @@ class TestCreateObjectsInBatches:
|
||||
|
||||
qs = Provider.objects.filter(tenant=tenant)
|
||||
assert qs.count() == total
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestUpdateObjectsInBatches:
|
||||
@pytest.fixture
|
||||
def tenant(self, tenants_fixture):
|
||||
return tenants_fixture[0]
|
||||
|
||||
def make_provider_instances(self, tenant, count):
|
||||
"""
|
||||
Return a list of `count` unsaved Provider instances for the given tenant.
|
||||
"""
|
||||
base_uid = 2000
|
||||
return [
|
||||
Provider(
|
||||
tenant=tenant,
|
||||
uid=str(base_uid + i),
|
||||
provider=Provider.ProviderChoices.AWS,
|
||||
)
|
||||
for i in range(count)
|
||||
]
|
||||
|
||||
def test_exact_multiple_of_batch(self, tenant):
|
||||
total = 6
|
||||
batch_size = 3
|
||||
objs = self.make_provider_instances(tenant, total)
|
||||
create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)
|
||||
|
||||
# Fetch them back, mutate the `uid` field, then update in batches
|
||||
providers = list(Provider.objects.filter(tenant=tenant))
|
||||
for p in providers:
|
||||
p.uid = f"{p.uid}_upd"
|
||||
|
||||
update_objects_in_batches(
|
||||
tenant_id=str(tenant.id),
|
||||
model=Provider,
|
||||
objects=providers,
|
||||
fields=["uid"],
|
||||
batch_size=batch_size,
|
||||
)
|
||||
|
||||
qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
|
||||
assert qs.count() == total
|
||||
|
||||
def test_non_multiple_of_batch(self, tenant):
|
||||
total = 7
|
||||
batch_size = 3
|
||||
objs = self.make_provider_instances(tenant, total)
|
||||
create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)
|
||||
|
||||
providers = list(Provider.objects.filter(tenant=tenant))
|
||||
for p in providers:
|
||||
p.uid = f"{p.uid}_upd"
|
||||
|
||||
update_objects_in_batches(
|
||||
tenant_id=str(tenant.id),
|
||||
model=Provider,
|
||||
objects=providers,
|
||||
fields=["uid"],
|
||||
batch_size=batch_size,
|
||||
)
|
||||
|
||||
qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
|
||||
assert qs.count() == total
|
||||
|
||||
def test_batch_size_default(self, tenant):
|
||||
default_size = settings.DJANGO_DELETION_BATCH_SIZE
|
||||
total = default_size + 2
|
||||
objs = self.make_provider_instances(tenant, total)
|
||||
create_objects_in_batches(str(tenant.id), Provider, objs)
|
||||
|
||||
providers = list(Provider.objects.filter(tenant=tenant))
|
||||
for p in providers:
|
||||
p.uid = f"{p.uid}_upd"
|
||||
|
||||
# Update without specifying batch_size (uses default)
|
||||
update_objects_in_batches(
|
||||
tenant_id=str(tenant.id),
|
||||
model=Provider,
|
||||
objects=providers,
|
||||
fields=["uid"],
|
||||
)
|
||||
|
||||
qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
|
||||
assert qs.count() == total
|
||||
|
||||
@@ -3,7 +3,7 @@ from allauth.socialaccount.models import SocialApp
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.models import Resource, ResourceTag, SAMLConfiguration, Tenant
|
||||
from api.models import Resource, ResourceTag, SAMLConfiguration, SAMLDomainIndex
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -142,8 +142,8 @@ class TestSAMLConfigurationModel:
|
||||
</md:EntityDescriptor>
|
||||
"""
|
||||
|
||||
def test_creates_valid_configuration(self):
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant A")
|
||||
def test_creates_valid_configuration(self, tenants_fixture):
|
||||
tenant = tenants_fixture[0]
|
||||
config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
|
||||
email_domain="ssoexample.com",
|
||||
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
@@ -153,8 +153,8 @@ class TestSAMLConfigurationModel:
|
||||
assert config.email_domain == "ssoexample.com"
|
||||
assert SocialApp.objects.filter(client_id="ssoexample.com").exists()
|
||||
|
||||
def test_email_domain_with_at_symbol_fails(self):
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant B")
|
||||
def test_email_domain_with_at_symbol_fails(self, tenants_fixture):
|
||||
tenant = tenants_fixture[0]
|
||||
config = SAMLConfiguration(
|
||||
email_domain="invalid@domain.com",
|
||||
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
@@ -168,9 +168,8 @@ class TestSAMLConfigurationModel:
|
||||
assert "email_domain" in errors
|
||||
assert "Domain must not contain @" in errors["email_domain"][0]
|
||||
|
||||
def test_duplicate_email_domain_fails(self):
|
||||
tenant1 = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant C1")
|
||||
tenant2 = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant C2")
|
||||
def test_duplicate_email_domain_fails(self, tenants_fixture):
|
||||
tenant1, tenant2, *_ = tenants_fixture
|
||||
|
||||
SAMLConfiguration.objects.using(MainRouter.admin_db).create(
|
||||
email_domain="duplicate.com",
|
||||
@@ -191,8 +190,8 @@ class TestSAMLConfigurationModel:
|
||||
assert "tenant" in errors
|
||||
assert "There is a problem with your email domain." in errors["tenant"][0]
|
||||
|
||||
def test_duplicate_tenant_config_fails(self):
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant D")
|
||||
def test_duplicate_tenant_config_fails(self, tenants_fixture):
|
||||
tenant = tenants_fixture[0]
|
||||
|
||||
SAMLConfiguration.objects.using(MainRouter.admin_db).create(
|
||||
email_domain="unique1.com",
|
||||
@@ -216,8 +215,8 @@ class TestSAMLConfigurationModel:
|
||||
in errors["tenant"][0]
|
||||
)
|
||||
|
||||
def test_invalid_metadata_xml_fails(self):
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant E")
|
||||
def test_invalid_metadata_xml_fails(self, tenants_fixture):
|
||||
tenant = tenants_fixture[0]
|
||||
config = SAMLConfiguration(
|
||||
email_domain="brokenxml.com",
|
||||
metadata_xml="<bad<xml>",
|
||||
@@ -232,8 +231,8 @@ class TestSAMLConfigurationModel:
|
||||
assert "Invalid XML" in errors["metadata_xml"][0]
|
||||
assert "not well-formed" in errors["metadata_xml"][0]
|
||||
|
||||
def test_metadata_missing_sso_fails(self):
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant F")
|
||||
def test_metadata_missing_sso_fails(self, tenants_fixture):
|
||||
tenant = tenants_fixture[0]
|
||||
xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
|
||||
<md:IDPSSODescriptor></md:IDPSSODescriptor>
|
||||
</md:EntityDescriptor>"""
|
||||
@@ -250,8 +249,8 @@ class TestSAMLConfigurationModel:
|
||||
assert "metadata_xml" in errors
|
||||
assert "Missing SingleSignOnService" in errors["metadata_xml"][0]
|
||||
|
||||
def test_metadata_missing_certificate_fails(self):
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant G")
|
||||
def test_metadata_missing_certificate_fails(self, tenants_fixture):
|
||||
tenant = tenants_fixture[0]
|
||||
xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
|
||||
<md:IDPSSODescriptor>
|
||||
<md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" Location="https://example.com/sso"/>
|
||||
@@ -269,3 +268,59 @@ class TestSAMLConfigurationModel:
|
||||
errors = exc_info.value.message_dict
|
||||
assert "metadata_xml" in errors
|
||||
assert "X509Certificate" in errors["metadata_xml"][0]
|
||||
|
||||
def test_deletes_saml_configuration_and_related_objects(self, tenants_fixture):
|
||||
tenant = tenants_fixture[0]
|
||||
email_domain = "deleteme.com"
|
||||
|
||||
# Create the configuration
|
||||
config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
|
||||
email_domain=email_domain,
|
||||
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
|
||||
tenant=tenant,
|
||||
)
|
||||
|
||||
# Verify that the SocialApp and SAMLDomainIndex exist
|
||||
assert SocialApp.objects.filter(client_id=email_domain).exists()
|
||||
assert (
|
||||
SAMLDomainIndex.objects.using(MainRouter.admin_db)
|
||||
.filter(email_domain=email_domain)
|
||||
.exists()
|
||||
)
|
||||
|
||||
# Delete the configuration
|
||||
config.delete()
|
||||
|
||||
# Verify that the configuration and its related objects are deleted
|
||||
assert (
|
||||
not SAMLConfiguration.objects.using(MainRouter.admin_db)
|
||||
.filter(pk=config.pk)
|
||||
.exists()
|
||||
)
|
||||
assert not SocialApp.objects.filter(client_id=email_domain).exists()
|
||||
assert (
|
||||
not SAMLDomainIndex.objects.using(MainRouter.admin_db)
|
||||
.filter(email_domain=email_domain)
|
||||
.exists()
|
||||
)
|
||||
|
||||
def test_duplicate_entity_id_fails_on_creation(self, tenants_fixture):
|
||||
tenant1, tenant2, *_ = tenants_fixture
|
||||
SAMLConfiguration.objects.using(MainRouter.admin_db).create(
|
||||
email_domain="first.com",
|
||||
metadata_xml=self.VALID_METADATA,
|
||||
tenant=tenant1,
|
||||
)
|
||||
|
||||
config = SAMLConfiguration(
|
||||
email_domain="second.com",
|
||||
metadata_xml=self.VALID_METADATA,
|
||||
tenant=tenant2,
|
||||
)
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
config.save()
|
||||
|
||||
errors = exc_info.value.message_dict
|
||||
assert "metadata_xml" in errors
|
||||
assert "There is a problem with your metadata." in errors["metadata_xml"][0]
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from unittest.mock import ANY, Mock, patch
|
||||
|
||||
import pytest
|
||||
from conftest import TODAY
|
||||
from django.urls import reverse
|
||||
from rest_framework import status
|
||||
|
||||
@@ -60,7 +61,7 @@ class TestUserViewSet:
|
||||
def test_create_user_with_all_permissions(self, authenticated_client_rbac):
|
||||
valid_user_payload = {
|
||||
"name": "test",
|
||||
"password": "newpassword123",
|
||||
"password": "Newpassword123@",
|
||||
"email": "new_user@test.com",
|
||||
}
|
||||
response = authenticated_client_rbac.post(
|
||||
@@ -74,7 +75,7 @@ class TestUserViewSet:
|
||||
):
|
||||
valid_user_payload = {
|
||||
"name": "test",
|
||||
"password": "newpassword123",
|
||||
"password": "Newpassword123@",
|
||||
"email": "new_user@test.com",
|
||||
}
|
||||
response = authenticated_client_no_permissions_rbac.post(
|
||||
@@ -321,7 +322,7 @@ class TestProviderViewSet:
|
||||
@pytest.mark.django_db
|
||||
class TestLimitedVisibility:
|
||||
TEST_EMAIL = "rbac@rbac.com"
|
||||
TEST_PASSWORD = "thisisapassword123"
|
||||
TEST_PASSWORD = "Thisisapassword123@"
|
||||
|
||||
@pytest.fixture
|
||||
def limited_admin_user(
|
||||
@@ -409,3 +410,87 @@ class TestLimitedVisibility:
|
||||
assert (
|
||||
response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
|
||||
)
|
||||
|
||||
def test_overviews_providers(
|
||||
self,
|
||||
authenticated_client_rbac_limited,
|
||||
scan_summaries_fixture,
|
||||
providers_fixture,
|
||||
):
|
||||
# By default, the associated provider is the one which has the overview data
|
||||
response = authenticated_client_rbac_limited.get(reverse("overview-providers"))
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) > 0
|
||||
|
||||
# Changing the provider visibility, no data should be returned
|
||||
# Only the associated provider to that group is changed
|
||||
new_provider = providers_fixture[1]
|
||||
ProviderGroupMembership.objects.all().update(provider=new_provider)
|
||||
|
||||
response = authenticated_client_rbac_limited.get(reverse("overview-providers"))
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) == 0
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"endpoint_name",
|
||||
[
|
||||
"findings",
|
||||
"findings_severity",
|
||||
],
|
||||
)
|
||||
def test_overviews_findings(
|
||||
self,
|
||||
endpoint_name,
|
||||
authenticated_client_rbac_limited,
|
||||
scan_summaries_fixture,
|
||||
providers_fixture,
|
||||
):
|
||||
# By default, the associated provider is the one which has the overview data
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse(f"overview-{endpoint_name}")
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
values = response.json()["data"]["attributes"].values()
|
||||
assert any(value > 0 for value in values)
|
||||
|
||||
# Changing the provider visibility, no data should be returned
|
||||
# Only the associated provider to that group is changed
|
||||
new_provider = providers_fixture[1]
|
||||
ProviderGroupMembership.objects.all().update(provider=new_provider)
|
||||
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse(f"overview-{endpoint_name}")
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()["data"]["attributes"].values()
|
||||
assert all(value == 0 for value in data)
|
||||
|
||||
def test_overviews_services(
|
||||
self,
|
||||
authenticated_client_rbac_limited,
|
||||
scan_summaries_fixture,
|
||||
providers_fixture,
|
||||
):
|
||||
# By default, the associated provider is the one which has the overview data
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse("overview-services"), {"filter[inserted_at]": TODAY}
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) > 0
|
||||
|
||||
# Changing the provider visibility, no data should be returned
|
||||
# Only the associated provider to that group is changed
|
||||
new_provider = providers_fixture[1]
|
||||
ProviderGroupMembership.objects.all().update(provider=new_provider)
|
||||
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse("overview-services"), {"filter[inserted_at]": TODAY}
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) == 0
|
||||
|
||||
80
api/src/backend/api/tests/test_sentry.py
Normal file
80
api/src/backend/api/tests/test_sentry.py
Normal file
@@ -0,0 +1,80 @@
|
||||
import logging
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from config.settings.sentry import before_send
|
||||
|
||||
|
||||
def test_before_send_ignores_log_with_ignored_exception():
|
||||
"""Test that before_send ignores logs containing ignored exceptions."""
|
||||
log_record = MagicMock()
|
||||
log_record.msg = "Provider kubernetes is not connected"
|
||||
log_record.levelno = logging.ERROR # 40
|
||||
|
||||
hint = {"log_record": log_record}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was dropped (None returned)
|
||||
assert result is None
|
||||
|
||||
|
||||
def test_before_send_ignores_exception_with_ignored_exception():
|
||||
"""Test that before_send ignores exceptions containing ignored exceptions."""
|
||||
exc_info = (Exception, Exception("Provider kubernetes is not connected"), None)
|
||||
|
||||
hint = {"exc_info": exc_info}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was dropped (None returned)
|
||||
assert result is None
|
||||
|
||||
|
||||
def test_before_send_passes_through_non_ignored_log():
|
||||
"""Test that before_send passes through logs that don't contain ignored exceptions."""
|
||||
log_record = MagicMock()
|
||||
log_record.msg = "Some other error message"
|
||||
log_record.levelno = logging.ERROR # 40
|
||||
|
||||
hint = {"log_record": log_record}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was passed through
|
||||
assert result == event
|
||||
|
||||
|
||||
def test_before_send_passes_through_non_ignored_exception():
|
||||
"""Test that before_send passes through exceptions that don't contain ignored exceptions."""
|
||||
exc_info = (Exception, Exception("Some other error message"), None)
|
||||
|
||||
hint = {"exc_info": exc_info}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was passed through
|
||||
assert result == event
|
||||
|
||||
|
||||
def test_before_send_handles_warning_level():
|
||||
"""Test that before_send handles warning level logs."""
|
||||
log_record = MagicMock()
|
||||
log_record.msg = "Provider kubernetes is not connected"
|
||||
log_record.levelno = logging.WARNING # 30
|
||||
|
||||
hint = {"log_record": log_record}
|
||||
|
||||
event = MagicMock()
|
||||
|
||||
result = before_send(event, hint)
|
||||
|
||||
# Assert that the event was dropped (None returned)
|
||||
assert result is None
|
||||
@@ -131,6 +131,21 @@ class TestInitializeProwlerProvider:
|
||||
initialize_prowler_provider(provider)
|
||||
mock_return_prowler_provider.return_value.assert_called_once_with(key="value")
|
||||
|
||||
@patch("api.utils.return_prowler_provider")
|
||||
def test_initialize_prowler_provider_with_mutelist(
|
||||
self, mock_return_prowler_provider
|
||||
):
|
||||
provider = MagicMock()
|
||||
provider.secret.secret = {"key": "value"}
|
||||
mutelist_processor = MagicMock()
|
||||
mutelist_processor.configuration = {"Mutelist": {"key": "value"}}
|
||||
mock_return_prowler_provider.return_value = MagicMock()
|
||||
|
||||
initialize_prowler_provider(provider, mutelist_processor)
|
||||
mock_return_prowler_provider.return_value.assert_called_once_with(
|
||||
key="value", mutelist_content={"key": "value"}
|
||||
)
|
||||
|
||||
|
||||
class TestProwlerProviderConnectionTest:
|
||||
@patch("api.utils.return_prowler_provider")
|
||||
@@ -200,6 +215,25 @@ class TestGetProwlerProviderKwargs:
|
||||
expected_result = {**secret_dict, **expected_extra_kwargs}
|
||||
assert result == expected_result
|
||||
|
||||
def test_get_prowler_provider_kwargs_with_mutelist(self):
|
||||
provider_uid = "provider_uid"
|
||||
secret_dict = {"key": "value"}
|
||||
secret_mock = MagicMock()
|
||||
secret_mock.secret = secret_dict
|
||||
|
||||
mutelist_processor = MagicMock()
|
||||
mutelist_processor.configuration = {"Mutelist": {"key": "value"}}
|
||||
|
||||
provider = MagicMock()
|
||||
provider.provider = Provider.ProviderChoices.AWS.value
|
||||
provider.secret = secret_mock
|
||||
provider.uid = provider_uid
|
||||
|
||||
result = get_prowler_provider_kwargs(provider, mutelist_processor)
|
||||
|
||||
expected_result = {**secret_dict, "mutelist_content": {"key": "value"}}
|
||||
assert result == expected_result
|
||||
|
||||
def test_get_prowler_provider_kwargs_unsupported_provider(self):
|
||||
# Setup
|
||||
provider_uid = "provider_uid"
|
||||
@@ -254,7 +288,7 @@ class TestValidateInvitation:
|
||||
|
||||
assert result == invitation
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="VALID_TOKEN", email="user@example.com"
|
||||
token="VALID_TOKEN", email__iexact="user@example.com"
|
||||
)
|
||||
|
||||
def test_invitation_not_found_raises_validation_error(self):
|
||||
@@ -269,7 +303,7 @@ class TestValidateInvitation:
|
||||
"invitation_token": "Invalid invitation code."
|
||||
}
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="INVALID_TOKEN", email="user@example.com"
|
||||
token="INVALID_TOKEN", email__iexact="user@example.com"
|
||||
)
|
||||
|
||||
def test_invitation_not_found_raises_not_found(self):
|
||||
@@ -284,7 +318,7 @@ class TestValidateInvitation:
|
||||
|
||||
assert exc_info.value.detail == "Invitation is not valid."
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="INVALID_TOKEN", email="user@example.com"
|
||||
token="INVALID_TOKEN", email__iexact="user@example.com"
|
||||
)
|
||||
|
||||
def test_invitation_expired(self, invitation):
|
||||
@@ -332,5 +366,27 @@ class TestValidateInvitation:
|
||||
"invitation_token": "Invalid invitation code."
|
||||
}
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="VALID_TOKEN", email="different@example.com"
|
||||
token="VALID_TOKEN", email__iexact="different@example.com"
|
||||
)
|
||||
|
||||
def test_valid_invitation_uppercase_email(self):
|
||||
"""Test that validate_invitation works with case-insensitive email lookup."""
|
||||
uppercase_email = "USER@example.com"
|
||||
|
||||
invitation = MagicMock(spec=Invitation)
|
||||
invitation.token = "VALID_TOKEN"
|
||||
invitation.email = uppercase_email
|
||||
invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=1)
|
||||
invitation.state = Invitation.State.PENDING
|
||||
invitation.tenant = MagicMock()
|
||||
|
||||
with patch("api.utils.Invitation.objects.using") as mock_using:
|
||||
mock_db = mock_using.return_value
|
||||
mock_db.get.return_value = invitation
|
||||
|
||||
result = validate_invitation("VALID_TOKEN", "user@example.com")
|
||||
|
||||
assert result == invitation
|
||||
mock_db.get.assert_called_once_with(
|
||||
token="VALID_TOKEN", email__iexact="user@example.com"
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -7,12 +7,13 @@ from rest_framework.exceptions import NotFound, ValidationError
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.exceptions import InvitationTokenExpiredException
|
||||
from api.models import Invitation, Provider, Resource
|
||||
from api.models import Invitation, Processor, Provider, Resource
|
||||
from api.v1.serializers import FindingMetadataSerializer
|
||||
from prowler.providers.aws.aws_provider import AwsProvider
|
||||
from prowler.providers.azure.azure_provider import AzureProvider
|
||||
from prowler.providers.common.models import Connection
|
||||
from prowler.providers.gcp.gcp_provider import GcpProvider
|
||||
from prowler.providers.github.github_provider import GithubProvider
|
||||
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
|
||||
from prowler.providers.m365.m365_provider import M365Provider
|
||||
|
||||
@@ -55,14 +56,21 @@ def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
|
||||
|
||||
def return_prowler_provider(
|
||||
provider: Provider,
|
||||
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider]:
|
||||
) -> [
|
||||
AwsProvider
|
||||
| AzureProvider
|
||||
| GcpProvider
|
||||
| GithubProvider
|
||||
| KubernetesProvider
|
||||
| M365Provider
|
||||
]:
|
||||
"""Return the Prowler provider class based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secrets.
|
||||
|
||||
Returns:
|
||||
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: The corresponding provider class.
|
||||
AwsProvider | AzureProvider | GcpProvider | GithubProvider | KubernetesProvider | M365Provider: The corresponding provider class.
|
||||
|
||||
Raises:
|
||||
ValueError: If the provider type specified in `provider.provider` is not supported.
|
||||
@@ -78,16 +86,21 @@ def return_prowler_provider(
|
||||
prowler_provider = KubernetesProvider
|
||||
case Provider.ProviderChoices.M365.value:
|
||||
prowler_provider = M365Provider
|
||||
case Provider.ProviderChoices.GITHUB.value:
|
||||
prowler_provider = GithubProvider
|
||||
case _:
|
||||
raise ValueError(f"Provider type {provider.provider} not supported")
|
||||
return prowler_provider
|
||||
|
||||
|
||||
def get_prowler_provider_kwargs(provider: Provider) -> dict:
|
||||
def get_prowler_provider_kwargs(
|
||||
provider: Provider, mutelist_processor: Processor | None = None
|
||||
) -> dict:
|
||||
"""Get the Prowler provider kwargs based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secret.
|
||||
mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.
|
||||
|
||||
Returns:
|
||||
dict: The provider kwargs for the corresponding provider class.
|
||||
@@ -105,24 +118,39 @@ def get_prowler_provider_kwargs(provider: Provider) -> dict:
|
||||
}
|
||||
elif provider.provider == Provider.ProviderChoices.KUBERNETES.value:
|
||||
prowler_provider_kwargs = {**prowler_provider_kwargs, "context": provider.uid}
|
||||
|
||||
if mutelist_processor:
|
||||
mutelist_content = mutelist_processor.configuration.get("Mutelist", {})
|
||||
if mutelist_content:
|
||||
prowler_provider_kwargs["mutelist_content"] = mutelist_content
|
||||
|
||||
return prowler_provider_kwargs
|
||||
|
||||
|
||||
def initialize_prowler_provider(
|
||||
provider: Provider,
|
||||
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider:
|
||||
mutelist_processor: Processor | None = None,
|
||||
) -> (
|
||||
AwsProvider
|
||||
| AzureProvider
|
||||
| GcpProvider
|
||||
| GithubProvider
|
||||
| KubernetesProvider
|
||||
| M365Provider
|
||||
):
|
||||
"""Initialize a Prowler provider instance based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secrets.
|
||||
mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.
|
||||
|
||||
Returns:
|
||||
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
|
||||
(`AwsProvider`, `AzureProvider`, `GcpProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
|
||||
AwsProvider | AzureProvider | GcpProvider | GithubProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
|
||||
(`AwsProvider`, `AzureProvider`, `GcpProvider`, `GithubProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
|
||||
provider's secrets.
|
||||
"""
|
||||
prowler_provider = return_prowler_provider(provider)
|
||||
prowler_provider_kwargs = get_prowler_provider_kwargs(provider)
|
||||
prowler_provider_kwargs = get_prowler_provider_kwargs(provider, mutelist_processor)
|
||||
return prowler_provider(**prowler_provider_kwargs)
|
||||
|
||||
|
||||
@@ -187,7 +215,7 @@ def validate_invitation(
|
||||
# Admin DB connector is used to bypass RLS protection since the invitation belongs to a tenant the user
|
||||
# is not a member of yet
|
||||
invitation = Invitation.objects.using(MainRouter.admin_db).get(
|
||||
token=invitation_token, email=email
|
||||
token=invitation_token, email__iexact=email
|
||||
)
|
||||
except Invitation.DoesNotExist:
|
||||
if raise_not_found:
|
||||
|
||||
@@ -24,20 +24,32 @@ class PaginateByPkMixin:
|
||||
request, # noqa: F841
|
||||
base_queryset,
|
||||
manager,
|
||||
select_related: list[str] | None = None,
|
||||
prefetch_related: list[str] | None = None,
|
||||
select_related: list | None = None,
|
||||
prefetch_related: list | None = None,
|
||||
) -> Response:
|
||||
"""
|
||||
Paginate a queryset by primary key.
|
||||
|
||||
This method is useful when you want to paginate a queryset that has been
|
||||
filtered or annotated in a way that would be lost if you used the default
|
||||
pagination method.
|
||||
"""
|
||||
pk_list = base_queryset.values_list("id", flat=True)
|
||||
page = self.paginate_queryset(pk_list)
|
||||
if page is None:
|
||||
return Response(self.get_serializer(base_queryset, many=True).data)
|
||||
|
||||
queryset = manager.filter(id__in=page)
|
||||
|
||||
if select_related:
|
||||
queryset = queryset.select_related(*select_related)
|
||||
if prefetch_related:
|
||||
queryset = queryset.prefetch_related(*prefetch_related)
|
||||
|
||||
# Optimize tags loading, if applicable
|
||||
if hasattr(self, "_optimize_tags_loading"):
|
||||
queryset = self._optimize_tags_loading(queryset)
|
||||
|
||||
queryset = sorted(queryset, key=lambda obj: page.index(obj.id))
|
||||
|
||||
serialized = self.get_serializer(queryset, many=True).data
|
||||
|
||||
23
api/src/backend/api/v1/serializer_utils/base.py
Normal file
23
api/src/backend/api/v1/serializer_utils/base.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import yaml
|
||||
from rest_framework_json_api import serializers
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
|
||||
class BaseValidateSerializer(serializers.Serializer):
|
||||
def validate(self, data):
|
||||
if hasattr(self, "initial_data"):
|
||||
initial_data = set(self.initial_data.keys()) - {"id", "type"}
|
||||
unknown_keys = initial_data - set(self.fields.keys())
|
||||
if unknown_keys:
|
||||
raise ValidationError(f"Invalid fields: {unknown_keys}")
|
||||
return data
|
||||
|
||||
|
||||
class YamlOrJsonField(serializers.JSONField):
|
||||
def to_internal_value(self, data):
|
||||
if isinstance(data, str):
|
||||
try:
|
||||
data = yaml.safe_load(data)
|
||||
except yaml.YAMLError as exc:
|
||||
raise serializers.ValidationError("Invalid YAML format") from exc
|
||||
return super().to_internal_value(data)
|
||||
@@ -1,19 +1,7 @@
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework_json_api import serializers
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
|
||||
class BaseValidateSerializer(serializers.Serializer):
|
||||
def validate(self, data):
|
||||
if hasattr(self, "initial_data"):
|
||||
initial_data = set(self.initial_data.keys()) - {"id", "type"}
|
||||
unknown_keys = initial_data - set(self.fields.keys())
|
||||
if unknown_keys:
|
||||
raise ValidationError(f"Invalid fields: {unknown_keys}")
|
||||
return data
|
||||
|
||||
|
||||
# Integrations
|
||||
from api.v1.serializer_utils.base import BaseValidateSerializer
|
||||
|
||||
|
||||
class S3ConfigSerializer(BaseValidateSerializer):
|
||||
|
||||
21
api/src/backend/api/v1/serializer_utils/processors.py
Normal file
21
api/src/backend/api/v1/serializer_utils/processors.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
|
||||
from api.v1.serializer_utils.base import YamlOrJsonField
|
||||
|
||||
from prowler.lib.mutelist.mutelist import mutelist_schema
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Mutelist",
|
||||
"properties": {"Mutelist": mutelist_schema},
|
||||
"additionalProperties": False,
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
class ProcessorConfigField(YamlOrJsonField):
|
||||
pass
|
||||
@@ -176,6 +176,43 @@ from rest_framework_json_api import serializers
|
||||
},
|
||||
"required": ["kubeconfig_content"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GitHub Personal Access Token",
|
||||
"properties": {
|
||||
"personal_access_token": {
|
||||
"type": "string",
|
||||
"description": "GitHub personal access token for authentication.",
|
||||
}
|
||||
},
|
||||
"required": ["personal_access_token"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GitHub OAuth App Token",
|
||||
"properties": {
|
||||
"oauth_app_token": {
|
||||
"type": "string",
|
||||
"description": "GitHub OAuth App token for authentication.",
|
||||
}
|
||||
},
|
||||
"required": ["oauth_app_token"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GitHub App Credentials",
|
||||
"properties": {
|
||||
"github_app_id": {
|
||||
"type": "integer",
|
||||
"description": "GitHub App ID for authentication.",
|
||||
},
|
||||
"github_app_key": {
|
||||
"type": "string",
|
||||
"description": "Path to the GitHub App private key file.",
|
||||
},
|
||||
},
|
||||
"required": ["github_app_id", "github_app_key"],
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
@@ -7,7 +7,9 @@ from django.contrib.auth.models import update_last_login
|
||||
from django.contrib.auth.password_validation import validate_password
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from jwt.exceptions import InvalidKeyError
|
||||
from rest_framework.validators import UniqueTogetherValidator
|
||||
from rest_framework_json_api import serializers
|
||||
from rest_framework_json_api.relations import SerializerMethodResourceRelatedField
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from rest_framework_simplejwt.exceptions import TokenError
|
||||
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
|
||||
@@ -19,7 +21,9 @@ from api.models import (
|
||||
IntegrationProviderRelationship,
|
||||
Invitation,
|
||||
InvitationRoleRelationship,
|
||||
LighthouseConfiguration,
|
||||
Membership,
|
||||
Processor,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
ProviderGroupMembership,
|
||||
@@ -43,7 +47,9 @@ from api.v1.serializer_utils.integrations import (
|
||||
IntegrationCredentialField,
|
||||
S3ConfigSerializer,
|
||||
)
|
||||
from api.v1.serializer_utils.processors import ProcessorConfigField
|
||||
from api.v1.serializer_utils.providers import ProviderSecretField
|
||||
from prowler.lib.mutelist.mutelist import Mutelist
|
||||
|
||||
# Tokens
|
||||
|
||||
@@ -129,6 +135,12 @@ class TokenSerializer(BaseTokenSerializer):
|
||||
|
||||
class TokenSocialLoginSerializer(BaseTokenSerializer):
|
||||
email = serializers.EmailField(write_only=True)
|
||||
tenant_id = serializers.UUIDField(
|
||||
write_only=True,
|
||||
required=False,
|
||||
help_text="If not provided, the tenant ID of the first membership that was added"
|
||||
" to the user will be used.",
|
||||
)
|
||||
|
||||
# Output tokens
|
||||
refresh = serializers.CharField(read_only=True)
|
||||
@@ -850,6 +862,7 @@ class ScanSerializer(RLSSerializer):
|
||||
"completed_at",
|
||||
"scheduled_at",
|
||||
"next_scan_at",
|
||||
"processor",
|
||||
"url",
|
||||
]
|
||||
|
||||
@@ -970,6 +983,14 @@ class ScanComplianceReportSerializer(serializers.Serializer):
|
||||
fields = ["id", "name"]
|
||||
|
||||
|
||||
class ScanThreatscoreReportSerializer(serializers.Serializer):
|
||||
id = serializers.CharField(source="scan")
|
||||
|
||||
class Meta:
|
||||
resource_name = "scan-reports"
|
||||
fields = ["id"]
|
||||
|
||||
|
||||
class ResourceTagSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the ResourceTag model
|
||||
@@ -987,8 +1008,12 @@ class ResourceSerializer(RLSSerializer):
|
||||
|
||||
tags = serializers.SerializerMethodField()
|
||||
type_ = serializers.CharField(read_only=True)
|
||||
failed_findings_count = serializers.IntegerField(read_only=True)
|
||||
|
||||
findings = serializers.ResourceRelatedField(many=True, read_only=True)
|
||||
findings = SerializerMethodResourceRelatedField(
|
||||
many=True,
|
||||
read_only=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Resource
|
||||
@@ -1004,6 +1029,7 @@ class ResourceSerializer(RLSSerializer):
|
||||
"tags",
|
||||
"provider",
|
||||
"findings",
|
||||
"failed_findings_count",
|
||||
"url",
|
||||
]
|
||||
extra_kwargs = {
|
||||
@@ -1013,8 +1039,8 @@ class ResourceSerializer(RLSSerializer):
|
||||
}
|
||||
|
||||
included_serializers = {
|
||||
"findings": "api.v1.serializers.FindingSerializer",
|
||||
"provider": "api.v1.serializers.ProviderSerializer",
|
||||
"findings": "api.v1.serializers.FindingIncludeSerializer",
|
||||
"provider": "api.v1.serializers.ProviderIncludeSerializer",
|
||||
}
|
||||
|
||||
@extend_schema_field(
|
||||
@@ -1025,6 +1051,10 @@ class ResourceSerializer(RLSSerializer):
|
||||
}
|
||||
)
|
||||
def get_tags(self, obj):
|
||||
# Use prefetched tags if available to avoid N+1 queries
|
||||
if hasattr(obj, "prefetched_tags"):
|
||||
return {tag.key: tag.value for tag in obj.prefetched_tags}
|
||||
# Fallback to the original method if prefetch is not available
|
||||
return obj.get_tags(self.context.get("tenant_id"))
|
||||
|
||||
def get_fields(self):
|
||||
@@ -1034,10 +1064,17 @@ class ResourceSerializer(RLSSerializer):
|
||||
fields["type"] = type_
|
||||
return fields
|
||||
|
||||
def get_findings(self, obj):
|
||||
return (
|
||||
obj.latest_findings
|
||||
if hasattr(obj, "latest_findings")
|
||||
else obj.findings.all()
|
||||
)
|
||||
|
||||
|
||||
class ResourceIncludeSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the Resource model.
|
||||
Serializer for the included Resource model.
|
||||
"""
|
||||
|
||||
tags = serializers.SerializerMethodField()
|
||||
@@ -1070,6 +1107,10 @@ class ResourceIncludeSerializer(RLSSerializer):
|
||||
}
|
||||
)
|
||||
def get_tags(self, obj):
|
||||
# Use prefetched tags if available to avoid N+1 queries
|
||||
if hasattr(obj, "prefetched_tags"):
|
||||
return {tag.key: tag.value for tag in obj.prefetched_tags}
|
||||
# Fallback to the original method if prefetch is not available
|
||||
return obj.get_tags(self.context.get("tenant_id"))
|
||||
|
||||
def get_fields(self):
|
||||
@@ -1080,6 +1121,17 @@ class ResourceIncludeSerializer(RLSSerializer):
|
||||
return fields
|
||||
|
||||
|
||||
class ResourceMetadataSerializer(serializers.Serializer):
|
||||
services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
|
||||
regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
|
||||
types = serializers.ListField(child=serializers.CharField(), allow_empty=True)
|
||||
# Temporarily disabled until we implement tag filtering in the UI
|
||||
# tags = serializers.JSONField(help_text="Tags are described as key-value pairs.")
|
||||
|
||||
class Meta:
|
||||
resource_name = "resources-metadata"
|
||||
|
||||
|
||||
class FindingSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the Finding model.
|
||||
@@ -1103,6 +1155,7 @@ class FindingSerializer(RLSSerializer):
|
||||
"updated_at",
|
||||
"first_seen_at",
|
||||
"muted",
|
||||
"muted_reason",
|
||||
"url",
|
||||
# Relationships
|
||||
"scan",
|
||||
@@ -1115,6 +1168,28 @@ class FindingSerializer(RLSSerializer):
|
||||
}
|
||||
|
||||
|
||||
class FindingIncludeSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the include Finding model.
|
||||
"""
|
||||
|
||||
class Meta:
|
||||
model = Finding
|
||||
fields = [
|
||||
"id",
|
||||
"uid",
|
||||
"status",
|
||||
"severity",
|
||||
"check_id",
|
||||
"check_metadata",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"first_seen_at",
|
||||
"muted",
|
||||
"muted_reason",
|
||||
]
|
||||
|
||||
|
||||
# To be removed when the related endpoint is removed as well
|
||||
class FindingDynamicFilterSerializer(serializers.Serializer):
|
||||
services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
|
||||
@@ -1150,6 +1225,8 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
|
||||
serializer = AzureProviderSecret(data=secret)
|
||||
elif provider_type == Provider.ProviderChoices.GCP.value:
|
||||
serializer = GCPProviderSecret(data=secret)
|
||||
elif provider_type == Provider.ProviderChoices.GITHUB.value:
|
||||
serializer = GithubProviderSecret(data=secret)
|
||||
elif provider_type == Provider.ProviderChoices.KUBERNETES.value:
|
||||
serializer = KubernetesProviderSecret(data=secret)
|
||||
elif provider_type == Provider.ProviderChoices.M365.value:
|
||||
@@ -1199,8 +1276,8 @@ class M365ProviderSecret(serializers.Serializer):
|
||||
client_id = serializers.CharField()
|
||||
client_secret = serializers.CharField()
|
||||
tenant_id = serializers.CharField()
|
||||
user = serializers.EmailField()
|
||||
password = serializers.CharField()
|
||||
user = serializers.EmailField(required=False)
|
||||
password = serializers.CharField(required=False)
|
||||
|
||||
class Meta:
|
||||
resource_name = "provider-secrets"
|
||||
@@ -1229,6 +1306,16 @@ class KubernetesProviderSecret(serializers.Serializer):
|
||||
resource_name = "provider-secrets"
|
||||
|
||||
|
||||
class GithubProviderSecret(serializers.Serializer):
|
||||
personal_access_token = serializers.CharField(required=False)
|
||||
oauth_app_token = serializers.CharField(required=False)
|
||||
github_app_id = serializers.IntegerField(required=False)
|
||||
github_app_key_content = serializers.CharField(required=False)
|
||||
|
||||
class Meta:
|
||||
resource_name = "provider-secrets"
|
||||
|
||||
|
||||
class AWSRoleAssumptionProviderSecret(serializers.Serializer):
|
||||
role_arn = serializers.CharField()
|
||||
external_id = serializers.CharField()
|
||||
@@ -1308,12 +1395,13 @@ class ProviderSecretUpdateSerializer(BaseWriteProviderSecretSerializer):
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
"provider": {"read_only": True},
|
||||
"secret_type": {"read_only": True},
|
||||
"secret_type": {"required": False},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
provider = self.instance.provider
|
||||
secret_type = self.instance.secret_type
|
||||
# To allow updating a secret with the same type without making the `secret_type` mandatory
|
||||
secret_type = attrs.get("secret_type") or self.instance.secret_type
|
||||
secret = attrs.get("secret")
|
||||
|
||||
validated_attrs = super().validate(attrs)
|
||||
@@ -1722,6 +1810,8 @@ class ComplianceOverviewDetailSerializer(serializers.Serializer):
|
||||
|
||||
class ComplianceOverviewAttributesSerializer(serializers.Serializer):
|
||||
id = serializers.CharField()
|
||||
framework_description = serializers.CharField()
|
||||
name = serializers.CharField()
|
||||
framework = serializers.CharField()
|
||||
version = serializers.CharField()
|
||||
description = serializers.CharField()
|
||||
@@ -2062,6 +2152,128 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
# Processors
|
||||
|
||||
|
||||
class ProcessorSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the Processor model.
|
||||
"""
|
||||
|
||||
configuration = ProcessorConfigField()
|
||||
|
||||
class Meta:
|
||||
model = Processor
|
||||
fields = [
|
||||
"id",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"processor_type",
|
||||
"configuration",
|
||||
"url",
|
||||
]
|
||||
|
||||
|
||||
class ProcessorCreateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
configuration = ProcessorConfigField(required=True)
|
||||
|
||||
class Meta:
|
||||
model = Processor
|
||||
fields = [
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"processor_type",
|
||||
"configuration",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
}
|
||||
validators = [
|
||||
UniqueTogetherValidator(
|
||||
queryset=Processor.objects.all(),
|
||||
fields=["processor_type"],
|
||||
message="A processor with the same type already exists.",
|
||||
)
|
||||
]
|
||||
|
||||
def validate(self, attrs):
|
||||
validated_attrs = super().validate(attrs)
|
||||
self.validate_processor_data(attrs)
|
||||
return validated_attrs
|
||||
|
||||
def validate_processor_data(self, attrs):
|
||||
processor_type = attrs.get("processor_type")
|
||||
configuration = attrs.get("configuration")
|
||||
if processor_type == "mutelist":
|
||||
self.validate_mutelist_configuration(configuration)
|
||||
|
||||
def validate_mutelist_configuration(self, configuration):
|
||||
if not isinstance(configuration, dict):
|
||||
raise serializers.ValidationError("Invalid Mutelist configuration.")
|
||||
|
||||
mutelist_configuration = configuration.get("Mutelist", {})
|
||||
|
||||
if not mutelist_configuration:
|
||||
raise serializers.ValidationError(
|
||||
"Invalid Mutelist configuration: 'Mutelist' is a required property."
|
||||
)
|
||||
|
||||
try:
|
||||
Mutelist.validate_mutelist(mutelist_configuration, raise_on_exception=True)
|
||||
return
|
||||
except Exception as error:
|
||||
raise serializers.ValidationError(
|
||||
f"Invalid Mutelist configuration: {error}"
|
||||
)
|
||||
|
||||
|
||||
class ProcessorUpdateSerializer(BaseWriteSerializer):
|
||||
configuration = ProcessorConfigField(required=True)
|
||||
|
||||
class Meta:
|
||||
model = Processor
|
||||
fields = [
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"configuration",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
validated_attrs = super().validate(attrs)
|
||||
self.validate_processor_data(attrs)
|
||||
return validated_attrs
|
||||
|
||||
def validate_processor_data(self, attrs):
|
||||
processor_type = self.instance.processor_type
|
||||
configuration = attrs.get("configuration")
|
||||
if processor_type == "mutelist":
|
||||
self.validate_mutelist_configuration(configuration)
|
||||
|
||||
def validate_mutelist_configuration(self, configuration):
|
||||
if not isinstance(configuration, dict):
|
||||
raise serializers.ValidationError("Invalid Mutelist configuration.")
|
||||
|
||||
mutelist_configuration = configuration.get("Mutelist", {})
|
||||
|
||||
if not mutelist_configuration:
|
||||
raise serializers.ValidationError(
|
||||
"Invalid Mutelist configuration: 'Mutelist' is a required property."
|
||||
)
|
||||
|
||||
try:
|
||||
Mutelist.validate_mutelist(mutelist_configuration, raise_on_exception=True)
|
||||
return
|
||||
except Exception as error:
|
||||
raise serializers.ValidationError(
|
||||
f"Invalid Mutelist configuration: {error}"
|
||||
)
|
||||
|
||||
|
||||
# SSO
|
||||
|
||||
|
||||
@@ -2082,3 +2294,134 @@ class SAMLConfigurationSerializer(RLSSerializer):
|
||||
model = SAMLConfiguration
|
||||
fields = ["id", "email_domain", "metadata_xml", "created_at", "updated_at"]
|
||||
read_only_fields = ["id", "created_at", "updated_at"]
|
||||
|
||||
|
||||
class LighthouseConfigSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the LighthouseConfig model.
|
||||
"""
|
||||
|
||||
api_key = serializers.CharField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = LighthouseConfiguration
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"api_key",
|
||||
"model",
|
||||
"temperature",
|
||||
"max_tokens",
|
||||
"business_context",
|
||||
"is_active",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"url",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"is_active": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
}
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
# Check if api_key is specifically requested in fields param
|
||||
fields_param = self.context.get("request", None) and self.context[
|
||||
"request"
|
||||
].query_params.get("fields[lighthouse-config]", "")
|
||||
if fields_param == "api_key":
|
||||
# Return decrypted key if specifically requested
|
||||
data["api_key"] = instance.api_key_decoded if instance.api_key else None
|
||||
else:
|
||||
# Return masked key for general requests
|
||||
data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
|
||||
return data
|
||||
|
||||
|
||||
class LighthouseConfigCreateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"""Serializer for creating new Lighthouse configurations."""
|
||||
|
||||
api_key = serializers.CharField(write_only=True, required=True)
|
||||
|
||||
class Meta:
|
||||
model = LighthouseConfiguration
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"api_key",
|
||||
"model",
|
||||
"temperature",
|
||||
"max_tokens",
|
||||
"business_context",
|
||||
"is_active",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"is_active": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
tenant_id = self.context.get("request").tenant_id
|
||||
if LighthouseConfiguration.objects.filter(tenant_id=tenant_id).exists():
|
||||
raise serializers.ValidationError(
|
||||
{
|
||||
"tenant_id": "Lighthouse configuration already exists for this tenant."
|
||||
}
|
||||
)
|
||||
return super().validate(attrs)
|
||||
|
||||
def create(self, validated_data):
|
||||
api_key = validated_data.pop("api_key")
|
||||
instance = super().create(validated_data)
|
||||
instance.api_key_decoded = api_key
|
||||
instance.save()
|
||||
return instance
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
# Always mask the API key in the response
|
||||
data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
|
||||
return data
|
||||
|
||||
|
||||
class LighthouseConfigUpdateSerializer(BaseWriteSerializer):
|
||||
"""
|
||||
Serializer for updating LighthouseConfig instances.
|
||||
"""
|
||||
|
||||
api_key = serializers.CharField(write_only=True, required=False)
|
||||
|
||||
class Meta:
|
||||
model = LighthouseConfiguration
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"api_key",
|
||||
"model",
|
||||
"temperature",
|
||||
"max_tokens",
|
||||
"business_context",
|
||||
"is_active",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"is_active": {"read_only": True},
|
||||
"name": {"required": False},
|
||||
"model": {"required": False},
|
||||
"temperature": {"required": False},
|
||||
"max_tokens": {"required": False},
|
||||
}
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
api_key = validated_data.pop("api_key", None)
|
||||
instance = super().update(instance, validated_data)
|
||||
if api_key:
|
||||
instance.api_key_decoded = api_key
|
||||
instance.save()
|
||||
return instance
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
from allauth.socialaccount.providers.saml.views import ACSView, MetadataView, SLSView
|
||||
from django.urls import include, path
|
||||
from drf_spectacular.views import SpectacularRedocView
|
||||
from rest_framework_nested import routers
|
||||
|
||||
from api.v1.views import (
|
||||
ComplianceOverviewViewSet,
|
||||
CustomSAMLLoginView,
|
||||
CustomTokenObtainView,
|
||||
CustomTokenRefreshView,
|
||||
CustomTokenSwitchTenantView,
|
||||
@@ -13,8 +15,10 @@ from api.v1.views import (
|
||||
IntegrationViewSet,
|
||||
InvitationAcceptViewSet,
|
||||
InvitationViewSet,
|
||||
LighthouseConfigViewSet,
|
||||
MembershipViewSet,
|
||||
OverviewViewSet,
|
||||
ProcessorViewSet,
|
||||
ProviderGroupProvidersRelationshipView,
|
||||
ProviderGroupViewSet,
|
||||
ProviderSecretViewSet,
|
||||
@@ -24,6 +28,7 @@ from api.v1.views import (
|
||||
RoleViewSet,
|
||||
SAMLConfigurationViewSet,
|
||||
SAMLInitiateAPIView,
|
||||
SAMLTokenValidateView,
|
||||
ScanViewSet,
|
||||
ScheduleViewSet,
|
||||
SchemaView,
|
||||
@@ -52,7 +57,13 @@ router.register(
|
||||
router.register(r"overviews", OverviewViewSet, basename="overview")
|
||||
router.register(r"schedules", ScheduleViewSet, basename="schedule")
|
||||
router.register(r"integrations", IntegrationViewSet, basename="integration")
|
||||
router.register(r"processors", ProcessorViewSet, basename="processor")
|
||||
router.register(r"saml-config", SAMLConfigurationViewSet, basename="saml-config")
|
||||
router.register(
|
||||
r"lighthouse-configurations",
|
||||
LighthouseConfigViewSet,
|
||||
basename="lighthouseconfiguration",
|
||||
)
|
||||
|
||||
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
|
||||
tenants_router.register(
|
||||
@@ -120,13 +131,32 @@ urlpatterns = [
|
||||
path(
|
||||
"auth/saml/initiate/", SAMLInitiateAPIView.as_view(), name="api_saml_initiate"
|
||||
),
|
||||
# Allauth SAML endpoints for tenants
|
||||
path("accounts/", include("allauth.urls")),
|
||||
path(
|
||||
"api/v1/accounts/saml/<organization_slug>/acs/finish/",
|
||||
"accounts/saml/<organization_slug>/login/",
|
||||
CustomSAMLLoginView.as_view(),
|
||||
name="saml_login",
|
||||
),
|
||||
path(
|
||||
"accounts/saml/<organization_slug>/acs/",
|
||||
ACSView.as_view(),
|
||||
name="saml_acs",
|
||||
),
|
||||
path(
|
||||
"accounts/saml/<organization_slug>/acs/finish/",
|
||||
TenantFinishACSView.as_view(),
|
||||
name="saml_finish_acs",
|
||||
),
|
||||
path(
|
||||
"accounts/saml/<organization_slug>/sls/",
|
||||
SLSView.as_view(),
|
||||
name="saml_sls",
|
||||
),
|
||||
path(
|
||||
"accounts/saml/<organization_slug>/metadata/",
|
||||
MetadataView.as_view(),
|
||||
name="saml_metadata",
|
||||
),
|
||||
path("tokens/saml", SAMLTokenValidateView.as_view(), name="token-saml"),
|
||||
path("tokens/google", GoogleSocialLoginView.as_view(), name="token-google"),
|
||||
path("tokens/github", GithubSocialLoginView.as_view(), name="token-github"),
|
||||
path("", include(router.urls)),
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,3 +1,5 @@
|
||||
import string
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
@@ -20,3 +22,89 @@ class MaximumLengthValidator:
|
||||
return _(
|
||||
f"Your password must contain no more than {self.max_length} characters."
|
||||
)
|
||||
|
||||
|
||||
class SpecialCharactersValidator:
|
||||
def __init__(self, special_characters=None, min_special_characters=1):
|
||||
# Use string.punctuation if no custom characters provided
|
||||
self.special_characters = special_characters or string.punctuation
|
||||
self.min_special_characters = min_special_characters
|
||||
|
||||
def validate(self, password, user=None):
|
||||
if (
|
||||
sum(1 for char in password if char in self.special_characters)
|
||||
< self.min_special_characters
|
||||
):
|
||||
raise ValidationError(
|
||||
_("This password must contain at least one special character."),
|
||||
code="password_no_special_characters",
|
||||
params={
|
||||
"special_characters": self.special_characters,
|
||||
"min_special_characters": self.min_special_characters,
|
||||
},
|
||||
)
|
||||
|
||||
def get_help_text(self):
|
||||
return _(
|
||||
f"Your password must contain at least one special character from: {self.special_characters}"
|
||||
)
|
||||
|
||||
|
||||
class UppercaseValidator:
|
||||
def __init__(self, min_uppercase=1):
|
||||
self.min_uppercase = min_uppercase
|
||||
|
||||
def validate(self, password, user=None):
|
||||
if sum(1 for char in password if char.isupper()) < self.min_uppercase:
|
||||
raise ValidationError(
|
||||
_(
|
||||
"This password must contain at least %(min_uppercase)d uppercase letter."
|
||||
),
|
||||
code="password_no_uppercase_letters",
|
||||
params={"min_uppercase": self.min_uppercase},
|
||||
)
|
||||
|
||||
def get_help_text(self):
|
||||
return _(
|
||||
f"Your password must contain at least {self.min_uppercase} uppercase letter."
|
||||
)
|
||||
|
||||
|
||||
class LowercaseValidator:
|
||||
def __init__(self, min_lowercase=1):
|
||||
self.min_lowercase = min_lowercase
|
||||
|
||||
def validate(self, password, user=None):
|
||||
if sum(1 for char in password if char.islower()) < self.min_lowercase:
|
||||
raise ValidationError(
|
||||
_(
|
||||
"This password must contain at least %(min_lowercase)d lowercase letter."
|
||||
),
|
||||
code="password_no_lowercase_letters",
|
||||
params={"min_lowercase": self.min_lowercase},
|
||||
)
|
||||
|
||||
def get_help_text(self):
|
||||
return _(
|
||||
f"Your password must contain at least {self.min_lowercase} lowercase letter."
|
||||
)
|
||||
|
||||
|
||||
class NumericValidator:
|
||||
def __init__(self, min_numeric=1):
|
||||
self.min_numeric = min_numeric
|
||||
|
||||
def validate(self, password, user=None):
|
||||
if sum(1 for char in password if char.isdigit()) < self.min_numeric:
|
||||
raise ValidationError(
|
||||
_(
|
||||
"This password must contain at least %(min_numeric)d numeric character."
|
||||
),
|
||||
code="password_no_numeric_characters",
|
||||
params={"min_numeric": self.min_numeric},
|
||||
)
|
||||
|
||||
def get_help_text(self):
|
||||
return _(
|
||||
f"Your password must contain at least {self.min_numeric} numeric character."
|
||||
)
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
import warnings
|
||||
|
||||
from celery import Celery, Task
|
||||
from config.env import env
|
||||
|
||||
# Suppress specific warnings from django-rest-auth: https://github.com/iMerica/dj-rest-auth/issues/684
|
||||
warnings.filterwarnings(
|
||||
"ignore", category=UserWarning, module="dj_rest_auth.registration.serializers"
|
||||
)
|
||||
|
||||
BROKER_VISIBILITY_TIMEOUT = env.int("DJANGO_BROKER_VISIBILITY_TIMEOUT", default=86400)
|
||||
|
||||
celery_app = Celery("tasks")
|
||||
|
||||
@@ -11,6 +11,7 @@ SECRET_KEY = env("SECRET_KEY", default="secret")
|
||||
DEBUG = env.bool("DJANGO_DEBUG", default=False)
|
||||
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
|
||||
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
|
||||
USE_X_FORWARDED_HOST = True
|
||||
|
||||
# Application definition
|
||||
|
||||
@@ -131,7 +132,6 @@ DJANGO_GUID = {
|
||||
}
|
||||
|
||||
DATABASE_ROUTERS = ["api.db_router.MainRouter"]
|
||||
POSTGRES_EXTRA_DB_BACKEND_BASE = "database_backend"
|
||||
|
||||
|
||||
# Password validation
|
||||
@@ -159,6 +159,30 @@ AUTH_PASSWORD_VALIDATORS = [
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
|
||||
},
|
||||
{
|
||||
"NAME": "api.validators.SpecialCharactersValidator",
|
||||
"OPTIONS": {
|
||||
"min_special_characters": 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
"NAME": "api.validators.UppercaseValidator",
|
||||
"OPTIONS": {
|
||||
"min_uppercase": 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
"NAME": "api.validators.LowercaseValidator",
|
||||
"OPTIONS": {
|
||||
"min_lowercase": 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
"NAME": "api.validators.NumericValidator",
|
||||
"OPTIONS": {
|
||||
"min_numeric": 1,
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
SIMPLE_JWT = {
|
||||
@@ -249,3 +273,7 @@ X_FRAME_OPTIONS = "DENY"
|
||||
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"
|
||||
|
||||
DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)
|
||||
|
||||
# SAML requirement
|
||||
CSRF_COOKIE_SECURE = True
|
||||
SESSION_COOKIE_SECURE = True
|
||||
|
||||
@@ -4,6 +4,7 @@ from config.env import env
|
||||
IGNORED_EXCEPTIONS = [
|
||||
# Provider is not connected due to credentials errors
|
||||
"is not connected",
|
||||
"ProviderConnectionError",
|
||||
# Authentication Errors from AWS
|
||||
"InvalidToken",
|
||||
"AccessDeniedException",
|
||||
@@ -16,7 +17,7 @@ IGNORED_EXCEPTIONS = [
|
||||
"InternalServerErrorException",
|
||||
"AccessDenied",
|
||||
"No Shodan API Key", # Shodan Check
|
||||
"RequestLimitExceeded", # For now we don't want to log the RequestLimitExceeded errors
|
||||
"RequestLimitExceeded", # For now, we don't want to log the RequestLimitExceeded errors
|
||||
"ThrottlingException",
|
||||
"Rate exceeded",
|
||||
"SubscriptionRequiredException",
|
||||
@@ -42,7 +43,9 @@ IGNORED_EXCEPTIONS = [
|
||||
"AWSAccessKeyIDInvalidError",
|
||||
"AWSSessionTokenExpiredError",
|
||||
"EndpointConnectionError", # AWS Service is not available in a region
|
||||
"Pool is closed", # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
|
||||
# The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an
|
||||
# unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
|
||||
"Pool is closed",
|
||||
# Authentication Errors from GCP
|
||||
"ClientAuthenticationError",
|
||||
"AuthorizationFailed",
|
||||
@@ -71,7 +74,7 @@ IGNORED_EXCEPTIONS = [
|
||||
|
||||
def before_send(event, hint):
|
||||
"""
|
||||
before_send handles the Sentry events in order to sent them or not
|
||||
before_send handles the Sentry events in order to send them or not
|
||||
"""
|
||||
# Ignore logs with the ignored_exceptions
|
||||
# https://docs.python.org/3/library/logging.html#logrecord-objects
|
||||
@@ -79,9 +82,16 @@ def before_send(event, hint):
|
||||
log_msg = hint["log_record"].msg
|
||||
log_lvl = hint["log_record"].levelno
|
||||
|
||||
# Handle Error events and discard the rest
|
||||
if log_lvl == 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
|
||||
return
|
||||
# Handle Error and Critical events and discard the rest
|
||||
if log_lvl <= 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
|
||||
return None # Explicitly return None to drop the event
|
||||
|
||||
# Ignore exceptions with the ignored_exceptions
|
||||
if "exc_info" in hint and hint["exc_info"]:
|
||||
exc_value = str(hint["exc_info"][1])
|
||||
if any(ignored in exc_value for ignored in IGNORED_EXCEPTIONS):
|
||||
return None # Explicitly return None to drop the event
|
||||
|
||||
return event
|
||||
|
||||
|
||||
|
||||
@@ -25,9 +25,18 @@ SOCIALACCOUNT_EMAIL_AUTHENTICATION = True
|
||||
SOCIALACCOUNT_EMAIL_AUTHENTICATION_AUTO_CONNECT = True
|
||||
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
|
||||
|
||||
# SAML keys
|
||||
SAML_PUBLIC_CERT = env("SAML_PUBLIC_CERT", default="")
|
||||
SAML_PRIVATE_KEY = env("SAML_PRIVATE_KEY", default="")
|
||||
|
||||
# def inline(pem: str) -> str:
|
||||
# return "".join(
|
||||
# line.strip()
|
||||
# for line in pem.splitlines()
|
||||
# if "CERTIFICATE" not in line and "KEY" not in line
|
||||
# )
|
||||
|
||||
|
||||
# # SAML keys (TODO: Validate certificates)
|
||||
# SAML_PUBLIC_CERT = inline(env("SAML_PUBLIC_CERT", default=""))
|
||||
# SAML_PRIVATE_KEY = inline(env("SAML_PRIVATE_KEY", default=""))
|
||||
|
||||
SOCIALACCOUNT_PROVIDERS = {
|
||||
"google": {
|
||||
@@ -60,12 +69,14 @@ SOCIALACCOUNT_PROVIDERS = {
|
||||
"entity_id": "urn:prowler.com:sp",
|
||||
},
|
||||
"advanced": {
|
||||
"x509cert": SAML_PUBLIC_CERT,
|
||||
"private_key": SAML_PRIVATE_KEY,
|
||||
# TODO: Validate certificates
|
||||
# "x509cert": SAML_PUBLIC_CERT,
|
||||
# "private_key": SAML_PRIVATE_KEY,
|
||||
# "authn_request_signed": True,
|
||||
# "want_message_signed": True,
|
||||
# "want_assertion_signed": True,
|
||||
"reject_idp_initiated_sso": False,
|
||||
"name_id_format": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
|
||||
"authn_request_signed": True,
|
||||
"want_assertion_signed": True,
|
||||
"want_message_signed": True,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -21,12 +21,15 @@ from api.models import (
|
||||
Integration,
|
||||
IntegrationProviderRelationship,
|
||||
Invitation,
|
||||
LighthouseConfiguration,
|
||||
Membership,
|
||||
Processor,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
ProviderSecret,
|
||||
Resource,
|
||||
ResourceTag,
|
||||
ResourceTagMapping,
|
||||
Role,
|
||||
SAMLConfiguration,
|
||||
SAMLDomainIndex,
|
||||
@@ -43,12 +46,19 @@ from api.v1.serializers import TokenSerializer
|
||||
from prowler.lib.check.models import Severity
|
||||
from prowler.lib.outputs.finding import Status
|
||||
|
||||
TODAY = str(datetime.today().date())
|
||||
API_JSON_CONTENT_TYPE = "application/vnd.api+json"
|
||||
NO_TENANT_HTTP_STATUS = status.HTTP_401_UNAUTHORIZED
|
||||
TEST_USER = "dev@prowler.com"
|
||||
TEST_PASSWORD = "testing_psswd"
|
||||
|
||||
|
||||
def today_after_n_days(n_days: int) -> str:
|
||||
return datetime.strftime(
|
||||
datetime.today().date() + timedelta(days=n_days), "%Y-%m-%d"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def enforce_test_user_db_connection(django_db_setup, django_db_blocker):
|
||||
"""Ensure tests use the test user for database connections."""
|
||||
@@ -380,8 +390,27 @@ def providers_fixture(tenants_fixture):
|
||||
tenant_id=tenant.id,
|
||||
scanner_args={"key1": "value1", "key2": {"key21": "value21"}},
|
||||
)
|
||||
provider6 = Provider.objects.create(
|
||||
provider="m365",
|
||||
uid="m365.test.com",
|
||||
alias="m365_testing",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
return provider1, provider2, provider3, provider4, provider5
|
||||
return provider1, provider2, provider3, provider4, provider5, provider6
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def processor_fixture(tenants_fixture):
|
||||
tenant, *_ = tenants_fixture
|
||||
processor = Processor.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
processor_type="mutelist",
|
||||
configuration="Mutelist:\n Accounts:\n *:\n Checks:\n iam_user_hardware_mfa_enabled:\n "
|
||||
" Regions:\n - *\n Resources:\n - *",
|
||||
)
|
||||
|
||||
return processor
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -633,6 +662,7 @@ def findings_fixture(scans_fixture, resources_fixture):
|
||||
check_metadata={
|
||||
"CheckId": "test_check_id",
|
||||
"Description": "test description apple sauce",
|
||||
"servicename": "ec2",
|
||||
},
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
@@ -659,6 +689,7 @@ def findings_fixture(scans_fixture, resources_fixture):
|
||||
check_metadata={
|
||||
"CheckId": "test_check_id",
|
||||
"Description": "test description orange juice",
|
||||
"servicename": "s3",
|
||||
},
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
muted=True,
|
||||
@@ -880,6 +911,22 @@ def compliance_requirements_overviews_fixture(scans_fixture, tenants_fixture):
|
||||
total_checks=3,
|
||||
)
|
||||
|
||||
# Create another compliance framework for testing MITRE ATT&CK
|
||||
requirement_overview7 = ComplianceRequirementOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="mitre_attack_aws",
|
||||
framework="MITRE-ATTACK",
|
||||
version="1.0",
|
||||
description="MITRE ATT&CK",
|
||||
region="eu-west-1",
|
||||
requirement_id="mitre_requirement1",
|
||||
requirement_status=StatusChoices.FAIL,
|
||||
passed_checks=0,
|
||||
failed_checks=0,
|
||||
total_checks=0,
|
||||
)
|
||||
|
||||
return (
|
||||
requirement_overview1,
|
||||
requirement_overview2,
|
||||
@@ -887,6 +934,7 @@ def compliance_requirements_overviews_fixture(scans_fixture, tenants_fixture):
|
||||
requirement_overview4,
|
||||
requirement_overview5,
|
||||
requirement_overview6,
|
||||
requirement_overview7,
|
||||
)
|
||||
|
||||
|
||||
@@ -1042,6 +1090,20 @@ def backfill_scan_metadata_fixture(scans_fixture, findings_fixture):
|
||||
backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def lighthouse_config_fixture(authenticated_client, tenants_fixture):
|
||||
return LighthouseConfiguration.objects.create(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
name="OpenAI",
|
||||
api_key_decoded="sk-test1234567890T3BlbkFJtest1234567890",
|
||||
model="gpt-4o",
|
||||
temperature=0,
|
||||
max_tokens=4000,
|
||||
business_context="Test business context",
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def latest_scan_finding(authenticated_client, providers_fixture, resources_fixture):
|
||||
provider = providers_fixture[0]
|
||||
@@ -1083,10 +1145,73 @@ def latest_scan_finding(authenticated_client, providers_fixture, resources_fixtu
|
||||
return finding
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def latest_scan_resource(authenticated_client, providers_fixture):
|
||||
provider = providers_fixture[0]
|
||||
tenant_id = str(providers_fixture[0].tenant_id)
|
||||
scan = Scan.objects.create(
|
||||
name="latest completed scan for resource",
|
||||
provider=provider,
|
||||
trigger=Scan.TriggerChoices.MANUAL,
|
||||
state=StateChoices.COMPLETED,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
resource = Resource.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
provider=provider,
|
||||
uid="latest_resource_uid",
|
||||
name="Latest Resource",
|
||||
region="us-east-1",
|
||||
service="ec2",
|
||||
type="instance",
|
||||
metadata='{"test": "metadata"}',
|
||||
details='{"test": "details"}',
|
||||
)
|
||||
|
||||
resource_tag = ResourceTag.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
key="environment",
|
||||
value="test",
|
||||
)
|
||||
ResourceTagMapping.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
resource=resource,
|
||||
tag=resource_tag,
|
||||
)
|
||||
|
||||
finding = Finding.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
uid="test_finding_uid_latest",
|
||||
scan=scan,
|
||||
delta="new",
|
||||
status=Status.FAIL,
|
||||
status_extended="test status extended ",
|
||||
impact=Severity.critical,
|
||||
impact_extended="test impact extended",
|
||||
severity=Severity.critical,
|
||||
raw_result={
|
||||
"status": Status.FAIL,
|
||||
"impact": Severity.critical,
|
||||
"severity": Severity.critical,
|
||||
},
|
||||
tags={"test": "latest"},
|
||||
check_id="test_check_id_latest",
|
||||
check_metadata={
|
||||
"CheckId": "test_check_id_latest",
|
||||
"Description": "test description latest",
|
||||
},
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
finding.add_resources([resource])
|
||||
|
||||
backfill_resource_scan_summaries(tenant_id, str(scan.id))
|
||||
return resource
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def saml_setup(tenants_fixture):
|
||||
tenant_id = tenants_fixture[0].id
|
||||
domain = "example.com"
|
||||
domain = "prowler.com"
|
||||
|
||||
SAMLDomainIndex.objects.create(email_domain=domain, tenant_id=tenant_id)
|
||||
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
import django.db
|
||||
from django.db.backends.postgresql.base import (
|
||||
DatabaseWrapper as BuiltinPostgresDatabaseWrapper,
|
||||
)
|
||||
from psycopg2 import InterfaceError
|
||||
|
||||
|
||||
class DatabaseWrapper(BuiltinPostgresDatabaseWrapper):
|
||||
def create_cursor(self, name=None):
|
||||
try:
|
||||
return super().create_cursor(name=name)
|
||||
except InterfaceError:
|
||||
django.db.close_old_connections()
|
||||
django.db.connection.connect()
|
||||
return super().create_cursor(name=name)
|
||||
@@ -3,6 +3,12 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# Suppress specific warnings from django-rest-auth: https://github.com/iMerica/dj-rest-auth/issues/684
|
||||
warnings.filterwarnings(
|
||||
"ignore", category=UserWarning, module="dj_rest_auth.registration.serializers"
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
BIN
api/src/backend/tasks/assets/fonts/FiraCode-Regular.ttf
Normal file
BIN
api/src/backend/tasks/assets/fonts/FiraCode-Regular.ttf
Normal file
Binary file not shown.
BIN
api/src/backend/tasks/assets/fonts/PlusJakartaSans-Regular.ttf
Normal file
BIN
api/src/backend/tasks/assets/fonts/PlusJakartaSans-Regular.ttf
Normal file
Binary file not shown.
BIN
api/src/backend/tasks/assets/img/prowler_logo.png
Normal file
BIN
api/src/backend/tasks/assets/img/prowler_logo.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 21 KiB |
@@ -2,10 +2,10 @@ import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from django_celery_beat.models import IntervalSchedule, PeriodicTask
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from tasks.tasks import perform_scheduled_scan_task
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.exceptions import ConflictException
|
||||
from api.models import Provider, Scan, StateChoices
|
||||
|
||||
|
||||
@@ -24,15 +24,9 @@ def schedule_provider_scan(provider_instance: Provider):
|
||||
if PeriodicTask.objects.filter(
|
||||
interval=schedule, name=task_name, task="scan-perform-scheduled"
|
||||
).exists():
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": "There is already a scheduled scan for this provider.",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/provider_id"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
raise ConflictException(
|
||||
detail="There is already a scheduled scan for this provider.",
|
||||
pointer="/data/attributes/provider_id",
|
||||
)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import openai
|
||||
from celery.utils.log import get_task_logger
|
||||
|
||||
from api.models import Provider
|
||||
from api.models import LighthouseConfiguration, Provider
|
||||
from api.utils import prowler_provider_connection_test
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
@@ -39,3 +40,46 @@ def check_provider_connection(provider_id: str):
|
||||
|
||||
connection_error = f"{connection_result.error}" if connection_result.error else None
|
||||
return {"connected": connection_result.is_connected, "error": connection_error}
|
||||
|
||||
|
||||
def check_lighthouse_connection(lighthouse_config_id: str):
|
||||
"""
|
||||
Business logic to check the connection status of a Lighthouse configuration.
|
||||
|
||||
Args:
|
||||
lighthouse_config_id (str): The primary key of the LighthouseConfiguration instance to check.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing:
|
||||
- 'connected' (bool): Indicates whether the connection is successful.
|
||||
- 'error' (str or None): The error message if the connection failed, otherwise `None`.
|
||||
- 'available_models' (list): List of available models if connection is successful.
|
||||
|
||||
Raises:
|
||||
Model.DoesNotExist: If the lighthouse configuration does not exist.
|
||||
"""
|
||||
lighthouse_config = LighthouseConfiguration.objects.get(pk=lighthouse_config_id)
|
||||
|
||||
if not lighthouse_config.api_key_decoded:
|
||||
lighthouse_config.is_active = False
|
||||
lighthouse_config.save()
|
||||
return {
|
||||
"connected": False,
|
||||
"error": "API key is invalid or missing.",
|
||||
"available_models": [],
|
||||
}
|
||||
|
||||
try:
|
||||
client = openai.OpenAI(api_key=lighthouse_config.api_key_decoded)
|
||||
models = client.models.list()
|
||||
lighthouse_config.is_active = True
|
||||
lighthouse_config.save()
|
||||
return {
|
||||
"connected": True,
|
||||
"error": None,
|
||||
"available_models": [model.id for model in models.data],
|
||||
}
|
||||
except Exception as e:
|
||||
lighthouse_config.is_active = False
|
||||
lighthouse_config.save()
|
||||
return {"connected": False, "error": str(e), "available_models": []}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import os
|
||||
import re
|
||||
import zipfile
|
||||
|
||||
import boto3
|
||||
@@ -19,6 +20,7 @@ from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected im
|
||||
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_github import GithubCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
|
||||
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
|
||||
@@ -30,6 +32,7 @@ from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_kubernetes import (
|
||||
KubernetesISO27001,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_m365 import M365ISO27001
|
||||
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
|
||||
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
|
||||
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
|
||||
@@ -89,6 +92,10 @@ COMPLIANCE_CLASS_MAP = {
|
||||
"m365": [
|
||||
(lambda name: name.startswith("cis_"), M365CIS),
|
||||
(lambda name: name == "prowler_threatscore_m365", ProwlerThreatScoreM365),
|
||||
(lambda name: name.startswith("iso27001_"), M365ISO27001),
|
||||
],
|
||||
"github": [
|
||||
(lambda name: name.startswith("cis_"), GithubCIS),
|
||||
],
|
||||
}
|
||||
|
||||
@@ -237,17 +244,27 @@ def _generate_output_directory(
|
||||
>>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
|
||||
'/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56',
|
||||
'/tmp/tenant-1234/aws/scan-5678/compliance/prowler-output-2023-02-15T12:34:56'
|
||||
'/tmp/tenant-1234/aws/scan-5678/threatscore/prowler-output-2023-02-15T12:34:56'
|
||||
"""
|
||||
# Sanitize the prowler provider name to ensure it is a valid directory name
|
||||
prowler_provider_sanitized = re.sub(r"[^\w\-]", "-", prowler_provider)
|
||||
|
||||
path = (
|
||||
f"{output_directory}/{tenant_id}/{scan_id}/prowler-output-"
|
||||
f"{prowler_provider}-{output_file_timestamp}"
|
||||
f"{prowler_provider_sanitized}-{output_file_timestamp}"
|
||||
)
|
||||
os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)
|
||||
|
||||
compliance_path = (
|
||||
f"{output_directory}/{tenant_id}/{scan_id}/compliance/prowler-output-"
|
||||
f"{prowler_provider}-{output_file_timestamp}"
|
||||
f"{prowler_provider_sanitized}-{output_file_timestamp}"
|
||||
)
|
||||
os.makedirs("/".join(compliance_path.split("/")[:-1]), exist_ok=True)
|
||||
|
||||
return path, compliance_path
|
||||
threatscore_path = (
|
||||
f"{output_directory}/{tenant_id}/{scan_id}/threatscore/prowler-output-"
|
||||
f"{prowler_provider_sanitized}-{output_file_timestamp}"
|
||||
)
|
||||
os.makedirs("/".join(threatscore_path.split("/")[:-1]), exist_ok=True)
|
||||
|
||||
return path, compliance_path, threatscore_path
|
||||
|
||||
949
api/src/backend/tasks/jobs/report.py
Normal file
949
api/src/backend/tasks/jobs/report.py
Normal file
@@ -0,0 +1,949 @@
|
||||
import io
|
||||
import os
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
from celery.utils.log import get_task_logger
|
||||
from config.django.base import DJANGO_FINDINGS_BATCH_SIZE, DJANGO_TMP_OUTPUT_DIRECTORY
|
||||
from reportlab.lib import colors
|
||||
from reportlab.lib.enums import TA_CENTER
|
||||
from reportlab.lib.pagesizes import letter
|
||||
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
|
||||
from reportlab.lib.units import inch
|
||||
from reportlab.pdfbase import pdfmetrics
|
||||
from reportlab.pdfbase.ttfonts import TTFont
|
||||
from reportlab.pdfgen import canvas
|
||||
from reportlab.platypus import (
|
||||
Image,
|
||||
PageBreak,
|
||||
Paragraph,
|
||||
SimpleDocTemplate,
|
||||
Spacer,
|
||||
Table,
|
||||
TableStyle,
|
||||
)
|
||||
from tasks.jobs.export import _generate_output_directory, _upload_to_s3
|
||||
from tasks.utils import batched
|
||||
|
||||
from api.models import Finding, Provider, Scan, ScanSummary
|
||||
from api.utils import initialize_prowler_provider
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.outputs.finding import Finding as FindingOutput
|
||||
|
||||
pdfmetrics.registerFont(
|
||||
TTFont(
|
||||
"PlusJakartaSans",
|
||||
os.path.join(
|
||||
os.path.dirname(__file__), "../assets/fonts/PlusJakartaSans-Regular.ttf"
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
pdfmetrics.registerFont(
|
||||
TTFont(
|
||||
"FiraCode",
|
||||
os.path.join(os.path.dirname(__file__), "../assets/fonts/FiraCode-Regular.ttf"),
|
||||
)
|
||||
)
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
def generate_threatscore_report(
|
||||
scan_id: str,
|
||||
compliance_id: str,
|
||||
output_path: str,
|
||||
provider_id: str,
|
||||
only_failed: bool = True,
|
||||
min_risk_level: int = 4,
|
||||
):
|
||||
"""
|
||||
Generate a PDF compliance report based on Prowler ORM objects.
|
||||
|
||||
Parameters:
|
||||
- scan_id: ID of the scan executed by Prowler.
|
||||
- compliance_id: ID of the compliance framework (e.g., "nis2_azure").
|
||||
- output_path: Output PDF file path (e.g., "threatscore_report.pdf").
|
||||
- provider_id: Provider ID for the scan.
|
||||
- only_failed: If True, only requirements with status "FAIL" will be included in the list of requirements.
|
||||
- min_risk_level: Minimum risk level for critical failed requirements.
|
||||
"""
|
||||
logger.info(
|
||||
f"Generating the report for the scan {scan_id} with provider {provider_id}"
|
||||
)
|
||||
try:
|
||||
styles = getSampleStyleSheet()
|
||||
|
||||
prowler_dark_green = colors.Color(0.1, 0.5, 0.2)
|
||||
|
||||
title_style = ParagraphStyle(
|
||||
"CustomTitle",
|
||||
parent=styles["Title"],
|
||||
fontSize=24,
|
||||
textColor=prowler_dark_green,
|
||||
spaceAfter=20,
|
||||
fontName="PlusJakartaSans",
|
||||
alignment=TA_CENTER,
|
||||
)
|
||||
|
||||
h1 = ParagraphStyle(
|
||||
"CustomH1",
|
||||
parent=styles["Heading1"],
|
||||
fontSize=18,
|
||||
textColor=colors.Color(0.2, 0.4, 0.6),
|
||||
spaceBefore=20,
|
||||
spaceAfter=12,
|
||||
fontName="PlusJakartaSans",
|
||||
leftIndent=0,
|
||||
borderWidth=2,
|
||||
borderColor=colors.Color(0.2, 0.4, 0.6),
|
||||
borderPadding=8,
|
||||
backColor=colors.Color(0.95, 0.97, 1.0),
|
||||
)
|
||||
|
||||
h2 = ParagraphStyle(
|
||||
"CustomH2",
|
||||
parent=styles["Heading2"],
|
||||
fontSize=14,
|
||||
textColor=colors.Color(0.3, 0.5, 0.7),
|
||||
spaceBefore=15,
|
||||
spaceAfter=8,
|
||||
fontName="PlusJakartaSans",
|
||||
leftIndent=10,
|
||||
borderWidth=1,
|
||||
borderColor=colors.Color(0.7, 0.8, 0.9),
|
||||
borderPadding=5,
|
||||
backColor=colors.Color(0.98, 0.99, 1.0),
|
||||
)
|
||||
|
||||
h3 = ParagraphStyle(
|
||||
"CustomH3",
|
||||
parent=styles["Heading3"],
|
||||
fontSize=12,
|
||||
textColor=colors.Color(0.4, 0.6, 0.8),
|
||||
spaceBefore=10,
|
||||
spaceAfter=6,
|
||||
fontName="PlusJakartaSans",
|
||||
leftIndent=20,
|
||||
)
|
||||
|
||||
normal = ParagraphStyle(
|
||||
"CustomNormal",
|
||||
parent=styles["Normal"],
|
||||
fontSize=10,
|
||||
textColor=colors.Color(0.2, 0.2, 0.2),
|
||||
spaceBefore=4,
|
||||
spaceAfter=4,
|
||||
leftIndent=30,
|
||||
fontName="PlusJakartaSans",
|
||||
)
|
||||
|
||||
normal_center = ParagraphStyle(
|
||||
"CustomNormalCenter",
|
||||
parent=styles["Normal"],
|
||||
fontSize=10,
|
||||
textColor=colors.Color(0.2, 0.2, 0.2),
|
||||
fontName="PlusJakartaSans",
|
||||
)
|
||||
|
||||
provider_obj = Provider.objects.get(id=provider_id)
|
||||
prowler_provider = initialize_prowler_provider(provider_obj)
|
||||
provider_type = provider_obj.provider
|
||||
|
||||
frameworks_bulk = Compliance.get_bulk(provider_type)
|
||||
compliance_obj = frameworks_bulk[compliance_id]
|
||||
compliance_name = getattr(compliance_obj, "Framework", "N/A")
|
||||
compliance_version = getattr(compliance_obj, "Version", "N/A")
|
||||
compliance_description = getattr(compliance_obj, "Description", "")
|
||||
|
||||
logger.info(f"Getting findings for scan {scan_id}")
|
||||
findings_qs = (
|
||||
Finding.all_objects.filter(scan_id=scan_id).order_by("uid").iterator()
|
||||
)
|
||||
findings = []
|
||||
for batch, is_last in batched(findings_qs, DJANGO_FINDINGS_BATCH_SIZE):
|
||||
fos = [
|
||||
FindingOutput.transform_api_finding(f, prowler_provider) for f in batch
|
||||
]
|
||||
findings.extend(fos)
|
||||
|
||||
attrs_map = {}
|
||||
resp_reqs = []
|
||||
for req in compliance_obj.Requirements:
|
||||
req_id = req.Id
|
||||
attrs_map[req_id] = {
|
||||
"attributes": {
|
||||
"req_attributes": getattr(req, "Attributes", []),
|
||||
"checks": getattr(req, "Checks", []),
|
||||
},
|
||||
"description": getattr(req, "Description", ""),
|
||||
}
|
||||
status = "UNKNOWN"
|
||||
description = getattr(req, "Description", "")
|
||||
for f in findings:
|
||||
if f.check_id in getattr(req, "Checks", []):
|
||||
status = getattr(f, "status", "UNKNOWN")
|
||||
description = getattr(f, "description", description)
|
||||
break
|
||||
resp_reqs.append(
|
||||
{
|
||||
"id": req_id,
|
||||
"attributes": {
|
||||
"framework": compliance_name,
|
||||
"version": compliance_version,
|
||||
"status": status,
|
||||
"description": description,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
def create_risk_component(risk_level, weight, score=0):
|
||||
"""Create a visual risk component similar to the UI design"""
|
||||
if risk_level >= 4:
|
||||
risk_color = colors.Color(0.8, 0.2, 0.2)
|
||||
elif risk_level >= 3:
|
||||
risk_color = colors.Color(0.9, 0.6, 0.2)
|
||||
elif risk_level >= 2:
|
||||
risk_color = colors.Color(0.9, 0.9, 0.2)
|
||||
else:
|
||||
risk_color = colors.Color(0.2, 0.8, 0.2)
|
||||
|
||||
if weight <= 50:
|
||||
weight_color = colors.Color(0.2, 0.8, 0.2)
|
||||
elif weight <= 100:
|
||||
weight_color = colors.Color(0.9, 0.9, 0.2)
|
||||
else:
|
||||
weight_color = colors.Color(0.8, 0.2, 0.2)
|
||||
|
||||
score_color = colors.Color(0.4, 0.4, 0.4)
|
||||
|
||||
data = [
|
||||
[
|
||||
"Risk Level:",
|
||||
str(risk_level),
|
||||
"Weight:",
|
||||
str(weight),
|
||||
"Score:",
|
||||
str(score),
|
||||
]
|
||||
]
|
||||
|
||||
table = Table(
|
||||
data,
|
||||
colWidths=[
|
||||
0.8 * inch,
|
||||
0.4 * inch,
|
||||
0.6 * inch,
|
||||
0.4 * inch,
|
||||
0.5 * inch,
|
||||
0.4 * inch,
|
||||
],
|
||||
)
|
||||
|
||||
table.setStyle(
|
||||
TableStyle(
|
||||
[
|
||||
("BACKGROUND", (0, 0), (0, 0), colors.Color(0.9, 0.9, 0.9)),
|
||||
("BACKGROUND", (1, 0), (1, 0), risk_color),
|
||||
("TEXTCOLOR", (1, 0), (1, 0), colors.white),
|
||||
("FONTNAME", (1, 0), (1, 0), "FiraCode"),
|
||||
("BACKGROUND", (2, 0), (2, 0), colors.Color(0.9, 0.9, 0.9)),
|
||||
("BACKGROUND", (3, 0), (3, 0), weight_color),
|
||||
("TEXTCOLOR", (3, 0), (3, 0), colors.white),
|
||||
("FONTNAME", (3, 0), (3, 0), "FiraCode"),
|
||||
("BACKGROUND", (4, 0), (4, 0), colors.Color(0.9, 0.9, 0.9)),
|
||||
("BACKGROUND", (5, 0), (5, 0), score_color),
|
||||
("TEXTCOLOR", (5, 0), (5, 0), colors.white),
|
||||
("FONTNAME", (5, 0), (5, 0), "FiraCode"),
|
||||
("ALIGN", (0, 0), (-1, -1), "CENTER"),
|
||||
("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
|
||||
("FONTSIZE", (0, 0), (-1, -1), 10),
|
||||
("GRID", (0, 0), (-1, -1), 0.5, colors.black),
|
||||
("LEFTPADDING", (0, 0), (-1, -1), 6),
|
||||
("RIGHTPADDING", (0, 0), (-1, -1), 6),
|
||||
("TOPPADDING", (0, 0), (-1, -1), 8),
|
||||
("BOTTOMPADDING", (0, 0), (-1, -1), 8),
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
return table
|
||||
|
||||
def create_status_component(status):
|
||||
"""Create a visual status component with colors"""
|
||||
if status.upper() == "PASS":
|
||||
status_color = colors.Color(0.2, 0.8, 0.2)
|
||||
elif status.upper() == "FAIL":
|
||||
status_color = colors.Color(0.8, 0.2, 0.2)
|
||||
else:
|
||||
status_color = colors.Color(0.4, 0.4, 0.4)
|
||||
|
||||
data = [["State:", status.upper()]]
|
||||
|
||||
elements.append(Spacer(1, 0.1 * inch))
|
||||
|
||||
table = Table(data, colWidths=[0.6 * inch, 0.8 * inch])
|
||||
|
||||
table.setStyle(
|
||||
TableStyle(
|
||||
[
|
||||
("BACKGROUND", (0, 0), (0, 0), colors.Color(0.9, 0.9, 0.9)),
|
||||
("FONTNAME", (0, 0), (0, 0), "PlusJakartaSans"),
|
||||
("BACKGROUND", (1, 0), (1, 0), status_color),
|
||||
("TEXTCOLOR", (1, 0), (1, 0), colors.white),
|
||||
("FONTNAME", (1, 0), (1, 0), "FiraCode"),
|
||||
("ALIGN", (0, 0), (-1, -1), "CENTER"),
|
||||
("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
|
||||
("FONTSIZE", (0, 0), (-1, -1), 12),
|
||||
("GRID", (0, 0), (-1, -1), 0.5, colors.black),
|
||||
("LEFTPADDING", (0, 0), (-1, -1), 8),
|
||||
("RIGHTPADDING", (0, 0), (-1, -1), 8),
|
||||
("TOPPADDING", (0, 0), (-1, -1), 10),
|
||||
("BOTTOMPADDING", (0, 0), (-1, -1), 10),
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
return table
|
||||
|
||||
def create_section_score_chart(resp_reqs, attrs_map):
|
||||
"""Create a bar chart showing compliance score by section"""
|
||||
sections_data = {}
|
||||
|
||||
for req in resp_reqs:
|
||||
req_id = req["id"]
|
||||
attr = attrs_map.get(req_id, {})
|
||||
status = req["attributes"]["status"]
|
||||
|
||||
metadata = attr.get("attributes", {}).get("req_attributes", [])
|
||||
if metadata:
|
||||
m = metadata[0]
|
||||
section = getattr(m, "Section", "Unknown")
|
||||
risk_level = getattr(m, "LevelOfRisk", 0)
|
||||
weight = getattr(m, "Weight", 0)
|
||||
|
||||
if section not in sections_data:
|
||||
sections_data[section] = {
|
||||
"total_score": 0,
|
||||
"max_possible_score": 0,
|
||||
}
|
||||
|
||||
max_score = risk_level * weight
|
||||
sections_data[section]["max_possible_score"] += max_score
|
||||
|
||||
if status == "PASS":
|
||||
sections_data[section]["total_score"] += max_score
|
||||
|
||||
section_names = []
|
||||
compliance_percentages = []
|
||||
|
||||
for section, data in sections_data.items():
|
||||
if data["max_possible_score"] > 0:
|
||||
compliance_percentage = (
|
||||
data["total_score"] / data["max_possible_score"]
|
||||
) * 100
|
||||
else:
|
||||
compliance_percentage = 0
|
||||
|
||||
section_names.append(section)
|
||||
compliance_percentages.append(compliance_percentage)
|
||||
|
||||
sorted_data = sorted(
|
||||
zip(section_names, compliance_percentages),
|
||||
key=lambda x: x[1],
|
||||
reverse=True,
|
||||
)
|
||||
section_names, compliance_percentages = (
|
||||
zip(*sorted_data) if sorted_data else ([], [])
|
||||
)
|
||||
|
||||
fig, ax = plt.subplots(figsize=(12, 8))
|
||||
|
||||
colors_list = []
|
||||
for percentage in compliance_percentages:
|
||||
if percentage >= 80:
|
||||
color = "#4CAF50"
|
||||
elif percentage >= 60:
|
||||
color = "#8BC34A"
|
||||
elif percentage >= 40:
|
||||
color = "#FFEB3B"
|
||||
elif percentage >= 20:
|
||||
color = "#FF9800"
|
||||
else:
|
||||
color = "#F44336"
|
||||
colors_list.append(color)
|
||||
|
||||
bars = ax.bar(section_names, compliance_percentages, color=colors_list)
|
||||
|
||||
ax.set_ylabel("Compliance Score (%)", fontsize=12)
|
||||
ax.set_xlabel("Section", fontsize=12)
|
||||
ax.set_ylim(0, 100)
|
||||
|
||||
for bar, percentage in zip(bars, compliance_percentages):
|
||||
height = bar.get_height()
|
||||
ax.text(
|
||||
bar.get_x() + bar.get_width() / 2.0,
|
||||
height + 1,
|
||||
f"{percentage:.1f}%",
|
||||
ha="center",
|
||||
va="bottom",
|
||||
fontweight="bold",
|
||||
)
|
||||
|
||||
plt.xticks(rotation=45, ha="right")
|
||||
|
||||
ax.grid(True, alpha=0.3, axis="y")
|
||||
|
||||
plt.tight_layout()
|
||||
|
||||
buffer = io.BytesIO()
|
||||
plt.savefig(buffer, format="png", dpi=300, bbox_inches="tight")
|
||||
buffer.seek(0)
|
||||
plt.close()
|
||||
|
||||
return buffer
|
||||
|
||||
def get_finding_info(check_id: str):
|
||||
return [f for f in findings if f.check_id == check_id]
|
||||
|
||||
doc = SimpleDocTemplate(
|
||||
output_path,
|
||||
pagesize=letter,
|
||||
title=f"Prowler ThreatScore Report - {compliance_name}",
|
||||
author="Prowler",
|
||||
subject=f"Compliance Report for {compliance_name}",
|
||||
creator="Prowler Compliance Generator",
|
||||
keywords=f"compliance,{compliance_name},security,framework,prowler",
|
||||
)
|
||||
|
||||
elements = []
|
||||
|
||||
img_path = os.path.join(
|
||||
os.path.dirname(__file__), "../assets/img/prowler_logo.png"
|
||||
)
|
||||
logo = Image(
|
||||
img_path,
|
||||
width=5 * inch,
|
||||
height=0.8 * inch,
|
||||
)
|
||||
elements.append(logo)
|
||||
|
||||
elements.append(Spacer(1, 0.5 * inch))
|
||||
elements.append(Paragraph("Prowler ThreatScore Report", title_style))
|
||||
elements.append(Spacer(1, 0.5 * inch))
|
||||
|
||||
info_data = [
|
||||
["Compliance Framework:", compliance_name],
|
||||
["Compliance ID:", compliance_id],
|
||||
["Version:", compliance_version],
|
||||
["Scan ID:", scan_id],
|
||||
["Description:", Paragraph(compliance_description, normal_center)],
|
||||
]
|
||||
info_table = Table(info_data, colWidths=[2 * inch, 4 * inch])
|
||||
info_table.setStyle(
|
||||
TableStyle(
|
||||
[
|
||||
("BACKGROUND", (0, 0), (0, 4), colors.Color(0.2, 0.4, 0.6)),
|
||||
("TEXTCOLOR", (0, 0), (0, 4), colors.white),
|
||||
("FONTNAME", (0, 0), (0, 4), "FiraCode"),
|
||||
("BACKGROUND", (1, 0), (1, 4), colors.Color(0.95, 0.97, 1.0)),
|
||||
("TEXTCOLOR", (1, 0), (1, 4), colors.Color(0.2, 0.2, 0.2)),
|
||||
("FONTNAME", (1, 0), (1, 4), "PlusJakartaSans"),
|
||||
("ALIGN", (0, 0), (-1, -1), "LEFT"),
|
||||
("VALIGN", (0, 0), (-1, -1), "TOP"),
|
||||
("FONTSIZE", (0, 0), (-1, -1), 11),
|
||||
("GRID", (0, 0), (-1, -1), 1, colors.Color(0.7, 0.8, 0.9)),
|
||||
("LEFTPADDING", (0, 0), (-1, -1), 10),
|
||||
("RIGHTPADDING", (0, 0), (-1, -1), 10),
|
||||
("TOPPADDING", (0, 0), (-1, -1), 8),
|
||||
("BOTTOMPADDING", (0, 0), (-1, -1), 8),
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
elements.append(info_table)
|
||||
|
||||
elements.append(PageBreak())
|
||||
|
||||
elements.append(Paragraph("Compliance Score by Sections", h1))
|
||||
elements.append(Spacer(1, 0.2 * inch))
|
||||
|
||||
chart_buffer = create_section_score_chart(resp_reqs, attrs_map)
|
||||
chart_image = Image(chart_buffer, width=7 * inch, height=5.5 * inch)
|
||||
elements.append(chart_image)
|
||||
|
||||
total_score = 0
|
||||
max_possible_score = 0
|
||||
|
||||
for req in resp_reqs:
|
||||
req_id = req["id"]
|
||||
attr = attrs_map.get(req_id, {})
|
||||
status = req["attributes"]["status"]
|
||||
|
||||
metadata = attr.get("attributes", {}).get("req_attributes", [])
|
||||
if metadata and len(metadata) > 0:
|
||||
m = metadata[0]
|
||||
risk_level = getattr(m, "LevelOfRisk", 0)
|
||||
weight = getattr(m, "Weight", 0)
|
||||
max_score = risk_level * weight
|
||||
max_possible_score += max_score
|
||||
|
||||
if status == "PASS":
|
||||
total_score += max_score
|
||||
|
||||
overall_compliance = (
|
||||
(total_score / max_possible_score * 100) if max_possible_score > 0 else 0
|
||||
)
|
||||
|
||||
elements.append(Spacer(1, 0.3 * inch))
|
||||
|
||||
summary_data = [
|
||||
["Total Score:", f"{total_score:,}"],
|
||||
["Max Possible Score:", f"{max_possible_score:,}"],
|
||||
["Overall Compliance:", f"{overall_compliance:.2f}%"],
|
||||
]
|
||||
|
||||
if overall_compliance >= 80:
|
||||
compliance_color = colors.Color(0.2, 0.8, 0.2)
|
||||
elif overall_compliance >= 60:
|
||||
compliance_color = colors.Color(0.8, 0.8, 0.2)
|
||||
else:
|
||||
compliance_color = colors.Color(0.8, 0.2, 0.2)
|
||||
|
||||
summary_table = Table(summary_data, colWidths=[2.5 * inch, 2 * inch])
|
||||
summary_table.setStyle(
|
||||
TableStyle(
|
||||
[
|
||||
("BACKGROUND", (0, 0), (0, 1), colors.Color(0.3, 0.5, 0.7)),
|
||||
("TEXTCOLOR", (0, 0), (0, 1), colors.white),
|
||||
("FONTNAME", (0, 0), (0, 1), "FiraCode"),
|
||||
("BACKGROUND", (0, 2), (0, 2), colors.Color(0.1, 0.3, 0.5)),
|
||||
("TEXTCOLOR", (0, 2), (0, 2), colors.white),
|
||||
("FONTNAME", (0, 2), (0, 2), "FiraCode"),
|
||||
("FONTSIZE", (0, 2), (0, 2), 12),
|
||||
("BACKGROUND", (1, 0), (1, 1), colors.Color(0.95, 0.97, 1.0)),
|
||||
("TEXTCOLOR", (1, 0), (1, 1), colors.Color(0.2, 0.2, 0.2)),
|
||||
("FONTNAME", (1, 0), (1, 1), "PlusJakartaSans"),
|
||||
("BACKGROUND", (1, 2), (1, 2), compliance_color),
|
||||
("TEXTCOLOR", (1, 2), (1, 2), colors.white),
|
||||
("FONTNAME", (1, 2), (1, 2), "FiraCode"),
|
||||
("FONTSIZE", (1, 2), (1, 2), 14),
|
||||
("ALIGN", (0, 0), (-1, -1), "CENTER"),
|
||||
("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
|
||||
("FONTSIZE", (0, 0), (1, 1), 11),
|
||||
("GRID", (0, 0), (-1, -1), 1.5, colors.Color(0.5, 0.6, 0.7)),
|
||||
("LEFTPADDING", (0, 0), (-1, -1), 12),
|
||||
("RIGHTPADDING", (0, 0), (-1, -1), 12),
|
||||
("TOPPADDING", (0, 0), (-1, -1), 10),
|
||||
("BOTTOMPADDING", (0, 0), (-1, -1), 10),
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
elements.append(summary_table)
|
||||
|
||||
elements.append(PageBreak())
|
||||
|
||||
elements.append(Paragraph("Requirements Index", h1))
|
||||
|
||||
sections = {}
|
||||
for req_id, req in attrs_map.items():
|
||||
meta = req["attributes"]["req_attributes"][0]
|
||||
section = getattr(meta, "Section", "N/A")
|
||||
subsection = getattr(meta, "SubSection", "N/A")
|
||||
title = getattr(meta, "Title", "N/A")
|
||||
|
||||
if section not in sections:
|
||||
sections[section] = {}
|
||||
if subsection not in sections[section]:
|
||||
sections[section][subsection] = []
|
||||
|
||||
sections[section][subsection].append({"id": req_id, "title": title})
|
||||
|
||||
section_num = 1
|
||||
for section_name, subsections in sections.items():
|
||||
elements.append(Paragraph(f"{section_num}. {section_name}", h2))
|
||||
|
||||
subsection_num = 1
|
||||
for subsection_name, requirements in subsections.items():
|
||||
elements.append(Paragraph(f"{subsection_name}", h3))
|
||||
|
||||
req_num = 1
|
||||
for req in requirements:
|
||||
elements.append(Paragraph(f"{req['id']} - {req['title']}", normal))
|
||||
req_num += 1
|
||||
|
||||
subsection_num += 1
|
||||
|
||||
section_num += 1
|
||||
elements.append(Spacer(1, 0.1 * inch))
|
||||
|
||||
elements.append(PageBreak())
|
||||
|
||||
elements.append(Paragraph("Top Requirements by Level of Risk", h1))
|
||||
elements.append(Spacer(1, 0.1 * inch))
|
||||
elements.append(
|
||||
Paragraph(
|
||||
f"Critical Failed Requirements (Risk Level ≥ {min_risk_level})", h2
|
||||
)
|
||||
)
|
||||
elements.append(Spacer(1, 0.2 * inch))
|
||||
|
||||
critical_reqs = []
|
||||
for req in resp_reqs:
|
||||
status = req["attributes"]["status"]
|
||||
if status == "FAIL":
|
||||
metadata = (
|
||||
attrs_map.get(req["id"], {})
|
||||
.get("attributes", {})
|
||||
.get("req_attributes", [{}])[0]
|
||||
)
|
||||
if metadata:
|
||||
risk_level = getattr(metadata, "LevelOfRisk", 0)
|
||||
weight = getattr(metadata, "Weight", 0)
|
||||
|
||||
if risk_level >= min_risk_level:
|
||||
critical_reqs.append(
|
||||
{
|
||||
"req": req,
|
||||
"attr": attrs_map[req["id"]],
|
||||
"risk_level": risk_level,
|
||||
"weight": weight,
|
||||
"metadata": metadata,
|
||||
}
|
||||
)
|
||||
|
||||
critical_reqs.sort(key=lambda x: (x["risk_level"], x["weight"]), reverse=True)
|
||||
|
||||
if not critical_reqs:
|
||||
elements.append(
|
||||
Paragraph(
|
||||
"✅ No critical failed requirements found. Great job!", normal
|
||||
)
|
||||
)
|
||||
else:
|
||||
elements.append(
|
||||
Paragraph(
|
||||
f"Found {len(critical_reqs)} critical failed requirements that require immediate attention:",
|
||||
normal,
|
||||
)
|
||||
)
|
||||
elements.append(Spacer(1, 0.5 * inch))
|
||||
|
||||
table_data = [["Risk", "Weight", "Requirement ID", "Title", "Section"]]
|
||||
|
||||
for idx, critical_req in enumerate(critical_reqs):
|
||||
req_id = critical_req["req"]["id"]
|
||||
risk_level = critical_req["risk_level"]
|
||||
weight = critical_req["weight"]
|
||||
title = getattr(critical_req["metadata"], "Title", "N/A")
|
||||
section = getattr(critical_req["metadata"], "Section", "N/A")
|
||||
|
||||
if len(title) > 50:
|
||||
title = title[:47] + "..."
|
||||
|
||||
table_data.append(
|
||||
[str(risk_level), str(weight), req_id, title, section]
|
||||
)
|
||||
|
||||
critical_table = Table(
|
||||
table_data,
|
||||
colWidths=[0.6 * inch, 0.8 * inch, 1.2 * inch, 3 * inch, 1.4 * inch],
|
||||
)
|
||||
|
||||
critical_table.setStyle(
|
||||
TableStyle(
|
||||
[
|
||||
("BACKGROUND", (0, 0), (-1, 0), colors.Color(0.8, 0.2, 0.2)),
|
||||
("TEXTCOLOR", (0, 0), (-1, 0), colors.white),
|
||||
("FONTNAME", (0, 0), (-1, 0), "FiraCode"),
|
||||
("FONTSIZE", (0, 0), (-1, 0), 10),
|
||||
("BACKGROUND", (0, 1), (0, -1), colors.Color(0.8, 0.2, 0.2)),
|
||||
("TEXTCOLOR", (0, 1), (0, -1), colors.white),
|
||||
("FONTNAME", (0, 1), (0, -1), "FiraCode"),
|
||||
("ALIGN", (0, 1), (0, -1), "CENTER"),
|
||||
("FONTSIZE", (0, 1), (0, -1), 12),
|
||||
("ALIGN", (1, 1), (1, -1), "CENTER"),
|
||||
("FONTNAME", (1, 1), (1, -1), "FiraCode"),
|
||||
("FONTNAME", (2, 1), (2, -1), "FiraCode"),
|
||||
("FONTSIZE", (2, 1), (2, -1), 9),
|
||||
("FONTNAME", (3, 1), (-1, -1), "PlusJakartaSans"),
|
||||
("FONTSIZE", (3, 1), (-1, -1), 8),
|
||||
("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
|
||||
("GRID", (0, 0), (-1, -1), 1, colors.Color(0.7, 0.7, 0.7)),
|
||||
("LEFTPADDING", (0, 0), (-1, -1), 6),
|
||||
("RIGHTPADDING", (0, 0), (-1, -1), 6),
|
||||
("TOPPADDING", (0, 0), (-1, -1), 8),
|
||||
("BOTTOMPADDING", (0, 0), (-1, -1), 8),
|
||||
(
|
||||
"BACKGROUND",
|
||||
(1, 1),
|
||||
(-1, -1),
|
||||
colors.Color(0.98, 0.98, 0.98),
|
||||
),
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
for idx, critical_req in enumerate(critical_reqs):
|
||||
row_idx = idx + 1
|
||||
weight = critical_req["weight"]
|
||||
|
||||
if weight >= 150:
|
||||
weight_color = colors.Color(0.8, 0.2, 0.2)
|
||||
elif weight >= 100:
|
||||
weight_color = colors.Color(0.9, 0.6, 0.2)
|
||||
else:
|
||||
weight_color = colors.Color(0.9, 0.9, 0.2)
|
||||
|
||||
critical_table.setStyle(
|
||||
TableStyle(
|
||||
[
|
||||
("BACKGROUND", (1, row_idx), (1, row_idx), weight_color),
|
||||
("TEXTCOLOR", (1, row_idx), (1, row_idx), colors.white),
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
elements.append(critical_table)
|
||||
elements.append(Spacer(1, 0.2 * inch))
|
||||
|
||||
warning_text = """
|
||||
<b>IMMEDIATE ACTION REQUIRED:</b><br/>
|
||||
These requirements have the highest risk levels and have failed compliance checks.
|
||||
Please prioritize addressing these issues to improve your security posture.
|
||||
"""
|
||||
|
||||
warning_style = ParagraphStyle(
|
||||
"Warning",
|
||||
parent=styles["Normal"],
|
||||
fontSize=11,
|
||||
textColor=colors.Color(0.8, 0.2, 0.2),
|
||||
spaceBefore=10,
|
||||
spaceAfter=10,
|
||||
leftIndent=20,
|
||||
rightIndent=20,
|
||||
fontName="PlusJakartaSans",
|
||||
backColor=colors.Color(1.0, 0.95, 0.95),
|
||||
borderWidth=2,
|
||||
borderColor=colors.Color(0.8, 0.2, 0.2),
|
||||
borderPadding=10,
|
||||
)
|
||||
|
||||
elements.append(Paragraph(warning_text, warning_style))
|
||||
|
||||
elements.append(PageBreak())
|
||||
|
||||
def get_weight(req):
|
||||
req_id = req["id"]
|
||||
attr = attrs_map.get(req_id, {})
|
||||
metadata = attr.get("attributes", {}).get("metadata", [])
|
||||
if metadata:
|
||||
return metadata[0].get("Weight", 0)
|
||||
return 0
|
||||
|
||||
sorted_reqs = sorted(resp_reqs, key=get_weight, reverse=True)
|
||||
|
||||
if only_failed:
|
||||
sorted_reqs = [
|
||||
req for req in sorted_reqs if req["attributes"]["status"] == "FAIL"
|
||||
]
|
||||
|
||||
for req in sorted_reqs:
|
||||
req_id = req["id"]
|
||||
attr = attrs_map.get(req_id, {})
|
||||
desc = req["attributes"]["description"]
|
||||
status = req["attributes"]["status"]
|
||||
|
||||
elements.append(Paragraph(f"{req_id}: {attr.get('description', desc)}", h1))
|
||||
|
||||
status_component = create_status_component(status)
|
||||
elements.append(status_component)
|
||||
elements.append(Spacer(1, 0.1 * inch))
|
||||
|
||||
metadata = attr.get("attributes", {}).get("req_attributes", [])
|
||||
if metadata and len(metadata) > 0:
|
||||
m = metadata[0]
|
||||
elements.append(Paragraph("Title: ", h3))
|
||||
elements.append(Paragraph(f"{getattr(m, 'Title', 'N/A')}", normal))
|
||||
elements.append(Paragraph("Section: ", h3))
|
||||
elements.append(Paragraph(f"{getattr(m, 'Section', 'N/A')}", normal))
|
||||
elements.append(Paragraph("SubSection: ", h3))
|
||||
elements.append(Paragraph(f"{getattr(m, 'SubSection', 'N/A')}", normal))
|
||||
elements.append(Paragraph("Description: ", h3))
|
||||
elements.append(
|
||||
Paragraph(f"{getattr(m, 'AttributeDescription', 'N/A')}", normal)
|
||||
)
|
||||
elements.append(Paragraph("Additional Information: ", h3))
|
||||
elements.append(
|
||||
Paragraph(f"{getattr(m, 'AdditionalInformation', 'N/A')}", normal)
|
||||
)
|
||||
elements.append(Spacer(1, 0.1 * inch))
|
||||
|
||||
risk_level = getattr(m, "LevelOfRisk", 0)
|
||||
weight = getattr(m, "Weight", 0)
|
||||
|
||||
if status == "PASS":
|
||||
score = risk_level * weight
|
||||
else:
|
||||
score = 0
|
||||
|
||||
risk_component = create_risk_component(risk_level, weight, score)
|
||||
elements.append(risk_component)
|
||||
elements.append(Spacer(1, 0.1 * inch))
|
||||
|
||||
checks = attr.get("attributes", {}).get("checks", [])
|
||||
for cid in checks:
|
||||
elements.append(Paragraph(f"Check: {cid}", h2))
|
||||
elements.append(Spacer(1, 0.1 * inch))
|
||||
finds = get_finding_info(cid)
|
||||
if not finds:
|
||||
elements.append(
|
||||
Paragraph("- No information for this finding currently", normal)
|
||||
)
|
||||
else:
|
||||
findings_table_data = [
|
||||
[
|
||||
"Finding",
|
||||
"Resource name",
|
||||
"Severity",
|
||||
"Status",
|
||||
"Region",
|
||||
]
|
||||
]
|
||||
for f in finds:
|
||||
check_meta = getattr(f, "metadata", {})
|
||||
title = getattr(
|
||||
check_meta, "CheckTitle", getattr(attr, "CheckId", "")
|
||||
)
|
||||
resource_name = getattr(f, "resource_name", "")
|
||||
if not resource_name:
|
||||
resource_name = getattr(f, "resource_uid", "")
|
||||
severity = getattr(check_meta, "Severity", "").capitalize()
|
||||
status = getattr(f, "status", "").upper()
|
||||
region = getattr(f, "region", "global")
|
||||
|
||||
findings_table_data.append(
|
||||
[
|
||||
Paragraph(title, normal_center),
|
||||
Paragraph(resource_name, normal_center),
|
||||
Paragraph(severity, normal_center),
|
||||
Paragraph(status, normal_center),
|
||||
Paragraph(region, normal_center),
|
||||
]
|
||||
)
|
||||
findings_table = Table(
|
||||
findings_table_data,
|
||||
colWidths=[
|
||||
2.5 * inch,
|
||||
3 * inch,
|
||||
0.9 * inch,
|
||||
0.9 * inch,
|
||||
0.9 * inch,
|
||||
],
|
||||
)
|
||||
findings_table.setStyle(
|
||||
TableStyle(
|
||||
[
|
||||
(
|
||||
"BACKGROUND",
|
||||
(0, 0),
|
||||
(-1, 0),
|
||||
colors.Color(0.2, 0.4, 0.6),
|
||||
),
|
||||
("TEXTCOLOR", (0, 0), (-1, 0), colors.white),
|
||||
("FONTNAME", (0, 0), (-1, 0), "FiraCode"),
|
||||
("ALIGN", (0, 0), (0, 0), "CENTER"),
|
||||
("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
|
||||
("FONTSIZE", (0, 0), (-1, -1), 9),
|
||||
(
|
||||
"GRID",
|
||||
(0, 0),
|
||||
(-1, -1),
|
||||
0.1,
|
||||
colors.Color(0.7, 0.8, 0.9),
|
||||
),
|
||||
("LEFTPADDING", (0, 0), (0, 0), 0),
|
||||
("RIGHTPADDING", (0, 0), (0, 0), 0),
|
||||
("TOPPADDING", (0, 0), (-1, -1), 4),
|
||||
("BOTTOMPADDING", (0, 0), (-1, -1), 4),
|
||||
]
|
||||
)
|
||||
)
|
||||
elements.append(findings_table)
|
||||
elements.append(Spacer(1, 0.1 * inch))
|
||||
|
||||
elements.append(PageBreak())
|
||||
|
||||
def add_footer(canvas: canvas.Canvas, doc):
|
||||
width, height = doc.pagesize
|
||||
page_num_text = f"Page {doc.page}"
|
||||
canvas.setFont("PlusJakartaSans", 9)
|
||||
canvas.setFillColorRGB(0.4, 0.4, 0.4)
|
||||
canvas.drawString(30, 20, page_num_text)
|
||||
powered_text = "Powered by Prowler"
|
||||
text_width = canvas.stringWidth(powered_text, "PlusJakartaSans", 9)
|
||||
canvas.drawString(width - text_width - 30, 20, powered_text)
|
||||
|
||||
doc.build(elements, onFirstPage=add_footer, onLaterPages=add_footer)
|
||||
except Exception as e:
|
||||
logger.info(
|
||||
f"Error building the document, line {e.__traceback__.tb_lineno} -- {e}"
|
||||
)
|
||||
raise e
|
||||
|
||||
|
||||
def generate_threatscore_report_job(tenant_id: str, scan_id: str, provider_id: str):
|
||||
# Check if the scan has findings
|
||||
if not ScanSummary.objects.filter(scan_id=scan_id).exists():
|
||||
logger.info(f"No findings found for scan {scan_id}")
|
||||
return {"upload": False}
|
||||
|
||||
provider_obj = Provider.objects.get(id=provider_id)
|
||||
provider_uid = provider_obj.uid
|
||||
provider_type = provider_obj.provider
|
||||
|
||||
if provider_type not in ["aws", "azure", "gcp", "m365"]:
|
||||
logger.info(f"Provider {provider_id} is not supported for threatscore report")
|
||||
return {"upload": False}
|
||||
|
||||
# This compliance is hardcoded because is the only one that is available for the threatscore report
|
||||
compliance_id = f"prowler_threatscore_{provider_type}"
|
||||
logger.info(
|
||||
f"Generating threatscore report for scan {scan_id} with compliance {compliance_id} inside the job"
|
||||
)
|
||||
try:
|
||||
logger.info("Generating the output directory")
|
||||
out_dir, _, threatscore_path = _generate_output_directory(
|
||||
DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error generating output directory: {e}")
|
||||
return {"error": str(e)}
|
||||
|
||||
pdf_path = f"{threatscore_path}_threatscore_report.pdf"
|
||||
logger.info(f"The path for the threatscore report is {pdf_path}")
|
||||
generate_threatscore_report(
|
||||
scan_id=scan_id,
|
||||
compliance_id=compliance_id,
|
||||
output_path=pdf_path,
|
||||
provider_id=provider_id,
|
||||
only_failed=True,
|
||||
min_risk_level=4,
|
||||
)
|
||||
|
||||
upload_uri = _upload_to_s3(tenant_id, pdf_path, scan_id)
|
||||
if upload_uri:
|
||||
try:
|
||||
rmtree(Path(pdf_path).parent, ignore_errors=True)
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting output files: {e}")
|
||||
final_location, did_upload = upload_uri, True
|
||||
else:
|
||||
final_location, did_upload = out_dir, False
|
||||
|
||||
Scan.all_objects.filter(id=scan_id).update(output_location=final_location)
|
||||
logger.info(f"Threatscore report outputs at {final_location}")
|
||||
|
||||
return {"upload": did_upload}
|
||||
@@ -1,22 +1,29 @@
|
||||
import json
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from celery.utils.log import get_task_logger
|
||||
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
|
||||
from django.db import IntegrityError, OperationalError
|
||||
from django.db.models import Case, Count, IntegerField, Sum, When
|
||||
from django.db.models import Case, Count, IntegerField, Prefetch, Sum, When
|
||||
from tasks.utils import CustomEncoder
|
||||
|
||||
from api.compliance import (
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
|
||||
generate_scan_compliance,
|
||||
)
|
||||
from api.db_utils import create_objects_in_batches, rls_transaction
|
||||
from api.db_utils import (
|
||||
create_objects_in_batches,
|
||||
rls_transaction,
|
||||
update_objects_in_batches,
|
||||
)
|
||||
from api.exceptions import ProviderConnectionError
|
||||
from api.models import (
|
||||
ComplianceRequirementOverview,
|
||||
Finding,
|
||||
Processor,
|
||||
Provider,
|
||||
Resource,
|
||||
ResourceScanSummary,
|
||||
@@ -26,7 +33,7 @@ from api.models import (
|
||||
StateChoices,
|
||||
)
|
||||
from api.models import StatusChoices as FindingStatus
|
||||
from api.utils import initialize_prowler_provider
|
||||
from api.utils import initialize_prowler_provider, return_prowler_provider
|
||||
from api.v1.serializers import ScanTaskSerializer
|
||||
from prowler.lib.outputs.finding import Finding as ProwlerFinding
|
||||
from prowler.lib.scan.scan import Scan as ProwlerScan
|
||||
@@ -101,7 +108,10 @@ def _store_resources(
|
||||
|
||||
|
||||
def perform_prowler_scan(
|
||||
tenant_id: str, scan_id: str, provider_id: str, checks_to_execute: list[str] = None
|
||||
tenant_id: str,
|
||||
scan_id: str,
|
||||
provider_id: str,
|
||||
checks_to_execute: list[str] | None = None,
|
||||
):
|
||||
"""
|
||||
Perform a scan using Prowler and store the findings and resources in the database.
|
||||
@@ -132,14 +142,28 @@ def perform_prowler_scan(
|
||||
scan_instance.started_at = datetime.now(tz=timezone.utc)
|
||||
scan_instance.save()
|
||||
|
||||
# Find the mutelist processor if it exists
|
||||
with rls_transaction(tenant_id):
|
||||
try:
|
||||
mutelist_processor = Processor.objects.get(
|
||||
tenant_id=tenant_id, processor_type=Processor.ProcessorChoices.MUTELIST
|
||||
)
|
||||
except Processor.DoesNotExist:
|
||||
mutelist_processor = None
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing mutelist rules: {e}")
|
||||
mutelist_processor = None
|
||||
|
||||
try:
|
||||
with rls_transaction(tenant_id):
|
||||
try:
|
||||
prowler_provider = initialize_prowler_provider(provider_instance)
|
||||
prowler_provider = initialize_prowler_provider(
|
||||
provider_instance, mutelist_processor
|
||||
)
|
||||
provider_instance.connected = True
|
||||
except Exception as e:
|
||||
provider_instance.connected = False
|
||||
exc = ValueError(
|
||||
exc = ProviderConnectionError(
|
||||
f"Provider {provider_instance.provider} is not connected: {e}"
|
||||
)
|
||||
finally:
|
||||
@@ -149,7 +173,8 @@ def perform_prowler_scan(
|
||||
provider_instance.save()
|
||||
|
||||
# If the provider is not connected, raise an exception outside the transaction.
|
||||
# If raised within the transaction, the transaction will be rolled back and the provider will not be marked as not connected.
|
||||
# If raised within the transaction, the transaction will be rolled back and the provider will not be marked
|
||||
# as not connected.
|
||||
if exc:
|
||||
raise exc
|
||||
|
||||
@@ -158,6 +183,7 @@ def perform_prowler_scan(
|
||||
resource_cache = {}
|
||||
tag_cache = {}
|
||||
last_status_cache = {}
|
||||
resource_failed_findings_cache = defaultdict(int)
|
||||
|
||||
for progress, findings in prowler_scan.scan():
|
||||
for finding in findings:
|
||||
@@ -183,6 +209,9 @@ def perform_prowler_scan(
|
||||
},
|
||||
)
|
||||
resource_cache[resource_uid] = resource_instance
|
||||
|
||||
# Initialize all processed resources in the cache
|
||||
resource_failed_findings_cache[resource_uid] = 0
|
||||
else:
|
||||
resource_instance = resource_cache[resource_uid]
|
||||
|
||||
@@ -273,6 +302,9 @@ def perform_prowler_scan(
|
||||
if not last_first_seen_at:
|
||||
last_first_seen_at = datetime.now(tz=timezone.utc)
|
||||
|
||||
# If the finding is muted at this time the reason must be the configured Mutelist
|
||||
muted_reason = "Muted by mutelist" if finding.muted else None
|
||||
|
||||
# Create the finding
|
||||
finding_instance = Finding.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
@@ -288,10 +320,16 @@ def perform_prowler_scan(
|
||||
scan=scan_instance,
|
||||
first_seen_at=last_first_seen_at,
|
||||
muted=finding.muted,
|
||||
muted_reason=muted_reason,
|
||||
compliance=finding.compliance,
|
||||
)
|
||||
finding_instance.add_resources([resource_instance])
|
||||
|
||||
# Increment failed_findings_count cache if the finding status is FAIL and not muted
|
||||
if status == FindingStatus.FAIL and not finding.muted:
|
||||
resource_uid = finding.resource_uid
|
||||
resource_failed_findings_cache[resource_uid] += 1
|
||||
|
||||
# Update scan resource summaries
|
||||
scan_resource_cache.add(
|
||||
(
|
||||
@@ -309,6 +347,24 @@ def perform_prowler_scan(
|
||||
|
||||
scan_instance.state = StateChoices.COMPLETED
|
||||
|
||||
# Update failed_findings_count for all resources in batches if scan completed successfully
|
||||
if resource_failed_findings_cache:
|
||||
resources_to_update = []
|
||||
for resource_uid, failed_count in resource_failed_findings_cache.items():
|
||||
if resource_uid in resource_cache:
|
||||
resource_instance = resource_cache[resource_uid]
|
||||
resource_instance.failed_findings_count = failed_count
|
||||
resources_to_update.append(resource_instance)
|
||||
|
||||
if resources_to_update:
|
||||
update_objects_in_batches(
|
||||
tenant_id=tenant_id,
|
||||
model=Resource,
|
||||
objects=resources_to_update,
|
||||
fields=["failed_findings_count"],
|
||||
batch_size=1000,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error performing scan {scan_id}: {e}")
|
||||
exception = e
|
||||
@@ -361,6 +417,9 @@ def aggregate_findings(tenant_id: str, scan_id: str):
|
||||
changed, unchanged). The results are grouped by `check_id`, `service`, `severity`, and `region`.
|
||||
These aggregated metrics are then stored in the `ScanSummary` table.
|
||||
|
||||
Additionally, it updates the failed_findings_count field for each resource based on the most
|
||||
recent findings for each finding.uid.
|
||||
|
||||
Args:
|
||||
tenant_id (str): The ID of the tenant to which the scan belongs.
|
||||
scan_id (str): The ID of the scan for which findings need to be aggregated.
|
||||
@@ -526,21 +585,30 @@ def create_compliance_requirements(tenant_id: str, scan_id: str):
|
||||
with rls_transaction(tenant_id):
|
||||
scan_instance = Scan.objects.get(pk=scan_id)
|
||||
provider_instance = scan_instance.provider
|
||||
prowler_provider = initialize_prowler_provider(provider_instance)
|
||||
prowler_provider = return_prowler_provider(provider_instance)
|
||||
|
||||
# Get check status data by region from findings
|
||||
findings = (
|
||||
Finding.all_objects.filter(scan_id=scan_id, muted=False)
|
||||
.only("id", "check_id", "status")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"resources",
|
||||
queryset=Resource.objects.only("id", "region"),
|
||||
to_attr="small_resources",
|
||||
)
|
||||
)
|
||||
.iterator(chunk_size=1000)
|
||||
)
|
||||
|
||||
check_status_by_region = {}
|
||||
with rls_transaction(tenant_id):
|
||||
findings = Finding.objects.filter(scan_id=scan_id, muted=False)
|
||||
for finding in findings:
|
||||
# Get region from resources
|
||||
for resource in finding.resources.all():
|
||||
for resource in finding.small_resources:
|
||||
region = resource.region
|
||||
region_dict = check_status_by_region.setdefault(region, {})
|
||||
current_status = region_dict.get(finding.check_id)
|
||||
if current_status == "FAIL":
|
||||
continue
|
||||
region_dict[finding.check_id] = finding.status
|
||||
current_status = check_status_by_region.setdefault(region, {})
|
||||
if current_status.get(finding.check_id) != "FAIL":
|
||||
current_status[finding.check_id] = finding.status
|
||||
|
||||
try:
|
||||
# Try to get regions from provider
|
||||
|
||||
@@ -8,7 +8,7 @@ from config.celery import RLSTask
|
||||
from config.django.base import DJANGO_FINDINGS_BATCH_SIZE, DJANGO_TMP_OUTPUT_DIRECTORY
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from tasks.jobs.backfill import backfill_resource_scan_summaries
|
||||
from tasks.jobs.connection import check_provider_connection
|
||||
from tasks.jobs.connection import check_lighthouse_connection, check_provider_connection
|
||||
from tasks.jobs.deletion import delete_provider, delete_tenant
|
||||
from tasks.jobs.export import (
|
||||
COMPLIANCE_CLASS_MAP,
|
||||
@@ -17,6 +17,7 @@ from tasks.jobs.export import (
|
||||
_generate_output_directory,
|
||||
_upload_to_s3,
|
||||
)
|
||||
from tasks.jobs.report import generate_threatscore_report_job
|
||||
from tasks.jobs.scan import (
|
||||
aggregate_findings,
|
||||
create_compliance_requirements,
|
||||
@@ -37,6 +38,29 @@ from prowler.lib.outputs.finding import Finding as FindingOutput
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str):
|
||||
"""
|
||||
Helper function to perform tasks after a scan is completed.
|
||||
|
||||
Args:
|
||||
tenant_id (str): The tenant ID under which the scan was performed.
|
||||
scan_id (str): The ID of the scan that was performed.
|
||||
provider_id (str): The primary key of the Provider instance that was scanned.
|
||||
"""
|
||||
create_compliance_requirements_task.apply_async(
|
||||
kwargs={"tenant_id": tenant_id, "scan_id": scan_id}
|
||||
)
|
||||
chain(
|
||||
perform_scan_summary_task.si(tenant_id=tenant_id, scan_id=scan_id),
|
||||
generate_outputs_task.si(
|
||||
scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
|
||||
),
|
||||
generate_threatscore_report_task.si(
|
||||
tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
|
||||
),
|
||||
).apply_async()
|
||||
|
||||
|
||||
@shared_task(base=RLSTask, name="provider-connection-check")
|
||||
@set_tenant
|
||||
def check_provider_connection_task(provider_id: str):
|
||||
@@ -103,13 +127,7 @@ def perform_scan_task(
|
||||
checks_to_execute=checks_to_execute,
|
||||
)
|
||||
|
||||
chain(
|
||||
perform_scan_summary_task.si(tenant_id, scan_id),
|
||||
create_compliance_requirements_task.si(tenant_id=tenant_id, scan_id=scan_id),
|
||||
generate_outputs.si(
|
||||
scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
|
||||
),
|
||||
).apply_async()
|
||||
_perform_scan_complete_tasks(tenant_id, scan_id, provider_id)
|
||||
|
||||
return result
|
||||
|
||||
@@ -214,20 +232,12 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
|
||||
scheduler_task_id=periodic_task_instance.id,
|
||||
)
|
||||
|
||||
chain(
|
||||
perform_scan_summary_task.si(tenant_id, scan_instance.id),
|
||||
create_compliance_requirements_task.si(
|
||||
tenant_id=tenant_id, scan_id=str(scan_instance.id)
|
||||
),
|
||||
generate_outputs.si(
|
||||
scan_id=str(scan_instance.id), provider_id=provider_id, tenant_id=tenant_id
|
||||
),
|
||||
).apply_async()
|
||||
_perform_scan_complete_tasks(tenant_id, str(scan_instance.id), provider_id)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@shared_task(name="scan-summary")
|
||||
@shared_task(name="scan-summary", queue="overview")
|
||||
def perform_scan_summary_task(tenant_id: str, scan_id: str):
|
||||
return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
@@ -243,7 +253,7 @@ def delete_tenant_task(tenant_id: str):
|
||||
queue="scan-reports",
|
||||
)
|
||||
@set_tenant(keep_tenant=True)
|
||||
def generate_outputs(scan_id: str, provider_id: str, tenant_id: str):
|
||||
def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
|
||||
"""
|
||||
Process findings in batches and generate output files in multiple formats.
|
||||
|
||||
@@ -271,7 +281,7 @@ def generate_outputs(scan_id: str, provider_id: str, tenant_id: str):
|
||||
|
||||
frameworks_bulk = Compliance.get_bulk(provider_type)
|
||||
frameworks_avail = get_compliance_frameworks(provider_type)
|
||||
out_dir, comp_dir = _generate_output_directory(
|
||||
out_dir, comp_dir, _ = _generate_output_directory(
|
||||
DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
|
||||
)
|
||||
|
||||
@@ -381,7 +391,7 @@ def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):
|
||||
return backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
|
||||
@shared_task(base=RLSTask, name="scan-compliance-overviews")
|
||||
@shared_task(base=RLSTask, name="scan-compliance-overviews", queue="overview")
|
||||
def create_compliance_requirements_task(tenant_id: str, scan_id: str):
|
||||
"""
|
||||
Creates detailed compliance requirement records for a scan.
|
||||
@@ -395,3 +405,42 @@ def create_compliance_requirements_task(tenant_id: str, scan_id: str):
|
||||
scan_id (str): The ID of the scan for which to create records.
|
||||
"""
|
||||
return create_compliance_requirements(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
|
||||
@shared_task(base=RLSTask, name="lighthouse-connection-check")
|
||||
@set_tenant
|
||||
def check_lighthouse_connection_task(lighthouse_config_id: str, tenant_id: str = None):
|
||||
"""
|
||||
Task to check the connection status of a Lighthouse configuration.
|
||||
|
||||
Args:
|
||||
lighthouse_config_id (str): The primary key of the LighthouseConfiguration instance to check.
|
||||
tenant_id (str): The tenant ID for the task.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing:
|
||||
- 'connected' (bool): Indicates whether the connection is successful.
|
||||
- 'error' (str or None): The error message if the connection failed, otherwise `None`.
|
||||
- 'available_models' (list): List of available models if connection is successful.
|
||||
"""
|
||||
return check_lighthouse_connection(lighthouse_config_id=lighthouse_config_id)
|
||||
|
||||
|
||||
@shared_task(
|
||||
base=RLSTask,
|
||||
name="scan-threatscore-report",
|
||||
queue="scan-reports",
|
||||
)
|
||||
@set_tenant(keep_tenant=True)
|
||||
def generate_threatscore_report_task(tenant_id: str, scan_id: str, provider_id: str):
|
||||
"""
|
||||
Task to generate a threatscore report for a given scan.
|
||||
|
||||
Args:
|
||||
tenant_id (str): The tenant identifier.
|
||||
scan_id (str): The scan identifier.
|
||||
provider_id (str): The provider identifier.
|
||||
"""
|
||||
return generate_threatscore_report_job(
|
||||
tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
|
||||
)
|
||||
|
||||
@@ -3,9 +3,9 @@ from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from django_celery_beat.models import IntervalSchedule, PeriodicTask
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from tasks.beat import schedule_provider_scan
|
||||
|
||||
from api.exceptions import ConflictException
|
||||
from api.models import Scan
|
||||
|
||||
|
||||
@@ -48,10 +48,29 @@ class TestScheduleProviderScan:
|
||||
with patch("tasks.tasks.perform_scheduled_scan_task.apply_async"):
|
||||
schedule_provider_scan(provider_instance)
|
||||
|
||||
# Now, try scheduling again, should raise ValidationError
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
# Now, try scheduling again, should raise ConflictException
|
||||
with pytest.raises(ConflictException) as exc_info:
|
||||
schedule_provider_scan(provider_instance)
|
||||
|
||||
assert "There is already a scheduled scan for this provider." in str(
|
||||
exc_info.value
|
||||
)
|
||||
|
||||
def test_remove_periodic_task(self, providers_fixture):
|
||||
provider_instance = providers_fixture[0]
|
||||
|
||||
assert Scan.objects.count() == 0
|
||||
with patch("tasks.tasks.perform_scheduled_scan_task.apply_async"):
|
||||
schedule_provider_scan(provider_instance)
|
||||
|
||||
assert Scan.objects.count() == 1
|
||||
scan = Scan.objects.first()
|
||||
periodic_task = scan.scheduler_task
|
||||
assert periodic_task is not None
|
||||
|
||||
periodic_task.delete()
|
||||
|
||||
scan.refresh_from_db()
|
||||
# Assert the scan still exists but its scheduler_task is set to None
|
||||
# Otherwise, Scan.DoesNotExist would be raised
|
||||
assert Scan.objects.get(id=scan.id).scheduler_task is None
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from datetime import datetime, timezone
|
||||
from unittest.mock import patch, MagicMock
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from tasks.jobs.connection import check_lighthouse_connection, check_provider_connection
|
||||
|
||||
from api.models import Provider
|
||||
from tasks.jobs.connection import check_provider_connection
|
||||
from api.models import LighthouseConfiguration, Provider
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -70,3 +70,60 @@ def test_check_provider_connection_exception(
|
||||
|
||||
mock_provider_instance.save.assert_called_once()
|
||||
assert mock_provider_instance.connected is False
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"lighthouse_data",
|
||||
[
|
||||
{
|
||||
"name": "OpenAI",
|
||||
"api_key_decoded": "sk-test1234567890T3BlbkFJtest1234567890",
|
||||
"model": "gpt-4o",
|
||||
"temperature": 0,
|
||||
"max_tokens": 4000,
|
||||
"business_context": "Test business context",
|
||||
"is_active": True,
|
||||
},
|
||||
],
|
||||
)
|
||||
@patch("tasks.jobs.connection.openai.OpenAI")
|
||||
@pytest.mark.django_db
|
||||
def test_check_lighthouse_connection(
|
||||
mock_openai_client, tenants_fixture, lighthouse_data
|
||||
):
|
||||
lighthouse_config = LighthouseConfiguration.objects.create(
|
||||
**lighthouse_data, tenant_id=tenants_fixture[0].id
|
||||
)
|
||||
|
||||
mock_models = MagicMock()
|
||||
mock_models.data = [MagicMock(id="gpt-4o"), MagicMock(id="gpt-4o-mini")]
|
||||
mock_openai_client.return_value.models.list.return_value = mock_models
|
||||
|
||||
result = check_lighthouse_connection(
|
||||
lighthouse_config_id=str(lighthouse_config.id),
|
||||
)
|
||||
lighthouse_config.refresh_from_db()
|
||||
|
||||
mock_openai_client.assert_called_once_with(
|
||||
api_key=lighthouse_data["api_key_decoded"]
|
||||
)
|
||||
assert lighthouse_config.is_active is True
|
||||
assert result["connected"] is True
|
||||
assert result["error"] is None
|
||||
assert result["available_models"] == ["gpt-4o", "gpt-4o-mini"]
|
||||
|
||||
|
||||
@patch("tasks.jobs.connection.LighthouseConfiguration.objects.get")
|
||||
@pytest.mark.django_db
|
||||
def test_check_lighthouse_connection_missing_api_key(mock_lighthouse_get):
|
||||
mock_lighthouse_instance = MagicMock()
|
||||
mock_lighthouse_instance.api_key_decoded = None
|
||||
mock_lighthouse_get.return_value = mock_lighthouse_instance
|
||||
|
||||
result = check_lighthouse_connection("lighthouse_config_id")
|
||||
|
||||
assert result["connected"] is False
|
||||
assert result["error"] == "API key is invalid or missing."
|
||||
assert result["available_models"] == []
|
||||
assert mock_lighthouse_instance.is_active is False
|
||||
mock_lighthouse_instance.save.assert_called_once()
|
||||
|
||||
@@ -136,12 +136,35 @@ class TestOutputs:
|
||||
scan_id = "s1"
|
||||
provider = "aws"
|
||||
|
||||
path, compliance = _generate_output_directory(
|
||||
path, compliance, threatscore = _generate_output_directory(
|
||||
base_dir, provider, tenant_id, scan_id
|
||||
)
|
||||
|
||||
assert os.path.isdir(os.path.dirname(path))
|
||||
assert os.path.isdir(os.path.dirname(compliance))
|
||||
assert os.path.isdir(os.path.dirname(threatscore))
|
||||
|
||||
assert path.endswith(f"{provider}-{output_file_timestamp}")
|
||||
assert compliance.endswith(f"{provider}-{output_file_timestamp}")
|
||||
assert threatscore.endswith(f"{provider}-{output_file_timestamp}")
|
||||
|
||||
def test_generate_output_directory_invalid_character(self, tmpdir):
|
||||
from prowler.config.config import output_file_timestamp
|
||||
|
||||
base_tmp = Path(str(tmpdir.mkdir("generate_output")))
|
||||
base_dir = str(base_tmp)
|
||||
tenant_id = "t1"
|
||||
scan_id = "s1"
|
||||
provider = "aws/test@check"
|
||||
|
||||
path, compliance, threatscore = _generate_output_directory(
|
||||
base_dir, provider, tenant_id, scan_id
|
||||
)
|
||||
|
||||
assert os.path.isdir(os.path.dirname(path))
|
||||
assert os.path.isdir(os.path.dirname(compliance))
|
||||
assert os.path.isdir(os.path.dirname(threatscore))
|
||||
|
||||
assert path.endswith(f"aws-test-check-{output_file_timestamp}")
|
||||
assert compliance.endswith(f"aws-test-check-{output_file_timestamp}")
|
||||
assert threatscore.endswith(f"aws-test-check-{output_file_timestamp}")
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -3,9 +3,10 @@ from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from tasks.tasks import generate_outputs
|
||||
from tasks.tasks import _perform_scan_complete_tasks, generate_outputs_task
|
||||
|
||||
|
||||
# TODO Move this to outputs/reports jobs
|
||||
@pytest.mark.django_db
|
||||
class TestGenerateOutputs:
|
||||
def setup_method(self):
|
||||
@@ -17,7 +18,7 @@ class TestGenerateOutputs:
|
||||
with patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter:
|
||||
mock_filter.return_value.exists.return_value = False
|
||||
|
||||
result = generate_outputs(
|
||||
result = generate_outputs_task(
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
tenant_id=self.tenant_id,
|
||||
@@ -99,7 +100,7 @@ class TestGenerateOutputs:
|
||||
mock_compress.return_value = "/tmp/zipped.zip"
|
||||
mock_upload.return_value = "s3://bucket/zipped.zip"
|
||||
|
||||
result = generate_outputs(
|
||||
result = generate_outputs_task(
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
tenant_id=self.tenant_id,
|
||||
@@ -150,7 +151,7 @@ class TestGenerateOutputs:
|
||||
True,
|
||||
]
|
||||
|
||||
result = generate_outputs(
|
||||
result = generate_outputs_task(
|
||||
scan_id="scan",
|
||||
provider_id="provider",
|
||||
tenant_id=self.tenant_id,
|
||||
@@ -208,7 +209,7 @@ class TestGenerateOutputs:
|
||||
{"aws": [(lambda x: True, MagicMock())]},
|
||||
),
|
||||
):
|
||||
generate_outputs(
|
||||
generate_outputs_task(
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
tenant_id=self.tenant_id,
|
||||
@@ -276,7 +277,7 @@ class TestGenerateOutputs:
|
||||
}
|
||||
},
|
||||
):
|
||||
result = generate_outputs(
|
||||
result = generate_outputs_task(
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
tenant_id=self.tenant_id,
|
||||
@@ -346,7 +347,7 @@ class TestGenerateOutputs:
|
||||
):
|
||||
mock_summary.return_value.exists.return_value = True
|
||||
|
||||
result = generate_outputs(
|
||||
result = generate_outputs_task(
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
tenant_id=self.tenant_id,
|
||||
@@ -407,9 +408,41 @@ class TestGenerateOutputs:
|
||||
),
|
||||
):
|
||||
with caplog.at_level("ERROR"):
|
||||
generate_outputs(
|
||||
generate_outputs_task(
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
tenant_id=self.tenant_id,
|
||||
)
|
||||
assert "Error deleting output files" in caplog.text
|
||||
|
||||
|
||||
class TestScanCompleteTasks:
|
||||
@patch("tasks.tasks.create_compliance_requirements_task.apply_async")
|
||||
@patch("tasks.tasks.perform_scan_summary_task.si")
|
||||
@patch("tasks.tasks.generate_outputs_task.si")
|
||||
@patch("tasks.tasks.generate_threatscore_report_task.si")
|
||||
def test_scan_complete_tasks(
|
||||
self,
|
||||
mock_threatscore_task,
|
||||
mock_outputs_task,
|
||||
mock_scan_summary_task,
|
||||
mock_compliance_tasks,
|
||||
):
|
||||
_perform_scan_complete_tasks("tenant-id", "scan-id", "provider-id")
|
||||
mock_compliance_tasks.assert_called_once_with(
|
||||
kwargs={"tenant_id": "tenant-id", "scan_id": "scan-id"},
|
||||
)
|
||||
mock_scan_summary_task.assert_called_once_with(
|
||||
scan_id="scan-id",
|
||||
tenant_id="tenant-id",
|
||||
)
|
||||
mock_outputs_task.assert_called_once_with(
|
||||
scan_id="scan-id",
|
||||
provider_id="provider-id",
|
||||
tenant_id="tenant-id",
|
||||
)
|
||||
mock_threatscore_task.assert_called_once_with(
|
||||
scan_id="scan-id",
|
||||
provider_id="provider-id",
|
||||
tenant_id="tenant-id",
|
||||
)
|
||||
|
||||
367
api/src/backend/tasks/tests/test_threatscore.py
Normal file
367
api/src/backend/tasks/tests/test_threatscore.py
Normal file
@@ -0,0 +1,367 @@
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from tasks.jobs.report import (
|
||||
generate_threatscore_report,
|
||||
generate_threatscore_report_job,
|
||||
)
|
||||
from tasks.tasks import generate_threatscore_report_task
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestGenerateThreatscoreReport:
|
||||
def setup_method(self):
|
||||
self.scan_id = str(uuid.uuid4())
|
||||
self.provider_id = str(uuid.uuid4())
|
||||
self.tenant_id = str(uuid.uuid4())
|
||||
|
||||
def test_no_findings_returns_early(self):
|
||||
with patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter:
|
||||
mock_filter.return_value.exists.return_value = False
|
||||
|
||||
result = generate_threatscore_report_job(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
|
||||
assert result == {"upload": False}
|
||||
mock_filter.assert_called_once_with(scan_id=self.scan_id)
|
||||
|
||||
@patch("tasks.jobs.report.rmtree")
|
||||
@patch("tasks.jobs.report._upload_to_s3")
|
||||
@patch("tasks.jobs.report.generate_threatscore_report")
|
||||
@patch("tasks.jobs.report._generate_output_directory")
|
||||
@patch("tasks.jobs.report.Provider.objects.get")
|
||||
@patch("tasks.jobs.report.ScanSummary.objects.filter")
|
||||
@patch("tasks.jobs.report.Scan.all_objects.filter")
|
||||
def test_generate_threatscore_report_happy_path(
|
||||
self,
|
||||
mock_scan_update,
|
||||
mock_scan_summary_filter,
|
||||
mock_provider_get,
|
||||
mock_generate_output_directory,
|
||||
mock_generate_report,
|
||||
mock_upload,
|
||||
mock_rmtree,
|
||||
):
|
||||
mock_scan_summary_filter.return_value.exists.return_value = True
|
||||
|
||||
mock_provider = MagicMock()
|
||||
mock_provider.uid = "provider-uid"
|
||||
mock_provider.provider = "aws"
|
||||
mock_provider_get.return_value = mock_provider
|
||||
|
||||
mock_generate_output_directory.return_value = (
|
||||
"/tmp/output",
|
||||
"/tmp/compressed",
|
||||
"/tmp/threatscore_path",
|
||||
)
|
||||
|
||||
mock_upload.return_value = "s3://bucket/threatscore_report.pdf"
|
||||
|
||||
result = generate_threatscore_report_job(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
|
||||
assert result == {"upload": True}
|
||||
mock_generate_report.assert_called_once_with(
|
||||
scan_id=self.scan_id,
|
||||
compliance_id="prowler_threatscore_aws",
|
||||
output_path="/tmp/threatscore_path_threatscore_report.pdf",
|
||||
provider_id=self.provider_id,
|
||||
only_failed=True,
|
||||
min_risk_level=4,
|
||||
)
|
||||
mock_scan_update.return_value.update.assert_called_once_with(
|
||||
output_location="s3://bucket/threatscore_report.pdf"
|
||||
)
|
||||
mock_rmtree.assert_called_once_with(
|
||||
Path("/tmp/threatscore_path_threatscore_report.pdf").parent,
|
||||
ignore_errors=True,
|
||||
)
|
||||
|
||||
def test_generate_threatscore_report_fails_upload(self):
|
||||
with (
|
||||
patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
|
||||
patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
|
||||
patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
|
||||
patch("tasks.jobs.report.generate_threatscore_report"),
|
||||
patch("tasks.jobs.report._upload_to_s3", return_value=None),
|
||||
patch("tasks.jobs.report.Scan.all_objects.filter") as mock_scan_update,
|
||||
):
|
||||
mock_filter.return_value.exists.return_value = True
|
||||
|
||||
# Mock provider
|
||||
mock_provider = MagicMock()
|
||||
mock_provider.uid = "aws-provider-uid"
|
||||
mock_provider.provider = "aws"
|
||||
mock_provider_get.return_value = mock_provider
|
||||
|
||||
mock_gen_dir.return_value = (
|
||||
"/tmp/output",
|
||||
"/tmp/compressed",
|
||||
"/tmp/threatscore_path",
|
||||
)
|
||||
|
||||
result = generate_threatscore_report_job(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
|
||||
assert result == {"upload": False}
|
||||
mock_scan_update.return_value.update.assert_called_once()
|
||||
|
||||
def test_generate_threatscore_report_logs_rmtree_exception(self, caplog):
|
||||
with (
|
||||
patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
|
||||
patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
|
||||
patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
|
||||
patch("tasks.jobs.report.generate_threatscore_report"),
|
||||
patch(
|
||||
"tasks.jobs.report._upload_to_s3", return_value="s3://bucket/report.pdf"
|
||||
),
|
||||
patch("tasks.jobs.report.Scan.all_objects.filter"),
|
||||
patch(
|
||||
"tasks.jobs.report.rmtree", side_effect=Exception("Test deletion error")
|
||||
),
|
||||
):
|
||||
mock_filter.return_value.exists.return_value = True
|
||||
|
||||
# Mock provider
|
||||
mock_provider = MagicMock()
|
||||
mock_provider.uid = "aws-provider-uid"
|
||||
mock_provider.provider = "aws"
|
||||
mock_provider_get.return_value = mock_provider
|
||||
|
||||
mock_gen_dir.return_value = (
|
||||
"/tmp/output",
|
||||
"/tmp/compressed",
|
||||
"/tmp/threatscore_path",
|
||||
)
|
||||
|
||||
with caplog.at_level("ERROR"):
|
||||
generate_threatscore_report_job(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
assert "Error deleting output files" in caplog.text
|
||||
|
||||
def test_generate_threatscore_report_azure_provider(self):
|
||||
with (
|
||||
patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
|
||||
patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
|
||||
patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
|
||||
patch("tasks.jobs.report.generate_threatscore_report") as mock_generate,
|
||||
patch(
|
||||
"tasks.jobs.report._upload_to_s3", return_value="s3://bucket/report.pdf"
|
||||
),
|
||||
patch("tasks.jobs.report.Scan.all_objects.filter"),
|
||||
patch("tasks.jobs.report.rmtree"),
|
||||
):
|
||||
mock_filter.return_value.exists.return_value = True
|
||||
|
||||
mock_provider = MagicMock()
|
||||
mock_provider.uid = "azure-provider-uid"
|
||||
mock_provider.provider = "azure"
|
||||
mock_provider_get.return_value = mock_provider
|
||||
|
||||
mock_gen_dir.return_value = (
|
||||
"/tmp/output",
|
||||
"/tmp/compressed",
|
||||
"/tmp/threatscore_path",
|
||||
)
|
||||
|
||||
generate_threatscore_report_job(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
|
||||
mock_generate.assert_called_once_with(
|
||||
scan_id=self.scan_id,
|
||||
compliance_id="prowler_threatscore_azure",
|
||||
output_path="/tmp/threatscore_path_threatscore_report.pdf",
|
||||
provider_id=self.provider_id,
|
||||
only_failed=True,
|
||||
min_risk_level=4,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestGenerateThreatscoreReportFunction:
|
||||
def setup_method(self):
|
||||
self.scan_id = str(uuid.uuid4())
|
||||
self.provider_id = str(uuid.uuid4())
|
||||
self.compliance_id = "prowler_threatscore_aws"
|
||||
self.output_path = "/tmp/test_threatscore_report.pdf"
|
||||
|
||||
@patch("tasks.jobs.report.initialize_prowler_provider")
|
||||
@patch("tasks.jobs.report.Provider.objects.get")
|
||||
@patch("tasks.jobs.report.Compliance.get_bulk")
|
||||
@patch("tasks.jobs.report.Finding.all_objects.filter")
|
||||
@patch("tasks.jobs.report.batched")
|
||||
@patch("tasks.jobs.report.FindingOutput.transform_api_finding")
|
||||
@patch("tasks.jobs.report.SimpleDocTemplate")
|
||||
@patch("tasks.jobs.report.Image")
|
||||
@patch("tasks.jobs.report.Spacer")
|
||||
@patch("tasks.jobs.report.Paragraph")
|
||||
@patch("tasks.jobs.report.PageBreak")
|
||||
@patch("tasks.jobs.report.Table")
|
||||
@patch("tasks.jobs.report.TableStyle")
|
||||
@patch("tasks.jobs.report.plt.subplots")
|
||||
@patch("tasks.jobs.report.plt.savefig")
|
||||
@patch("tasks.jobs.report.io.BytesIO")
|
||||
def test_generate_threatscore_report_success(
|
||||
self,
|
||||
mock_bytesio,
|
||||
mock_savefig,
|
||||
mock_subplots,
|
||||
mock_table_style,
|
||||
mock_table,
|
||||
mock_page_break,
|
||||
mock_paragraph,
|
||||
mock_spacer,
|
||||
mock_image,
|
||||
mock_doc_template,
|
||||
mock_transform_finding,
|
||||
mock_batched,
|
||||
mock_finding_filter,
|
||||
mock_compliance_get_bulk,
|
||||
mock_provider_get,
|
||||
mock_initialize_provider,
|
||||
):
|
||||
mock_provider = MagicMock()
|
||||
mock_provider.provider = "aws"
|
||||
mock_provider_get.return_value = mock_provider
|
||||
|
||||
prowler_provider = MagicMock()
|
||||
mock_initialize_provider.return_value = prowler_provider
|
||||
|
||||
mock_compliance_obj = MagicMock()
|
||||
mock_compliance_obj.Framework = "ProwlerThreatScore"
|
||||
mock_compliance_obj.Version = "1.0"
|
||||
mock_compliance_obj.Description = "Test Description"
|
||||
mock_compliance_obj.Requirements = []
|
||||
mock_compliance_get_bulk.return_value = {
|
||||
self.compliance_id: mock_compliance_obj
|
||||
}
|
||||
|
||||
mock_finding = MagicMock()
|
||||
mock_finding.uid = "finding-1"
|
||||
mock_finding_filter.return_value.order_by.return_value.iterator.return_value = [
|
||||
mock_finding
|
||||
]
|
||||
|
||||
mock_batched.return_value = [([mock_finding], True)]
|
||||
|
||||
mock_transformed_finding = MagicMock()
|
||||
mock_transformed_finding.check_id = "check-1"
|
||||
mock_transformed_finding.status = "FAIL"
|
||||
mock_transformed_finding.description = "Test finding"
|
||||
mock_transformed_finding.metadata = MagicMock()
|
||||
mock_transformed_finding.metadata.CheckTitle = "Test Check"
|
||||
mock_transformed_finding.metadata.Severity = "HIGH"
|
||||
mock_transformed_finding.resource_name = "test-resource"
|
||||
mock_transformed_finding.region = "us-east-1"
|
||||
mock_transform_finding.return_value = mock_transformed_finding
|
||||
|
||||
mock_doc = MagicMock()
|
||||
mock_doc_template.return_value = mock_doc
|
||||
|
||||
mock_fig, mock_ax = MagicMock(), MagicMock()
|
||||
mock_subplots.return_value = (mock_fig, mock_ax)
|
||||
mock_buffer = MagicMock()
|
||||
mock_bytesio.return_value = mock_buffer
|
||||
|
||||
mock_image.return_value = MagicMock()
|
||||
mock_spacer.return_value = MagicMock()
|
||||
mock_paragraph.return_value = MagicMock()
|
||||
mock_page_break.return_value = MagicMock()
|
||||
mock_table.return_value = MagicMock()
|
||||
mock_table_style.return_value = MagicMock()
|
||||
|
||||
generate_threatscore_report(
|
||||
scan_id=self.scan_id,
|
||||
compliance_id=self.compliance_id,
|
||||
output_path=self.output_path,
|
||||
provider_id=self.provider_id,
|
||||
only_failed=True,
|
||||
min_risk_level=4,
|
||||
)
|
||||
|
||||
mock_provider_get.assert_called_once_with(id=self.provider_id)
|
||||
mock_initialize_provider.assert_called_once_with(mock_provider)
|
||||
mock_compliance_get_bulk.assert_called_once_with("aws")
|
||||
mock_finding_filter.assert_called_once_with(scan_id=self.scan_id)
|
||||
mock_batched.assert_called_once()
|
||||
mock_transform_finding.assert_called_once_with(mock_finding, prowler_provider)
|
||||
mock_doc_template.assert_called_once()
|
||||
mock_doc.build.assert_called_once()
|
||||
|
||||
@patch("tasks.jobs.report.initialize_prowler_provider")
|
||||
@patch("tasks.jobs.report.Provider.objects.get")
|
||||
@patch("tasks.jobs.report.Compliance.get_bulk")
|
||||
@patch("tasks.jobs.report.Finding.all_objects.filter")
|
||||
def test_generate_threatscore_report_exception_handling(
|
||||
self,
|
||||
mock_finding_filter,
|
||||
mock_compliance_get_bulk,
|
||||
mock_provider_get,
|
||||
mock_initialize_provider,
|
||||
):
|
||||
mock_provider_get.side_effect = Exception("Provider not found")
|
||||
|
||||
with pytest.raises(Exception, match="Provider not found"):
|
||||
generate_threatscore_report(
|
||||
scan_id=self.scan_id,
|
||||
compliance_id=self.compliance_id,
|
||||
output_path=self.output_path,
|
||||
provider_id=self.provider_id,
|
||||
only_failed=True,
|
||||
min_risk_level=4,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestGenerateThreatscoreReportTask:
|
||||
def setup_method(self):
|
||||
self.scan_id = str(uuid.uuid4())
|
||||
self.provider_id = str(uuid.uuid4())
|
||||
self.tenant_id = str(uuid.uuid4())
|
||||
|
||||
@patch("tasks.tasks.generate_threatscore_report_job")
|
||||
def test_generate_threatscore_report_task_calls_job(self, mock_generate_job):
|
||||
mock_generate_job.return_value = {"upload": True}
|
||||
|
||||
result = generate_threatscore_report_task(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
|
||||
assert result == {"upload": True}
|
||||
mock_generate_job.assert_called_once_with(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
|
||||
@patch("tasks.tasks.generate_threatscore_report_job")
|
||||
def test_generate_threatscore_report_task_handles_job_exception(
|
||||
self, mock_generate_job
|
||||
):
|
||||
mock_generate_job.side_effect = Exception("Job failed")
|
||||
|
||||
with pytest.raises(Exception, match="Job failed"):
|
||||
generate_threatscore_report_task(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
128
api/tests/performance/scenarios/compliance.py
Normal file
128
api/tests/performance/scenarios/compliance.py
Normal file
@@ -0,0 +1,128 @@
|
||||
import random
|
||||
from collections import defaultdict
|
||||
|
||||
import requests
|
||||
from locust import events, task
|
||||
from utils.helpers import APIUserBase, get_api_token, get_auth_headers
|
||||
|
||||
GLOBAL = {
|
||||
"token": None,
|
||||
"available_scans_info": {},
|
||||
}
|
||||
SUPPORTED_COMPLIANCE_IDS = {
|
||||
"aws": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
|
||||
"gcp": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
|
||||
"azure": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
|
||||
"m365": ["cis_4.0", "iso27001_2022", "prowler_threatscore"],
|
||||
}
|
||||
|
||||
|
||||
def _get_random_scan() -> tuple:
|
||||
provider_type = random.choice(list(GLOBAL["available_scans_info"].keys()))
|
||||
scan_info = random.choice(GLOBAL["available_scans_info"][provider_type])
|
||||
return provider_type, scan_info
|
||||
|
||||
|
||||
def _get_random_compliance_id(provider: str) -> str:
|
||||
return f"{random.choice(SUPPORTED_COMPLIANCE_IDS[provider])}_{provider}"
|
||||
|
||||
|
||||
def _get_compliance_available_scans_by_provider_type(host: str, token: str) -> dict:
|
||||
excluded_providers = ["kubernetes"]
|
||||
|
||||
response_dict = defaultdict(list)
|
||||
provider_response = requests.get(
|
||||
f"{host}/providers?fields[providers]=id,provider&filter[connected]=true",
|
||||
headers=get_auth_headers(token),
|
||||
)
|
||||
for provider in provider_response.json()["data"]:
|
||||
provider_id = provider["id"]
|
||||
provider_type = provider["attributes"]["provider"]
|
||||
if provider_type in excluded_providers:
|
||||
continue
|
||||
|
||||
scan_response = requests.get(
|
||||
f"{host}/scans?fields[scans]=id&filter[provider]={provider_id}&filter[state]=completed",
|
||||
headers=get_auth_headers(token),
|
||||
)
|
||||
scan_data = scan_response.json()["data"]
|
||||
if not scan_data:
|
||||
continue
|
||||
scan_id = scan_data[0]["id"]
|
||||
response_dict[provider_type].append(scan_id)
|
||||
return response_dict
|
||||
|
||||
|
||||
def _get_compliance_regions_from_scan(host: str, token: str, scan_id: str) -> list:
|
||||
response = requests.get(
|
||||
f"{host}/compliance-overviews/metadata?filter[scan_id]={scan_id}",
|
||||
headers=get_auth_headers(token),
|
||||
)
|
||||
assert response.status_code == 200, f"Failed to get scan: {response.text}"
|
||||
return response.json()["data"]["attributes"]["regions"]
|
||||
|
||||
|
||||
@events.test_start.add_listener
|
||||
def on_test_start(environment, **kwargs):
|
||||
GLOBAL["token"] = get_api_token(environment.host)
|
||||
scans_by_provider = _get_compliance_available_scans_by_provider_type(
|
||||
environment.host, GLOBAL["token"]
|
||||
)
|
||||
scan_info = defaultdict(list)
|
||||
for provider, scans in scans_by_provider.items():
|
||||
for scan in scans:
|
||||
scan_info[provider].append(
|
||||
{
|
||||
"scan_id": scan,
|
||||
"regions": _get_compliance_regions_from_scan(
|
||||
environment.host, GLOBAL["token"], scan
|
||||
),
|
||||
}
|
||||
)
|
||||
GLOBAL["available_scans_info"] = scan_info
|
||||
|
||||
|
||||
class APIUser(APIUserBase):
|
||||
def on_start(self):
|
||||
self.token = GLOBAL["token"]
|
||||
|
||||
@task(3)
|
||||
def compliance_overviews_default(self):
|
||||
provider_type, scan_info = _get_random_scan()
|
||||
name = f"/compliance-overviews ({provider_type})"
|
||||
endpoint = f"/compliance-overviews?" f"filter[scan_id]={scan_info['scan_id']}"
|
||||
self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
|
||||
|
||||
@task(2)
|
||||
def compliance_overviews_region(self):
|
||||
provider_type, scan_info = _get_random_scan()
|
||||
name = f"/compliance-overviews?filter[region] ({provider_type})"
|
||||
endpoint = (
|
||||
f"/compliance-overviews"
|
||||
f"?filter[scan_id]={scan_info['scan_id']}"
|
||||
f"&filter[region]={random.choice(scan_info['regions'])}"
|
||||
)
|
||||
self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
|
||||
|
||||
@task(2)
|
||||
def compliance_overviews_requirements(self):
|
||||
provider_type, scan_info = _get_random_scan()
|
||||
compliance_id = _get_random_compliance_id(provider_type)
|
||||
name = f"/compliance-overviews/requirements ({compliance_id})"
|
||||
endpoint = (
|
||||
f"/compliance-overviews/requirements"
|
||||
f"?filter[scan_id]={scan_info['scan_id']}"
|
||||
f"&filter[compliance_id]={compliance_id}"
|
||||
)
|
||||
self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
|
||||
|
||||
@task
|
||||
def compliance_overviews_attributes(self):
|
||||
provider_type, _ = _get_random_scan()
|
||||
compliance_id = _get_random_compliance_id(provider_type)
|
||||
name = f"/compliance-overviews/attributes ({compliance_id})"
|
||||
endpoint = (
|
||||
f"/compliance-overviews/attributes"
|
||||
f"?filter[compliance_id]={compliance_id}"
|
||||
)
|
||||
self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
|
||||
234
api/tests/performance/scenarios/resources.py
Normal file
234
api/tests/performance/scenarios/resources.py
Normal file
@@ -0,0 +1,234 @@
|
||||
from locust import events, task
|
||||
from utils.config import (
|
||||
L_PROVIDER_NAME,
|
||||
M_PROVIDER_NAME,
|
||||
RESOURCES_UI_FIELDS,
|
||||
S_PROVIDER_NAME,
|
||||
TARGET_INSERTED_AT,
|
||||
)
|
||||
from utils.helpers import (
|
||||
APIUserBase,
|
||||
get_api_token,
|
||||
get_auth_headers,
|
||||
get_dynamic_filters_pairs,
|
||||
get_next_resource_filter,
|
||||
get_scan_id_from_provider_name,
|
||||
)
|
||||
|
||||
GLOBAL = {
|
||||
"token": None,
|
||||
"scan_ids": {},
|
||||
"resource_filters": None,
|
||||
"large_resource_filters": None,
|
||||
}
|
||||
|
||||
|
||||
@events.test_start.add_listener
|
||||
def on_test_start(environment, **kwargs):
|
||||
GLOBAL["token"] = get_api_token(environment.host)
|
||||
|
||||
GLOBAL["scan_ids"]["small"] = get_scan_id_from_provider_name(
|
||||
environment.host, GLOBAL["token"], S_PROVIDER_NAME
|
||||
)
|
||||
GLOBAL["scan_ids"]["medium"] = get_scan_id_from_provider_name(
|
||||
environment.host, GLOBAL["token"], M_PROVIDER_NAME
|
||||
)
|
||||
GLOBAL["scan_ids"]["large"] = get_scan_id_from_provider_name(
|
||||
environment.host, GLOBAL["token"], L_PROVIDER_NAME
|
||||
)
|
||||
|
||||
GLOBAL["resource_filters"] = get_dynamic_filters_pairs(
|
||||
environment.host, GLOBAL["token"], "resources"
|
||||
)
|
||||
GLOBAL["large_resource_filters"] = get_dynamic_filters_pairs(
|
||||
environment.host, GLOBAL["token"], "resources", GLOBAL["scan_ids"]["large"]
|
||||
)
|
||||
|
||||
|
||||
class APIUser(APIUserBase):
    """Locust user that load-tests the ``/resources`` API endpoints.

    Expensive setup (auth token, per-size scan IDs, dynamic filter pairs) is
    resolved once per run in ``on_test_start`` and copied onto each user in
    ``on_start``. Task weights (``@task(n)``) bias traffic toward the
    endpoints the UI hits most often.
    """

    def on_start(self):
        # Copy the run-wide fixtures resolved in on_test_start onto this user.
        self.token = GLOBAL["token"]
        self.s_scan_id = GLOBAL["scan_ids"]["small"]
        self.m_scan_id = GLOBAL["scan_ids"]["medium"]
        self.l_scan_id = GLOBAL["scan_ids"]["large"]
        self.available_resource_filters = GLOBAL["resource_filters"]
        self.available_resource_filters_large_scan = GLOBAL["large_resource_filters"]

    @task
    def resources_default(self):
        """Paginated resource listing filtered by the target date."""
        name = "/resources"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_default_ui_fields(self):
        """Resource listing requesting only the sparse field set used by the UI."""
        name = "/resources?fields"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}"
            f"&fields[resources]={','.join(RESOURCES_UI_FIELDS)}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_default_include(self):
        """Resource listing with the provider relationship included."""
        name = "/resources?include"
        page = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
            f"&include=provider"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_metadata(self):
        """Aggregated resource metadata filtered by the target date."""
        name = "/resources/metadata"
        endpoint = f"/resources/metadata?filter[updated_at]={TARGET_INSERTED_AT}"
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_scan_small(self):
        """Resource listing scoped to the small (~50k resources) scan."""
        name = "/resources?filter[scan_id] - 50k"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}&filter[scan]={self.s_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_metadata_scan_small(self):
        """Resource metadata scoped to the small scan."""
        name = "/resources/metadata?filter[scan_id] - 50k"
        # Fixed: the endpoint previously began with "?&", which emits an
        # empty first query parameter.
        endpoint = f"/resources/metadata?filter[scan]={self.s_scan_id}"
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(2)
    def resources_scan_medium(self):
        """Resource listing scoped to the medium (~250k resources) scan."""
        name = "/resources?filter[scan_id] - 250k"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}&filter[scan]={self.m_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_metadata_scan_medium(self):
        """Resource metadata scoped to the medium scan."""
        name = "/resources/metadata?filter[scan_id] - 250k"
        # Fixed: stray "?&" removed (see resources_metadata_scan_small).
        endpoint = f"/resources/metadata?filter[scan]={self.m_scan_id}"
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_scan_large(self):
        """Resource listing scoped to the large (~500k resources) scan."""
        name = "/resources?filter[scan_id] - 500k"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}&filter[scan]={self.l_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_scan_large_include(self):
        """Large-scan resource listing with the provider relationship included."""
        name = "/resources?filter[scan_id]&include - 500k"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}"
            f"&filter[scan]={self.l_scan_id}"
            f"&include=provider"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_metadata_scan_large(self):
        """Resource metadata scoped to the large scan."""
        name = "/resources/metadata?filter[scan_id] - 500k"
        # Fixed: stray "?&" removed (see resources_metadata_scan_small).
        endpoint = f"/resources/metadata?filter[scan]={self.l_scan_id}"
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(2)
    def resources_filters(self):
        """Resource listing using a rotating dynamic filter, with include."""
        name = "/resources?filter[resource_filter]&include"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )

        endpoint = (
            f"/resources?filter[{filter_name}]={filter_value}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
            f"&include=provider"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_metadata_filters(self):
        """Resource metadata using a rotating dynamic filter."""
        name = "/resources/metadata?filter[resource_filter]"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )

        endpoint = (
            f"/resources/metadata?filter[{filter_name}]={filter_value}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_metadata_filters_scan_large(self):
        """Resource metadata combining a dynamic filter with the large scan."""
        name = "/resources/metadata?filter[resource_filter]&filter[scan_id] - 500k"
        # Fixed: use the filter pairs fetched for the large scan; the
        # generic pairs were used before, leaving
        # available_resource_filters_large_scan unused and potentially
        # producing filters that match nothing in this scan.
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters_large_scan
        )

        endpoint = (
            f"/resources/metadata?filter[{filter_name}]={filter_value}"
            f"&filter[scan]={self.l_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(2)
    def resourcess_filter_large_scan_include(self):
        # NOTE(review): identifier keeps its original (misspelled
        # "resourcess") name so the registered task set is unchanged.
        """Large-scan resource listing with a dynamic filter and include."""
        name = "/resources?filter[resource_filter][scan]&include - 500k"
        # Fixed: use the large-scan filter pairs (see
        # resources_metadata_filters_scan_large).
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters_large_scan
        )

        endpoint = (
            f"/resources?filter[{filter_name}]={filter_value}"
            f"&filter[scan]={self.l_scan_id}"
            f"&include=provider"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_latest_default_ui_fields(self):
        """Latest-resources listing with the sparse UI field set."""
        name = "/resources/latest?fields"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources/latest?page[number]={page_number}"
            f"&fields[resources]={','.join(RESOURCES_UI_FIELDS)}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_latest_metadata_filters(self):
        """Latest-resources metadata using a rotating dynamic filter."""
        name = "/resources/metadata/latest?filter[resource_filter]"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )

        endpoint = f"/resources/metadata/latest?filter[{filter_name}]={filter_value}"
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
|
||||
@@ -13,6 +13,23 @@ FINDINGS_RESOURCE_METADATA = {
|
||||
"resource_types": "resource_type",
|
||||
"services": "service",
|
||||
}
|
||||
# Maps resource-metadata response keys to the filter name each one feeds.
RESOURCE_METADATA = dict(
    regions="region",
    types="type",
    services="service",
)
|
||||
|
||||
# Sparse field set the UI requests when listing resources
# (fields[resources]=...).
RESOURCES_UI_FIELDS = (
    "name failed_findings_count region service type provider "
    "inserted_at updated_at uid"
).split()
|
||||
|
||||
# Provider names used to look up scan IDs for the differently sized test
# datasets (~50k and ~250k resources respectively).
S_PROVIDER_NAME = "provider-50k"
M_PROVIDER_NAME = "provider-250k"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user