mirror of https://github.com/prowler-cloud/prowler.git, synced 2026-03-27 18:38:52 +00:00

Compare commits: update-cha... vs 5.4.1 (320 commits)
Commit SHAs:

23aded92a3, 6e56d3862d, d95fccd163, 7ddf860a55, 3f41c75a45, 04b6dbf639, ff4d16deb5, 562921cd5e,
8f061e4fed, 3fb86d754a, 7874707310, 1c934e37c7, 8459cff16d, 57ae096395, 200185de25, f8447b0f79,
19289bbe20, b5b371fa0c, 939a623cec, 926f449ae6, 646668c6ae, 8e6b92792b, 65c081ce38, 5600131d6a,
dbd271980f, ff532a899e, 0c9675ec70, d45eda2b2b, 0abcf80d19, c0e10fd395, a80e9b26a8, 2a9cd57fb8,
a0ad1a5f49, dff22dd166, 58138810b9, 1ecc272fe4, b784167006, cf0ec8dea0, 96cae5e961, a48e5cb15f,
5a9ff007e0, 24c45f894c, 5d03c85629, 41dc397a7a, 237a9adce9, a06167f1c2, a7d58c40dd, e260c46389,
115169a596, 5b19173c1d, d3dd1644e6, 8ff0c59964, 285939c389, a62ae8af51, 5d78b9e439, 1056c270ca,
eeef6600b7, e142f17abe, a65d858dac, 6235a1ba41, 05007d03ee, 102d099947, 3194675a5c, 14e6e4aa68,
b24c3665b5, 1f60878867, 2dd18662d8, 175360dbe6, 80e24b971f, 78877c470a, 9c9b100359, 10f3232294,
a2e5f70f36, 8d8b31c757, cba1e718b9, 6c3c37fc26, b610cacd0c, 027a5705cb, b7fbfb4360, 5acf0a7e3d,
3a25e86e30, 50f1592eb3, 0f2927cb88, b4e1434052, 43710783f9, 16f767e7b9, 42818217a0, 13405594b2,
b5a3852334, 181ff1acb3, 91e59a3279, b3fad1a765, 6f90927a79, 0bb4a9a3e9, 80d9cde60b, 11196c2f83,
55a0d0a1b5, 4e5e1d7bd4, 06a0a434ab, 153833fc55, fc4877975f, 0797efd4fd, fbec99a0b7, b2cb1de95e,
190c2316d7, f6c352281a, 66dfe89936, 8b3942ca49, 9d35213bd5, 3e586e615d, a4f950e093, 7c2441f6ff,
0a92af3eb2, 666f3a0e20, 06ef98b5cc, 79125bdd40, e8e8b085ac, c9b81d003a, 23fa3c1e38, 03fbd0baca,
d4fe24ef47, 9c5220ee98, 6491bce5a6, ca375dd79c, e807573b54, c0f4c9743f, 5974d0b5da, 6244a8a5f7,
5b9dae4529, a424374c44, b7fc2542e8, 83a1598a1e, b22b56a06b, 5020e4713c, ee534a740e, 48cb45b7a8,
91b74822e9, 287eef5085, 45d359c84a, 6049e5d4e8, dfd377f89e, 37e6c52c14, d6a7f4d88f, 239cda0a90,
4a821e425b, e1a2f0c204, c70860c733, 05e71e033f, 5164ec2eb9, be18dac4f9, bb126c242f, e27780a856,
196ec51751, 86abf9e64c, 9d8be578e3, b3aa800082, 501674a778, 5ff6ae79d8, e518a869ab, 43927a62f3,
335980c8d8, ca3ee378db, c05bc1068a, 2e3164636d, c34e07fc40, 6022122a61, f65f5e4b46, dee17733a0,
cddda1e64e, f7b873db03, 792bc70d0a, 185491b061, 3af8a43480, fd78406b29, 4758b258a3, 015e2b3b88,
e184c9cb61, 9004a01183, dd65ba3d9e, bba616a18f, aa0f8d2981, 2511d6ffa9, 27329457be, 7189f3d526,
58e7589c9d, d60f4b5ded, 4c2ec094f6, 395ecaff5b, c39506ef7d, eb90d479e2, b92a73f5ea, ad121f3059,
70e4c5a44e, b5a46b7b59, f1a97cd166, 0774508093, 0664ce6b94, 407c779c52, c60f13f23f, 37d912ef01,
d3de89c017, cb22af25c6, a534b94495, 6262b4ff0b, 84ecd7ab2c, 1a5428445a, ac8e991ca0, 83a0331472,
cce31e2971, 0adf7d6e77, 295f8b557e, bb2c5c3161, 0018f36a36, 857de84f49, 9630f2242a, 1fe125867c,
0737893240, 282fe3d348, b5d83640ae, 2823d3ad21, 00b93bfe86, 84c253d887, 2ab5a702c9, 11d9cdf24e,
b1de41619b, 18a4881a51, de76a168c0, bcc13a742d, 23b584f4bf, 074b7c1ff5, c04e9ed914, 1f1b126e79,
9b0dd80f13, b0807478f2, 11dfa58ecd, 412f396e0a, 8100d43ff2, bc96acef48, 6e9876b61a, 32253ca4f7,
4c6be5e283, 69178fd7bd, cd4432c14b, 0e47172aec, efe18ae8b2, d5e13df4fe, b0e84d74f2, 28526b591c,
51e6a6edb1, c1b132a29e, e2530e7d57, f2fcb1599b, 160eafa0c9, c2bc0f1368, 27d27fff81, 66e8f0ce18,
0ceae8361b, 5e52fded83, 0a7d07b4b6, 34b5f483d7, 9d04a1bc52, b25c0aaa00, 652b93ea45, ccb7726511,
c514a4e451, d841bd6890, ff54e10ab2, 718e562741, ce05e2a939, 90831d3084, 2c928dead5, 3ffe147664,
3373b0f47c, b29db04560, f964a0362e, 537b23dfae, 48cf3528b4, 3c4b9d32c9, fd8361ae2c, 33cadaa932,
cc126eb8b4, c996b3f6aa, e2b406a300, c2c69da603, 4e51348ff2, 3257b82706, c98d764daa, 0448429a9f,
b948ac6125, 3acc09ea16, 82d0d0de9d, f9cfc4d087, 7d68ff455b, ddf4881971, 9ad4944142, 7f33ea76a4,
1140c29384, 2441a62f39, c26a231fc1, 2fb2315037, a9e475481a, 826d7c4dc3, b7f4b37f66, 193d691bfe,
a359bc581c, 9a28ff025a, f1c7050700, 9391c27b9e, 4c54de092f, 690c482a43, ad2d857c6f, 07ee59d2ef,
bec4617d0a, 94916f8305, 44de651be3, bdcba9c642, c172f75f1a, ec492fa13a, 702659959c, fef332a591
.env (21 changed lines)

@@ -4,7 +4,7 @@
#### Prowler UI Configuration ####
PROWLER_UI_VERSION="stable"
AUTH_URL=http://localhost:3000
SITE_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
AUTH_TRUST_HOST=true

@@ -24,10 +24,6 @@ POSTGRES_USER=prowler
POSTGRES_PASSWORD=postgres
POSTGRES_DB=prowler_db

-# Celery-Prowler task settings
-TASK_RETRY_DELAY_SECONDS=0.1
-TASK_RETRY_ATTEMPTS=5
-
# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=valkey

@@ -37,10 +33,10 @@ VALKEY_DB=0
# API scan settings

# The path to the directory where scan output should be stored
-DJANGO_TMP_OUTPUT_DIRECTORY="/tmp/prowler_api_output"
+DJANGO_TMP_OUTPUT_DIRECTORY = "/tmp/prowler_api_output"

# The maximum number of findings to process in a single batch
-DJANGO_FINDINGS_BATCH_SIZE=1000
+DJANGO_FINDINGS_BATCH_SIZE = 1000

# The AWS access key to be used when uploading scan output to an S3 bucket
# If left empty, default AWS credentials resolution behavior will be used

@@ -127,13 +123,4 @@ SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

#### Prowler release version ####
-NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.6.0
-
-# Social login credentials
-SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
-SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
-SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""
-
-SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
-SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
-SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""
+NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.4.0
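Two of the .env hunks above only toggle the spacing around `=` for the Django scan settings. However the file is parsed, the values end up as process environment variables; a minimal sketch of how settings like these are typically consumed (variable names are taken from the diff, the surrounding settings module is assumed):

```python
import os

# Variable names come from the .env diff above; defaults are illustrative only.
TMP_OUTPUT_DIRECTORY = os.environ.get(
    "DJANGO_TMP_OUTPUT_DIRECTORY", "/tmp/prowler_api_output"
)
FINDINGS_BATCH_SIZE = int(os.environ.get("DJANGO_FINDINGS_BATCH_SIZE", "1000"))
```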
.github/dependabot.yml (156 changed lines, vendored)

@@ -9,112 +9,108 @@ updates:
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
-      interval: "monthly"
-    open-pull-requests-limit: 25
+      interval: "daily"
+    open-pull-requests-limit: 10
    target-branch: master
    labels:
      - "dependencies"
      - "pip"

-  # Dependabot Updates are temporary disabled - 2025/03/19
-  # - package-ecosystem: "pip"
-  #   directory: "/api"
-  #   schedule:
-  #     interval: "daily"
-  #   open-pull-requests-limit: 10
-  #   target-branch: master
-  #   labels:
-  #     - "dependencies"
-  #     - "pip"
-  #     - "component/api"
+  - package-ecosystem: "pip"
+    directory: "/api"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    target-branch: master
+    labels:
+      - "dependencies"
+      - "pip"
+      - "component/api"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
-      interval: "monthly"
-    open-pull-requests-limit: 25
+      interval: "daily"
+    open-pull-requests-limit: 10
    target-branch: master
    labels:
      - "dependencies"
      - "github_actions"

-  # Dependabot Updates are temporary disabled - 2025/03/19
-  # - package-ecosystem: "npm"
-  #   directory: "/ui"
-  #   schedule:
-  #     interval: "daily"
-  #   open-pull-requests-limit: 10
-  #   target-branch: master
-  #   labels:
-  #     - "dependencies"
-  #     - "npm"
-  #     - "component/ui"
+  - package-ecosystem: "npm"
+    directory: "/ui"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    target-branch: master
+    labels:
+      - "dependencies"
+      - "npm"
+      - "component/ui"

  - package-ecosystem: "docker"
    directory: "/"
    schedule:
-      interval: "monthly"
-    open-pull-requests-limit: 25
+      interval: "weekly"
+    open-pull-requests-limit: 10
    target-branch: master
    labels:
      - "dependencies"
      - "docker"

-  # Dependabot Updates are temporary disabled - 2025/04/15
-  # v4.6
-  # - package-ecosystem: "pip"
-  #   directory: "/"
-  #   schedule:
-  #     interval: "weekly"
-  #   open-pull-requests-limit: 10
-  #   target-branch: v4.6
-  #   labels:
-  #     - "dependencies"
-  #     - "pip"
-  #     - "v4"
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 10
+    target-branch: v4.6
+    labels:
+      - "dependencies"
+      - "pip"
+      - "v4"

-  # - package-ecosystem: "github-actions"
-  #   directory: "/"
-  #   schedule:
-  #     interval: "weekly"
-  #   open-pull-requests-limit: 10
-  #   target-branch: v4.6
-  #   labels:
-  #     - "dependencies"
-  #     - "github_actions"
-  #     - "v4"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 10
+    target-branch: v4.6
+    labels:
+      - "dependencies"
+      - "github_actions"
+      - "v4"

-  # - package-ecosystem: "docker"
-  #   directory: "/"
-  #   schedule:
-  #     interval: "weekly"
-  #   open-pull-requests-limit: 10
-  #   target-branch: v4.6
-  #   labels:
-  #     - "dependencies"
-  #     - "docker"
-  #     - "v4"
+  - package-ecosystem: "docker"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 10
+    target-branch: v4.6
+    labels:
+      - "dependencies"
+      - "docker"
+      - "v4"

-  # Dependabot Updates are temporary disabled - 2025/03/19
-  # v3
-  # - package-ecosystem: "pip"
-  #   directory: "/"
-  #   schedule:
-  #     interval: "monthly"
-  #   open-pull-requests-limit: 10
-  #   target-branch: v3
-  #   labels:
-  #     - "dependencies"
-  #     - "pip"
-  #     - "v3"
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 10
+    target-branch: v3
+    labels:
+      - "dependencies"
+      - "pip"
+      - "v3"

-  # - package-ecosystem: "github-actions"
-  #   directory: "/"
-  #   schedule:
-  #     interval: "monthly"
-  #   open-pull-requests-limit: 10
-  #   target-branch: v3
-  #   labels:
-  #     - "dependencies"
-  #     - "github_actions"
-  #     - "v3"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 10
+    target-branch: v3
+    labels:
+      - "dependencies"
+      - "github_actions"
+      - "v3"
.github/pull_request_template.md (1 changed line, vendored)

@@ -16,7 +16,6 @@ Please include a summary of the change and which issue is fixed. List any depend
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
- [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)
-- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/prowler/CHANGELOG.md), if applicable.

#### API
- [ ] Verify if API specs need to be regenerated.
@@ -61,7 +61,7 @@ jobs:

    steps:
      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4

      - name: Set short git commit SHA
        id: vars

@@ -70,18 +70,18 @@ jobs:
          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV

      - name: Login to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3

      - name: Build and push container image (latest)
        # Comment the following line for testing
        if: github.event_name == 'push'
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          # Set push: false for testing

@@ -94,7 +94,7 @@ jobs:

      - name: Build and push container image (release)
        if: github.event_name == 'release'
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          push: true

@@ -106,7 +106,7 @@ jobs:

      - name: Trigger deployment
        if: github.event_name == 'push'
-        uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
+        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          repository: ${{ secrets.CLOUD_DISPATCH }}
.github/workflows/api-codeql.yml (6 changed lines, vendored)

@@ -44,16 +44,16 @@ jobs:

    steps:
      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/api-codeql-config.yml

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/api-pull-request.yml (34 changed lines, vendored)

@@ -71,11 +71,11 @@ jobs:
          --health-retries 5

    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4

      - name: Test if changes are in not ignored paths
        id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@v45
        with:
          files: api/**
          files_ignore: |

@@ -85,30 +85,16 @@ jobs:
          api/README.md
          api/mkdocs.yml

-      - name: Replace @master with current branch in pyproject.toml
-        working-directory: ./api
-        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        run: |
-          BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
-          echo "Using branch: $BRANCH_NAME"
-          sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml
-
      - name: Install poetry
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          python -m pip install --upgrade pip
-          pipx install poetry==2.1.1
-
-      - name: Update poetry.lock after the branch name change
-        working-directory: ./api
-        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        run: |
-          poetry lock
+          pipx install poetry==1.8.5

      - name: Set up Python ${{ matrix.python-version }}
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"

@@ -117,7 +103,7 @@ jobs:
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
-          poetry install --no-root
+          poetry install
          poetry run pip list
          VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
            grep '"tag_name":' | \

@@ -159,7 +145,7 @@ jobs:
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
-          poetry run safety check --ignore 70612,66963,74429
+          poetry run safety check --ignore 70612,66963

      - name: Vulture
        working-directory: ./api

@@ -181,7 +167,7 @@ jobs:

      - name: Upload coverage reports to Codecov
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
+        uses: codecov/codecov-action@v5
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:

@@ -189,11 +175,11 @@ jobs:
  test-container-build:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3
      - name: Build Container
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@v6
        with:
          context: ${{ env.API_WORKING_DIR }}
          push: false
.github/workflows/backport.yml (4 changed lines, vendored)

@@ -23,7 +23,7 @@ jobs:
    steps:
      - name: Check labels
        id: preview_label_check
-        uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
+        uses: docker://agilepathway/pull-request-label-checker:v1.6.55
        with:
          allow_failure: true
          prefix_mode: true

@@ -33,7 +33,7 @@ jobs:

      - name: Backport Action
        if: steps.preview_label_check.outputs.label_check == 'success'
-        uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
+        uses: sorenlouv/backport-github-action@v9.5.1
        with:
          github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
@@ -17,7 +17,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Leave PR comment with the Prowler Documentation URI
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
+        uses: peter-evans/create-or-update-comment@v4
        with:
          issue-number: ${{ env.PR_NUMBER }}
          body: |
.github/workflows/conventional-commit.yml (2 changed lines, vendored)

@@ -18,6 +18,6 @@ jobs:
    steps:
      - name: conventional-commit-check
        id: conventional-commit-check
-        uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
+        uses: agenthunt/conventional-commit-checker-action@v2.0.0
        with:
          pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
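The `pr-title-regex` above enforces Conventional Commit style PR titles: a type, an optional parenthesized scope, then a colon and a description. A quick way to sanity-check the pattern (the sample titles are invented):

```python
import re

# Same pattern as pr-title-regex in the workflow above.
PATTERN = re.compile(r"^([^\s(]+)(?:\(([^)]+)\))?: (.+)")

for title in ["feat(api): add scan endpoint", "fix: typo", "update readme"]:
    match = PATTERN.match(title)
    print(title, "->", match.groups() if match else "rejected")
# feat(api): add scan endpoint -> ('feat', 'api', 'add scan endpoint')
# fix: typo -> ('fix', None, 'typo')
# update readme -> rejected
```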
.github/workflows/create-backport-label.yml (67 changed lines, vendored)

@@ -1,67 +0,0 @@
-name: Create Backport Label
-
-on:
-  release:
-    types: [published]
-
-jobs:
-  create_label:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-      issues: write
-    steps:
-      - name: Create backport label
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          RELEASE_TAG: ${{ github.event.release.tag_name }}
-          OWNER_REPO: ${{ github.repository }}
-        run: |
-          VERSION_ONLY=${RELEASE_TAG#v} # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
-
-          # Check if it's a minor version (X.Y.0)
-          if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
-            echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is a minor version. Proceeding to create backport label."
-
-            TWO_DIGIT_VERSION=${VERSION_ONLY%.0} # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
-
-            FINAL_LABEL_NAME="backport-to-v${TWO_DIGIT_VERSION}"
-            FINAL_DESCRIPTION="Backport PR to the v${TWO_DIGIT_VERSION} branch"
-
-            echo "Effective label name will be: ${FINAL_LABEL_NAME}"
-            echo "Effective description will be: ${FINAL_DESCRIPTION}"
-
-            # Check if the label already exists
-            STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" "https://api.github.com/repos/${OWNER_REPO}/labels/${FINAL_LABEL_NAME}")
-
-            if [ "${STATUS_CODE}" -eq 200 ]; then
-              echo "Label '${FINAL_LABEL_NAME}' already exists."
-            elif [ "${STATUS_CODE}" -eq 404 ]; then
-              echo "Label '${FINAL_LABEL_NAME}' does not exist. Creating it..."
-              # Prepare JSON data payload
-              JSON_DATA=$(printf '{"name":"%s","description":"%s","color":"B60205"}' "${FINAL_LABEL_NAME}" "${FINAL_DESCRIPTION}")
-
-              CREATE_STATUS_CODE=$(curl -s -o /tmp/curl_create_response.json -w "%{http_code}" -X POST \
-                -H "Accept: application/vnd.github.v3+json" \
-                -H "Authorization: token ${GITHUB_TOKEN}" \
-                --data "${JSON_DATA}" \
-                "https://api.github.com/repos/${OWNER_REPO}/labels")
-
-              CREATE_RESPONSE_BODY=$(cat /tmp/curl_create_response.json)
-              rm -f /tmp/curl_create_response.json
-
-              if [ "$CREATE_STATUS_CODE" -eq 201 ]; then
-                echo "Label '${FINAL_LABEL_NAME}' created successfully."
-              else
-                echo "Error creating label '${FINAL_LABEL_NAME}'. Status: $CREATE_STATUS_CODE"
-                echo "Response: $CREATE_RESPONSE_BODY"
-                exit 1
-              fi
-            else
-              echo "Error checking for label '${FINAL_LABEL_NAME}'. HTTP Status: ${STATUS_CODE}"
-              exit 1
-            fi
-          else
-            echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is not a minor version. Skipping backport label creation."
-            exit 0
-          fi
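The shell in this removed workflow boils down to: only minor releases (X.Y.0) get a `backport-to-vX.Y` label, created through the GitHub labels API when it does not already exist. The naming logic, sketched in Python (the function name is illustrative):

```python
import re

def backport_label(release_tag: str):
    """Only minor releases (X.Y.0) get a backport label, as in the workflow above."""
    version = release_tag.removeprefix("v")  # v5.6.0 -> 5.6.0
    if re.fullmatch(r"\d+\.\d+\.0", version):
        two_digit = version[:-2]             # 5.6.0 -> 5.6
        return f"backport-to-v{two_digit}"
    return None

assert backport_label("v5.6.0") == "backport-to-v5.6"
assert backport_label("v5.6.1") is None
```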
.github/workflows/find-secrets.yml (4 changed lines, vendored)

@@ -7,11 +7,11 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@b06f6d72a3791308bb7ba59c2b8cb7a083bd17e4 # v3.88.26
+        uses: trufflesecurity/trufflehog@v3.88.14
        with:
          path: ./
          base: ${{ github.event.repository.default_branch }}
.github/workflows/labeler.yml (2 changed lines, vendored)

@@ -14,4 +14,4 @@ jobs:
    pull-requests: write
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
+      - uses: actions/labeler@v5
.github/workflows/pull-request-merged.yml (37 changed lines, vendored)

@@ -1,37 +0,0 @@
-name: Prowler - Merged Pull Request
-
-on:
-  pull_request_target:
-    branches: ['master']
-    types: ['closed']
-
-jobs:
-  trigger-cloud-pull-request:
-    name: Trigger Cloud Pull Request
-    if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          ref: ${{ github.event.pull_request.merge_commit_sha }}
-
-      - name: Set short git commit SHA
-        id: vars
-        run: |
-          shortSha=$(git rev-parse --short ${{ github.event.pull_request.merge_commit_sha }})
-          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
-
-      - name: Trigger pull request
-        uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
-        with:
-          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          repository: ${{ secrets.CLOUD_DISPATCH }}
-          event-type: prowler-pull-request-merged
-          client-payload: '{
-            "PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
-            "PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
-            "PROWLER_PR_TITLE": "${{ github.event.pull_request.title }}",
-            "PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
-            "PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
-            "PROWLER_PR_URL":${{ toJson(github.event.pull_request.html_url) }}
-          }'
@@ -59,16 +59,16 @@ jobs:

    steps:
      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4

      - name: Setup Python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install Poetry
        run: |
-          pipx install poetry==2.*
+          pipx install poetry==1.8.5
          pipx inject poetry poetry-bumpversion

      - name: Get Prowler version

@@ -108,13 +108,13 @@ jobs:
          esac

      - name: Login to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Public ECR
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        with:
          registry: public.ecr.aws
          username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}

@@ -123,11 +123,11 @@ jobs:
          AWS_REGION: ${{ env.AWS_REGION }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3

      - name: Build and push container image (latest)
        if: github.event_name == 'push'
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@v6
        with:
          push: true
          tags: |

@@ -140,7 +140,7 @@ jobs:

      - name: Build and push container image (release)
        if: github.event_name == 'release'
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@v6
        with:
          # Use local context to get changes
          # https://github.com/docker/build-push-action#path-context
.github/workflows/sdk-bump-version.yml (145 changed lines, vendored)

@@ -1,145 +0,0 @@
-name: SDK - Bump Version
-
-on:
-  release:
-    types: [published]
-
-
-env:
-  PROWLER_VERSION: ${{ github.event.release.tag_name }}
-  BASE_BRANCH: master
-
-jobs:
-  bump-version:
-    name: Bump Version
-    if: github.repository == 'prowler-cloud/prowler'
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
-      - name: Get Prowler version
-        shell: bash
-        run: |
-          if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
-            MAJOR_VERSION=${BASH_REMATCH[1]}
-            MINOR_VERSION=${BASH_REMATCH[2]}
-            FIX_VERSION=${BASH_REMATCH[3]}
-
-            # Export version components to GitHub environment
-            echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
-            echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
-            echo "FIX_VERSION=${FIX_VERSION}" >> "${GITHUB_ENV}"
-
-            if (( MAJOR_VERSION == 5 )); then
-              if (( FIX_VERSION == 0 )); then
-                echo "Minor Release: $PROWLER_VERSION"
-
-                # Set up next minor version for master
-                BUMP_VERSION_TO=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).${FIX_VERSION}
-                echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
-
-                TARGET_BRANCH=${BASE_BRANCH}
-                echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
-
-                # Set up patch version for version branch
-                PATCH_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.1
-                echo "PATCH_VERSION_TO=${PATCH_VERSION_TO}" >> "${GITHUB_ENV}"
-
-                VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
-                echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
-
-                echo "Bumping to next minor version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
-                echo "Bumping to next patch version: ${PATCH_VERSION_TO} in branch ${VERSION_BRANCH}"
-              else
-                echo "Patch Release: $PROWLER_VERSION"
-
-                BUMP_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.$((FIX_VERSION + 1))
-                echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
-
-                TARGET_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
-                echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
-
-                echo "Bumping to next patch version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
-              fi
-            else
-              echo "Releasing another Prowler major version, aborting..."
-              exit 1
-            fi
-          else
-            echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
-            exit 1
-          fi
-
-      - name: Bump versions in files
-        run: |
-          echo "Using PROWLER_VERSION=$PROWLER_VERSION"
-          echo "Using BUMP_VERSION_TO=$BUMP_VERSION_TO"
-
-          set -e
-
-          echo "Bumping version in pyproject.toml ..."
-          sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${BUMP_VERSION_TO}\"|" pyproject.toml
-
-          echo "Bumping version in prowler/config/config.py ..."
-          sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${BUMP_VERSION_TO}\"|" prowler/config/config.py
-
-          echo "Bumping version in .env ..."
-          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${BUMP_VERSION_TO}|" .env
-
-          git --no-pager diff
-
-      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
-        with:
-          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
-          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          base: ${{ env.TARGET_BRANCH }}
-          commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
-          branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
-          title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
-          body: |
-            ### Description
-
-            Bump Prowler version to v${{ env.BUMP_VERSION_TO }}
-
-            ### License
-
-            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
-
-      - name: Handle patch version for minor release
-        if: env.FIX_VERSION == '0'
-        run: |
-          echo "Using PROWLER_VERSION=$PROWLER_VERSION"
-          echo "Using PATCH_VERSION_TO=$PATCH_VERSION_TO"
-
-          set -e
-
-          echo "Bumping version in pyproject.toml ..."
-          sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${PATCH_VERSION_TO}\"|" pyproject.toml
-
-          echo "Bumping version in prowler/config/config.py ..."
-          sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${PATCH_VERSION_TO}\"|" prowler/config/config.py
-
-          echo "Bumping version in .env ..."
-          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PATCH_VERSION_TO}|" .env
-
-          git --no-pager diff
-
-      - name: Create Pull Request for patch version
-        if: env.FIX_VERSION == '0'
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
-        with:
-          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
-          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          base: ${{ env.VERSION_BRANCH }}
-          commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
-          branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
-          title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
-          body: |
-            ### Description
-
-            Bump Prowler version to v${{ env.PATCH_VERSION_TO }}
-
-            ### License
-
-            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
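The removed bump step encodes a simple branching policy: a minor release (fix version 0) bumps master to the next minor and seeds the new vX.Y branch with X.Y.1, while a patch release only bumps its own version branch. A compact sketch of that mapping (the function name is illustrative):

```python
def next_versions(released: str) -> dict:
    """Map a released 5.x version (N.N.N) to the bumps the workflow above opened PRs for."""
    major, minor, fix = map(int, released.split("."))
    if fix == 0:
        # Minor release: bump master, and start the patch series on the version branch.
        return {"master": f"{major}.{minor + 1}.0",
                f"v{major}.{minor}": f"{major}.{minor}.1"}
    # Patch release: bump only the version branch.
    return {f"v{major}.{minor}": f"{major}.{minor}.{fix + 1}"}

assert next_versions("5.6.0") == {"master": "5.7.0", "v5.6": "5.6.1"}
assert next_versions("5.6.1") == {"v5.6": "5.6.2"}
```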
.github/workflows/sdk-codeql.yml (8 changed lines, vendored)

@@ -21,7 +21,6 @@ on:
    paths-ignore:
      - 'ui/**'
      - 'api/**'
-      - '.github/**'
  pull_request:
    branches:
      - "master"

@@ -31,7 +30,6 @@ on:
    paths-ignore:
      - 'ui/**'
      - 'api/**'
-      - '.github/**'
  schedule:
    - cron: '00 12 * * *'

@@ -52,16 +50,16 @@ jobs:

    steps:
      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/sdk-codeql-config.yml

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/sdk-pull-request.yml (130 changed lines, vendored)

@@ -21,11 +21,11 @@ jobs:
        python-version: ["3.9", "3.10", "3.11", "3.12"]

    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4

      - name: Test if changes are in not ignored paths
        id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@v45
        with:
          files: ./**
          files_ignore: |

@@ -34,7 +34,6 @@ jobs:
          permissions/**
          api/**
          ui/**
-          prowler/CHANGELOG.md
          README.md
          mkdocs.yml
          .backportrc.json

@@ -47,11 +46,11 @@ jobs:
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          python -m pip install --upgrade pip
-          pipx install poetry==2.1.1
+          pipx install poetry==1.8.5

      - name: Set up Python ${{ matrix.python-version }}
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"

@@ -59,7 +58,7 @@ jobs:
      - name: Install dependencies
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
-          poetry install --no-root
+          poetry install
          poetry run pip list
          VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
            grep '"tag_name":' | \

@@ -107,128 +106,15 @@ jobs:
        run: |
          /tmp/hadolint Dockerfile --ignore=DL3013

-      # Test AWS
-      - name: AWS - Check if any file has changed
-        id: aws-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            ./prowler/providers/aws/**
-            ./tests/providers/aws/**
-            .poetry.lock
-
-      - name: AWS - Test
-        if: steps.aws-changed-files.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
-
-      # Test Azure
-      - name: Azure - Check if any file has changed
-        id: azure-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            ./prowler/providers/azure/**
-            ./tests/providers/azure/**
-            .poetry.lock
-
-      - name: Azure - Test
-        if: steps.azure-changed-files.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
-
-      # Test GCP
-      - name: GCP - Check if any file has changed
-        id: gcp-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            ./prowler/providers/gcp/**
-            ./tests/providers/gcp/**
-            .poetry.lock
-
-      - name: GCP - Test
-        if: steps.gcp-changed-files.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
-
-      # Test Kubernetes
-      - name: Kubernetes - Check if any file has changed
-        id: kubernetes-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            ./prowler/providers/kubernetes/**
-            ./tests/providers/kubernetes/**
-            .poetry.lock
-
-      - name: Kubernetes - Test
-        if: steps.kubernetes-changed-files.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
-
-      # Test GitHub
-      - name: GitHub - Check if any file has changed
-        id: github-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            ./prowler/providers/github/**
-            ./tests/providers/github/**
-            .poetry.lock
-
-      - name: GitHub - Test
-        if: steps.github-changed-files.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
-
-      # Test NHN
-      - name: NHN - Check if any file has changed
-        id: nhn-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            ./prowler/providers/nhn/**
-            ./tests/providers/nhn/**
-            .poetry.lock
-
-      - name: NHN - Test
-        if: steps.nhn-changed-files.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
-
-      # Test M365
-      - name: M365 - Check if any file has changed
-        id: m365-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            ./prowler/providers/m365/**
-            ./tests/providers/m365/**
-            .poetry.lock
-
-      - name: M365 - Test
-        if: steps.m365-changed-files.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
-
-      # Common Tests
-      - name: Lib - Test
+      - name: Test with pytest
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
-          poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
+          poetry run pytest -n auto --cov=./prowler --cov-report=xml tests

-      - name: Config - Test
-        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
-
-      # Codecov
      - name: Upload coverage reports to Codecov
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
+        uses: codecov/codecov-action@v5
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:
          flags: prowler
-          files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./github_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./lib_coverage.xml,./config_coverage.xml
.github/workflows/sdk-pypi-release.yml (10 changed lines, vendored)

@@ -7,7 +7,7 @@ on:
env:
  RELEASE_TAG: ${{ github.event.release.tag_name }}
  PYTHON_VERSION: 3.11
-  # CACHE: "poetry"
+  CACHE: "poetry"

jobs:
  repository-check:

@@ -64,17 +64,17 @@ jobs:
          ;;
        esac

-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4

      - name: Install dependencies
        run: |
-          pipx install poetry==2.1.1
+          pipx install poetry==1.8.5

      - name: Setup Python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
-          # cache: ${{ env.CACHE }}
+          cache: ${{ env.CACHE }}

      - name: Build Prowler package
        run: |
@@ -4,7 +4,7 @@ name: SDK - Refresh AWS services' regions

on:
  schedule:
-    - cron: "0 9 * * 1" # runs at 09:00 UTC every Monday
+    - cron: "0 9 * * *" #runs at 09:00 UTC everyday

env:
  GITHUB_BRANCH: "master"

@@ -23,12 +23,12 @@ jobs:
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
        with:
          ref: ${{ env.GITHUB_BRANCH }}

      - name: setup python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        uses: actions/setup-python@v5
        with:
          python-version: 3.9 #install the python needed

@@ -38,7 +38,7 @@ jobs:
          pip install boto3

      - name: Configure AWS Credentials -- DEV
-        uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0
+        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: ${{ env.AWS_REGION_DEV }}
          role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}

@@ -50,13 +50,12 @@ jobs:

      # Create pull request
      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@v7
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          commit-message: "feat(regions_update): Update regions for AWS services"
          branch: "aws-services-regions-updated-${{ github.sha }}"
-          labels: "status/waiting-for-revision, severity/low, provider/aws"
+          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
          title: "chore(regions_update): Changes in regions for AWS services"
          body: |
            ### Description
@@ -61,7 +61,7 @@ jobs:

    steps:
      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4

      - name: Set short git commit SHA
        id: vars

@@ -70,18 +70,18 @@ jobs:
          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV

      - name: Login to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3

      - name: Build and push container image (latest)
        # Comment the following line for testing
        if: github.event_name == 'push'
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          build-args: |

@@ -96,7 +96,7 @@ jobs:

      - name: Build and push container image (release)
        if: github.event_name == 'release'
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          build-args: |

@@ -110,7 +110,7 @@ jobs:

      - name: Trigger deployment
        if: github.event_name == 'push'
-        uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
+        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          repository: ${{ secrets.CLOUD_DISPATCH }}
.github/workflows/ui-codeql.yml (6 changed lines, vendored)

@@ -44,16 +44,16 @@ jobs:

    steps:
      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/ui-codeql-config.yml

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/ui-pull-request.yml (10 changed lines, vendored)

@@ -27,11 +27,11 @@ jobs:
        node-version: [20.x]
    steps:
      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Setup Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
      - name: Install dependencies

@@ -46,11 +46,11 @@ jobs:
  test-container-build:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3
      - name: Build Container
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@v6
        with:
          context: ${{ env.UI_WORKING_DIR }}
          # Always build using `prod` target
.gitignore (4 changed lines, vendored)

@@ -42,9 +42,6 @@ junit-reports/
# VSCode files
.vscode/

-# Cursor files
-.cursorignore
-
# Terraform
.terraform*
*.tfstate

@@ -53,7 +50,6 @@ junit-reports/
# .env
ui/.env*
api/.env*
-.env.local

# Coverage
.coverage*
@@ -59,7 +59,7 @@ repos:
        args: ["--ignore=E266,W503,E203,E501,W605"]

  - repo: https://github.com/python-poetry/poetry
-    rev: 2.1.1
+    rev: 1.8.0
    hooks:
      - id: poetry-check
        name: API - poetry-check

@@ -68,7 +68,7 @@ repos:

      - id: poetry-lock
        name: API - poetry-lock
-        args: ["--directory=./api"]
+        args: ["--no-update", "--directory=./api"]
        pass_filenames: false

      - id: poetry-check

@@ -78,7 +78,7 @@ repos:

      - id: poetry-lock
        name: SDK - poetry-lock
-        args: ["--directory=./"]
+        args: ["--no-update", "--directory=./"]
        pass_filenames: false

@@ -115,7 +115,7 @@ repos:
      - id: safety
        name: safety
        description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
-        entry: bash -c 'safety check --ignore 70612,66963,74429'
+        entry: bash -c 'safety check --ignore 70612,66963'
        language: system

      - id: vulture
Dockerfile (64 changed lines)

@@ -1,64 +1,38 @@
-FROM python:3.12.10-slim-bookworm AS build
+FROM python:3.12.9-alpine3.20

LABEL maintainer="https://github.com/prowler-cloud/prowler"
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"

-ARG POWERSHELL_VERSION=7.5.0
-
-# hadolint ignore=DL3008
-RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install PowerShell
-RUN ARCH=$(uname -m) && \
-    if [ "$ARCH" = "x86_64" ]; then \
-        wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
-    elif [ "$ARCH" = "aarch64" ]; then \
-        wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
-    else \
-        echo "Unsupported architecture: $ARCH" && exit 1 ; \
-    fi && \
-    mkdir -p /opt/microsoft/powershell/7 && \
-    tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
-    chmod +x /opt/microsoft/powershell/7/pwsh && \
-    ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
-    rm /tmp/powershell.tar.gz
-
-# Add prowler user
-RUN addgroup --gid 1000 prowler && \
-    adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
+# Update system dependencies and install essential tools
+#hadolint ignore=DL3018
+RUN apk --no-cache upgrade && apk --no-cache add curl git

+# Create non-root user
+RUN mkdir -p /home/prowler && \
+    echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
+    echo 'prowler:x:1000:' > /etc/group && \
+    chown -R prowler:prowler /home/prowler
USER prowler

-WORKDIR /home/prowler
-
# Copy necessary files
+WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY pyproject.toml /home/prowler
-COPY README.md /home/prowler/
-COPY prowler/providers/m365/lib/powershell/m365_powershell.py /home/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py
+COPY README.md /home/prowler

# Install Python dependencies
ENV HOME='/home/prowler'
-ENV PATH="${HOME}/.local/bin:${PATH}"
-#hadolint ignore=DL3013
-RUN pip install --no-cache-dir --upgrade pip && \
-    pip install --no-cache-dir poetry
-
-# By default poetry does not compile Python source files to bytecode during installation.
-# This speeds up the installation process, but the first execution may take a little more
-# time because Python then compiles source files to bytecode automatically. If you want to
-# compile source files to bytecode during installation, you can use the --compile option
-RUN poetry install --compile && \
-    rm -rf ~/.cache/pip
-
-# Install PowerShell modules
-RUN poetry run python prowler/providers/m365/lib/powershell/m365_powershell.py
+ENV PATH="$HOME/.local/bin:$PATH"
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
+    pip install --no-cache-dir .

+# Remove deprecated dash dependencies
+RUN pip uninstall dash-html-components -y && \
+    pip uninstall dash-core-components -y

+# Remove Prowler directory and build files
+USER 0
+RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info

USER prowler
-ENTRYPOINT ["poetry", "run", "prowler"]
+ENTRYPOINT ["prowler"]
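The architecture branch in the bookworm build stage above picks the matching PowerShell tarball per machine type. The same selection, sketched in Python (the URL pattern is copied from the Dockerfile; the helper name is illustrative):

```python
import platform

POWERSHELL_VERSION = "7.5.0"  # mirrors ARG POWERSHELL_VERSION in the Dockerfile

def powershell_tarball_url(machine: str = platform.machine()) -> str:
    # Same mapping as the Dockerfile RUN step: x86_64 -> linux-x64, aarch64 -> linux-arm64.
    arch = {"x86_64": "linux-x64", "aarch64": "linux-arm64"}.get(machine)
    if arch is None:
        raise RuntimeError(f"Unsupported architecture: {machine}")
    return (
        "https://github.com/PowerShell/PowerShell/releases/download/"
        f"v{POWERSHELL_VERSION}/powershell-{POWERSHELL_VERSION}-{arch}.tar.gz"
    )
```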
174
README.md
174
README.md
@@ -3,7 +3,7 @@
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler</b> is the Open Cloud Security platform trusted by thousands to automate security and compliance in any cloud environment. With hundreds of ready-to-use checks and compliance frameworks, Prowler delivers real-time, customizable monitoring and seamless integrations, making cloud security simple, scalable, and cost-effective for organizations of any size.
<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment it is meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
@@ -43,29 +43,15 @@

# Description

**Prowler** is an open-source security tool designed to assess and enforce security best practices across AWS, Azure, Google Cloud, and Kubernetes. It supports tasks such as security audits, incident response, continuous monitoring, system hardening, forensic readiness, and remediation processes.

Prowler includes hundreds of built-in controls to ensure compliance with standards and frameworks, including:

- **Industry Standards:** CIS, NIST 800, NIST CSF, and CISA
- **Regulatory Compliance and Governance:** RBI, FedRAMP, and PCI-DSS
- **Frameworks for Sensitive Data and Privacy:** GDPR, HIPAA, and FFIEC
- **Frameworks for Organizational Governance and Quality Control:** SOC2 and GXP
- **AWS-Specific Frameworks:** AWS Foundational Technical Review (FTR) and AWS Well-Architected Framework (Security Pillar)
- **National Security Standards:** ENS (Spanish National Security Scheme)
- **Custom Security Frameworks:** Tailored to your needs

## Prowler CLI and Prowler Cloud

Prowler offers a Command Line Interface (CLI), known as Prowler Open Source, and an additional service built on top of it, called <a href="https://prowler.com">Prowler Cloud</a>.
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.

## Prowler App

Prowler App is a web-based application that simplifies running Prowler across your cloud provider accounts. It provides a user-friendly interface to visualize the results and streamline your security assessments.
Prowler App is a web application that allows you to run Prowler in your cloud provider accounts and visualize the results in a user-friendly interface.



>For more details, refer to the [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
>More details at [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)

## Prowler CLI

@@ -74,7 +60,6 @@ prowler <provider>
```



## Prowler Dashboard

```console
@@ -82,34 +67,25 @@ prowler dashboard
```


# Prowler at a Glance
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.

| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 567 | 82 | 36 | 10 |
| GCP | 79 | 13 | 9 | 3 |
| Azure | 142 | 18 | 10 | 3 |
| Kubernetes | 83 | 7 | 5 | 7 |
| GitHub | 16 | 2 | 1 | 0 |
| M365 | 69 | 7 | 2 | 2 |
| NHN (Unofficial) | 6 | 2 | 1 | 0 |
| AWS | 564 | 82 | 33 | 10 |
| GCP | 77 | 13 | 5 | 3 |
| Azure | 140 | 18 | 6 | 3 |
| Kubernetes | 83 | 7 | 2 | 7 |
| Microsoft365 | 5 | 2 | 1 | 0 |

> [!Note]
> The numbers in the table are updated periodically.

> [!Tip]
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).

> [!Note]
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories: `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
> You can list the checks, services, compliance frameworks and categories with `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.

# 💻 Installation

## Prowler App

Prowler App offers flexible installation methods tailored to various environments:
Prowler App can be installed in different ways, depending on your environment:

> For detailed instructions on using Prowler App, refer to the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
> See how to use Prowler App in the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).

### Docker Compose

@@ -125,23 +101,15 @@ curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/mast
docker compose up -d
```

> Containers are built for `linux/amd64`.

### Configuring Your Workstation for Prowler App

If your workstation's architecture is incompatible, you can resolve this by:

- **Setting the environment variable**: `DOCKER_DEFAULT_PLATFORM=linux/amd64`
- **Using the following flag in your Docker command**: `--platform linux/amd64`

> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.

### From GitHub

**Requirements**

* `git` installed.
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.

@@ -160,12 +128,12 @@ python manage.py migrate --database admin
gunicorn -c config/guniconf.py config.wsgi:application
```
> [!IMPORTANT]
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
>
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
> For further guidance, refer to the Poetry Environment Activation Guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment.
> If your poetry version is below 2.0.0 you must keep using `poetry shell` to activate your environment.
> In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment

> After completing the setup, access the API documentation at http://localhost:8080/api/v1/docs.
> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.

**Commands to run the API Worker**

@@ -203,31 +171,29 @@ npm run build
npm start
```

> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.

## Prowler CLI
### Pip package
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/). Consequently, it can be installed using pip with Python >3.9.1, <3.13:
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python > 3.9.1, < 3.13:

```console
pip install prowler
prowler -v
```
>For further guidance, refer to [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)

### Containers

**Available Versions of Prowler CLI**
The available versions of Prowler CLI are the following:

The following versions of Prowler CLI are available, depending on your requirements:

- `latest`: Synchronizes with the `master` branch. Note that this version is not stable.
- `v4-latest`: Synchronizes with the `v4` branch. Note that this version is not stable.
- `v3-latest`: Synchronizes with the `v3` branch. Note that this version is not stable.
- `<x.y.z>` (release): Stable releases corresponding to specific versions. You can find the complete list of releases [here](https://github.com/prowler-cloud/prowler/releases).
- `stable`: Always points to the latest release.
- `v4-stable`: Always points to the latest release for v4.
- `v3-stable`: Always points to the latest release for v3.
- `latest`: in sync with the `master` branch (bear in mind that it is not a stable version)
- `v4-latest`: in sync with the `v4` branch (bear in mind that it is not a stable version)
- `v3-latest`: in sync with the `v3` branch (bear in mind that it is not a stable version)
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases); those are stable releases.
- `stable`: this tag always points to the latest release.
- `v4-stable`: this tag always points to the latest release for v4.
- `v3-stable`: this tag always points to the latest release for v3.

The container images are available here:
- Prowler CLI:
@@ -239,56 +205,35 @@ The container images are available here:

### From GitHub

Python >3.9.1, <3.13 is required with pip and Poetry:
Python > 3.9.1, < 3.13 is required with pip and poetry:

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler
eval $(poetry env activate)
poetry install
python prowler-cli.py -v
python prowler.py -v
```
> [!IMPORTANT]
> To clone Prowler on Windows, configure Git to support long file paths by running the following command: `git config core.longpaths true`.

> [!IMPORTANT]
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
>
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
> For further guidance, refer to the Poetry Environment Activation Guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment.
> If your poetry version is below 2.0.0 you must keep using `poetry shell` to activate your environment.
> In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment

# ✏️ High level architecture
> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture

## Prowler App
**Prowler App** is composed of three key components:
The **Prowler App** consists of three main components:

- **Prowler UI**: A web-based interface, built with Next.js, providing a user-friendly experience for executing Prowler scans and visualizing results.
- **Prowler API**: A backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
- **Prowler SDK**: A Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
- **Prowler UI**: A user-friendly web interface for running Prowler and viewing results, powered by Next.js.
- **Prowler API**: The backend API that executes Prowler scans and stores the results, built with Django REST Framework.
- **Prowler SDK**: A Python SDK that integrates with the Prowler CLI for advanced functionality.



## Prowler CLI

**Running Prowler**

Prowler can be executed across various environments, offering flexibility to meet your needs. It can be run from:

- Your own workstation
- A Kubernetes Job
- Google Compute Engine
- Azure Virtual Machines (VMs)
- Amazon EC2 instances
- AWS Fargate or other container platforms
- CloudShell

And many more environments.
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine instance, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.



@@ -296,36 +241,23 @@ And many more environments.

## General
- `Allowlist` now is called `Mutelist`.
- The `--quiet` option has been deprecated. Use the `--status` flag to filter findings based on their status: PASS, FAIL, or MANUAL.
- All findings with an `INFO` status have been reclassified as `MANUAL`.
- The CSV output format is standardized across all providers.
- The `--quiet` option has been deprecated; now use the `--status` flag to select the finding's status you want to get from PASS, FAIL or MANUAL.
- All `INFO` finding statuses have changed to `MANUAL`.
- The CSV output format is common for all the providers.

**Deprecated Output Formats**

The following formats are now deprecated:
- Native JSON has been replaced with JSON in [OCSF](https://schema.ocsf.io/) v1.1.0 format, which is standardized across all providers.
We have deprecated some of our output formats:
- The native JSON is replaced by the JSON [OCSF](https://schema.ocsf.io/) v1.1.0, common for all the providers.

## AWS

**AWS Flag Deprecation**

The flag `--sts-endpoint-region` has been deprecated due to the adoption of AWS STS regional tokens.

**Sending FAIL Results to AWS Security Hub**

- To send only FAILS to AWS Security Hub, use one of the following options: `--send-sh-only-fails` or `--security-hub --status FAIL`.
- Deprecated the AWS flag `--sts-endpoint-region` since we use AWS STS regional tokens.
- To send only FAILS to AWS Security Hub, now use either `--send-sh-only-fails` or `--security-hub --status FAIL`.


# 📖 Documentation

**Documentation Resources**

For installation instructions, usage details, tutorials, and the Developer Guide, visit https://docs.prowler.com/
Install, Usage, Tutorials and Developer Guide is at https://docs.prowler.com/

# 📃 License

**Prowler License Information**

Prowler is licensed under the Apache License 2.0, as indicated in each file within the repository.

**Obtaining a Copy of the License**

A copy of the License is available at <http://www.apache.org/licenses/LICENSE-2.0>
Prowler is licensed as Apache License 2.0 as specified in each file. You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
@@ -53,6 +53,3 @@ DJANGO_GOOGLE_OAUTH_CALLBACK_URL=""
|
||||
DJANGO_GITHUB_OAUTH_CLIENT_ID=""
|
||||
DJANGO_GITHUB_OAUTH_CLIENT_SECRET=""
|
||||
DJANGO_GITHUB_OAUTH_CALLBACK_URL=""
|
||||
|
||||
# Deletion Task Batch Size
|
||||
DJANGO_DELETION_BATCH_SIZE=5000
|
||||
|
||||
@@ -80,7 +80,7 @@ repos:
  - id: safety
    name: safety
    description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
    entry: bash -c 'poetry run safety check --ignore 70612,66963,74429'
    entry: bash -c 'poetry run safety check --ignore 70612,66963'
    language: system

  - id: vulture
@@ -2,89 +2,6 @@

All notable changes to the **Prowler API** are documented in this file.

## [v1.9.0] (Prowler UNRELEASED)

### Added
- Support GCP Service Account key. [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)

### Changed
- Renamed field encrypted_password to password for M365 provider [(#7784)](https://github.com/prowler-cloud/prowler/pull/7784)

### Fixed
- Fixed the connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)

---

## [v1.8.2] (Prowler v5.7.2)

### Fixed
- Fixed task lookup to use task_kwargs instead of task_args for scan report resolution. [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
- Fixed Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
- Fixed a race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876).
- Fixed an error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890).

---

## [v1.8.1] (Prowler v5.7.1)

### Fixed
- Added database index to improve performance on finding lookup. [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)

---

## [v1.8.0] (Prowler v5.7.0)

### Added
- Added huge improvements to `/findings/metadata` and resource-related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added new queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added new endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743).
- Added export support for Prowler ThreatScore in M365 [(#7783)](https://github.com/prowler-cloud/prowler/pull/7783)

---

## [v1.7.0] (Prowler v5.6.0)

### Added

- Added M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563).
- Added a `compliance/` folder and ZIP-export functionality for all compliance reports. [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).
- Added a new API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).

---

## [v1.6.0] (Prowler v5.5.0)

### Added

- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289).
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333).
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378).
- Added missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318).

---

## [v1.5.4] (Prowler v5.4.4)

### Fixed
- Fixed a bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466).

---

## [v1.5.3] (Prowler v5.4.3)

### Fixed
- Added duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401).
- Added environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423).

---

## [v1.5.2] (Prowler v5.4.2)

### Changed
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349).

---

## [v1.5.1] (Prowler v5.4.1)
@@ -115,6 +32,6 @@ All notable changes to the **Prowler API** are documented in this file.
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
- Increase the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).

---
@@ -1,33 +1,13 @@
FROM python:3.12.10-slim-bookworm AS build
FROM python:3.12.8-alpine3.20 AS build

LABEL maintainer="https://github.com/prowler-cloud/api"

ARG POWERSHELL_VERSION=7.5.0
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}

# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
    && rm -rf /var/lib/apt/lists/*

# Install PowerShell
RUN ARCH=$(uname -m) && \
    if [ "$ARCH" = "x86_64" ]; then \
        wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
    elif [ "$ARCH" = "aarch64" ]; then \
        wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
    else \
        echo "Unsupported architecture: $ARCH" && exit 1 ; \
    fi && \
    mkdir -p /opt/microsoft/powershell/7 && \
    tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
    chmod +x /opt/microsoft/powershell/7/pwsh && \
    ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
    rm /tmp/powershell.tar.gz
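The RUN instruction above selects a PowerShell release artifact based on the output of `uname -m`. The same mapping as a self-contained Python sketch (the version string mirrors the `POWERSHELL_VERSION` build argument above; everything else is illustrative):

```python
import platform

POWERSHELL_VERSION = "7.5.0"  # mirrors ARG POWERSHELL_VERSION in the Dockerfile

# uname -m values handled by the if/elif chain above.
ARCH_TO_ARTIFACT = {"x86_64": "linux-x64", "aarch64": "linux-arm64"}


def powershell_tarball_url(machine: str) -> str:
    artifact = ARCH_TO_ARTIFACT.get(machine)
    if artifact is None:
        raise SystemExit(f"Unsupported architecture: {machine}")
    return (
        "https://github.com/PowerShell/PowerShell/releases/download/"
        f"v{POWERSHELL_VERSION}/powershell-{POWERSHELL_VERSION}-{artifact}.tar.gz"
    )


print(powershell_tarball_url(platform.machine()))
```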
# Add prowler user
RUN addgroup --gid 1000 prowler && \
    adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
# hadolint ignore=DL3018
RUN apk --no-cache add gcc python3-dev musl-dev linux-headers curl-dev

RUN apk --no-cache upgrade && \
    addgroup -g 1000 prowler && \
    adduser -D -u 1000 -G prowler prowler
USER prowler

WORKDIR /home/prowler
@@ -37,23 +17,27 @@ COPY pyproject.toml ./
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir poetry

COPY src/backend/ ./backend/

ENV PATH="/home/prowler/.local/bin:$PATH"

# Add `--no-root` to avoid installing the current project as a package
RUN poetry install --no-root && \
RUN poetry install && \
    rm -rf ~/.cache/pip

COPY docker-entrypoint.sh ./docker-entrypoint.sh

RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"

WORKDIR /home/prowler/backend

# Development image
# hadolint ignore=DL3006
FROM build AS dev

USER 0
# hadolint ignore=DL3018
RUN apk --no-cache add curl vim

USER prowler

ENTRYPOINT ["../docker-entrypoint.sh", "dev"]

# Production image

@@ -235,7 +235,6 @@ To view the logs for any component (e.g., Django, Celery worker), you can use th

```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```

## Applying migrations

@@ -28,7 +28,7 @@ start_prod_server() {

start_worker() {
    echo "Starting the worker..."
    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill -E --max-tasks-per-child 1
    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion -E --max-tasks-per-child 1
}

start_worker_beat() {
api/poetry.lock | 2219 lines changed (generated)
File diff suppressed because it is too large
@@ -2,43 +2,39 @@
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]

[project]
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
    "celery[pytest] (>=5.4.0,<6.0.0)",
    "dj-rest-auth[with_social,jwt] (==7.0.1)",
    "django==5.1.8",
    "django-allauth==65.4.1",
    "django-celery-beat (>=2.7.0,<3.0.0)",
    "django-celery-results (>=2.5.1,<3.0.0)",
    "django-cors-headers==4.4.0",
    "django-environ==0.11.2",
    "django-filter==24.3",
    "django-guid==3.5.0",
    "django-postgres-extra (>=2.0.8,<3.0.0)",
    "djangorestframework==3.15.2",
    "djangorestframework-jsonapi==7.0.2",
    "djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
    "drf-nested-routers (>=0.94.1,<1.0.0)",
    "drf-spectacular==0.27.2",
    "drf-spectacular-jsonapi==0.5.1",
    "gunicorn==23.0.0",
    "prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
    "psycopg2-binary==2.9.9",
    "pytest-celery[redis] (>=1.0.1,<2.0.0)",
    "sentry-sdk[django] (>=2.20.0,<3.0.0)",
    "uuid6==2024.7.10"
]
[tool.poetry]
authors = ["Prowler Team"]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.9.0"
version = "1.5.1"

[project.scripts]
celery = "src.backend.config.settings.celery"
[tool.poetry.dependencies]
celery = {extras = ["pytest"], version = "^5.4.0"}
dj-rest-auth = {extras = ["with_social", "jwt"], version = "7.0.1"}
django = "5.1.5"
django-celery-beat = "^2.7.0"
django-celery-results = "^2.5.1"
django-cors-headers = "4.4.0"
django-environ = "0.11.2"
django-filter = "24.3"
django-guid = "3.5.0"
django-postgres-extra = "^2.0.8"
djangorestframework = "3.15.2"
djangorestframework-jsonapi = "7.0.2"
djangorestframework-simplejwt = "^5.3.1"
drf-nested-routers = "^0.94.1"
drf-spectacular = "0.27.2"
drf-spectacular-jsonapi = "0.5.1"
gunicorn = "23.0.0"
prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "v5.4"}
psycopg2-binary = "2.9.9"
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
# Needed for prowler compatibility
python = ">=3.11,<3.13"
sentry-sdk = {extras = ["django"], version = "^2.20.0"}
uuid6 = "2024.7.10"

[tool.poetry.group.dev.dependencies]
bandit = "1.7.9"
@@ -46,7 +42,6 @@ coverage = "7.5.4"
django-silk = "5.3.2"
docker = "7.1.0"
freezegun = "1.5.1"
marshmallow = ">=3.15.0,<4.0.0"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "8.2.2"
@@ -59,3 +54,6 @@ ruff = "0.5.0"
safety = "3.2.9"
tqdm = "4.67.1"
vulture = "2.14"

[tool.poetry.scripts]
celery = "src.backend.config.settings.celery"
@@ -30,10 +30,6 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
        with transaction.atomic(using=MainRouter.admin_db):
            user = super().save_user(request, sociallogin, form)
            user.save(using=MainRouter.admin_db)
            social_account_name = sociallogin.account.extra_data.get("name")
            if social_account_name:
                user.name = social_account_name
                user.save(using=MainRouter.admin_db)

            tenant = Tenant.objects.using(MainRouter.admin_db).create(
                name=f"{user.email.split('@')[0]} default tenant"
@@ -109,6 +109,16 @@ class BaseTenantViewset(BaseViewSet):
            pass  # Tenant might not exist, handle gracefully

    def initial(self, request, *args, **kwargs):
        if (
            request.resolver_match.url_name != "tenant-detail"
            and request.method != "DELETE"
        ):
            user_id = str(request.user.id)

            with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
                return super().initial(request, *args, **kwargs)

        # TODO: DRY this when we have time
        if request.auth is None:
            raise NotAuthenticated

@@ -116,8 +126,8 @@ class BaseTenantViewset(BaseViewSet):
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        user_id = str(request.user.id)
        with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)
@@ -1,38 +1,12 @@
from types import MappingProxyType

from api.models import Provider
from prowler.config.config import get_available_compliance_frameworks
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata

from api.models import Provider

PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
PROWLER_CHECKS = {}
AVAILABLE_COMPLIANCE_FRAMEWORKS = {}


def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[str]:
    """
    Retrieve and cache the list of available compliance frameworks for a specific cloud provider.

    This function lazily loads and caches the available compliance frameworks (e.g., CIS, MITRE, ISO)
    for each provider type (AWS, Azure, GCP, etc.) on first access. Subsequent calls for the same
    provider will return the cached result.

    Args:
        provider_type (Provider.ProviderChoices): The cloud provider type for which to retrieve
            available compliance frameworks (e.g., "aws", "azure", "gcp", "m365").

    Returns:
        list[str]: A list of framework identifiers (e.g., "cis_1.4_aws", "mitre_attack_azure") available
            for the given provider.
    """
    global AVAILABLE_COMPLIANCE_FRAMEWORKS
    if provider_type not in AVAILABLE_COMPLIANCE_FRAMEWORKS:
        AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = (
            get_available_compliance_frameworks(provider_type)
        )

    return AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type]


def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
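The caching in `get_compliance_frameworks` above is plain module-level memoization: one dictionary keyed by provider type, filled on first access. The same pattern in isolation (hypothetical loader and data, for illustration only):

```python
def expensive_lookup(provider_type: str) -> list[str]:
    # Stand-in for get_available_compliance_frameworks(); hypothetical values.
    return {"aws": ["cis_1.4_aws"], "azure": ["mitre_attack_azure"]}.get(provider_type, [])


_CACHE: dict[str, list[str]] = {}


def cached_frameworks(provider_type: str) -> list[str]:
    # The first call per provider pays for the lookup; later calls are dict hits.
    if provider_type not in _CACHE:
        _CACHE[provider_type] = expensive_lookup(provider_type)
    return _CACHE[provider_type]


assert cached_frameworks("aws") is cached_frameworks("aws")
```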
@@ -6,7 +6,6 @@ from datetime import datetime, timedelta, timezone
from django.conf import settings
from django.contrib.auth.models import BaseUserManager
from django.db import connection, models, transaction
from django_celery_beat.models import PeriodicTask
from psycopg2 import connect as psycopg2_connect
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
from rest_framework_json_api.serializers import ValidationError
@@ -106,12 +105,11 @@ def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
|
||||
return "".join(secrets.choice(symbols or _symbols) for _ in range(length))
|
||||
|
||||
|
||||
def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_SIZE):
|
||||
def batch_delete(queryset, batch_size=5000):
|
||||
"""
|
||||
Deletes objects in batches and returns the total number of deletions and a summary.
|
||||
|
||||
Args:
|
||||
tenant_id (str): Tenant ID the queryset belongs to.
|
||||
queryset (QuerySet): The queryset of objects to delete.
|
||||
batch_size (int): The number of objects to delete in each batch.
|
||||
|
||||
@@ -122,16 +120,15 @@ def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_
|
||||
deletion_summary = {}
|
||||
|
||||
while True:
|
||||
with rls_transaction(tenant_id, POSTGRES_TENANT_VAR):
|
||||
# Get a batch of IDs to delete
|
||||
batch_ids = set(
|
||||
queryset.values_list("id", flat=True).order_by("id")[:batch_size]
|
||||
)
|
||||
if not batch_ids:
|
||||
# No more objects to delete
|
||||
break
|
||||
# Get a batch of IDs to delete
|
||||
batch_ids = set(
|
||||
queryset.values_list("id", flat=True).order_by("id")[:batch_size]
|
||||
)
|
||||
if not batch_ids:
|
||||
# No more objects to delete
|
||||
break
|
||||
|
||||
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
|
||||
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
|
||||
|
||||
total_deleted += deleted_count
|
||||
for model_label, count in deleted_info.items():
|
||||
@@ -140,18 +137,6 @@ def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_
|
||||
return total_deleted, deletion_summary
|
||||
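The loop above pulls an ordered slice of primary keys, deletes the matching rows, and repeats until a slice comes back empty, so no single DELETE has to touch the whole table at once. A framework-free sketch of that control flow (a plain dict stands in for the Django queryset; names are illustrative):

```python
def batch_delete_sketch(rows: dict[int, str], batch_size: int = 5) -> int:
    # rows stands in for a queryset; its keys play the role of primary keys.
    total_deleted = 0
    while True:
        # "values_list('id').order_by('id')[:batch_size]" on a plain dict.
        batch_ids = sorted(rows)[:batch_size]
        if not batch_ids:
            break  # no more objects to delete
        for row_id in batch_ids:
            del rows[row_id]
        total_deleted += len(batch_ids)
    return total_deleted


assert batch_delete_sketch({i: f"finding-{i}" for i in range(12)}) == 12
```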

def delete_related_daily_task(provider_id: str):
    """
    Deletes the periodic task associated with a specific provider.

    Args:
        provider_id (str): The unique identifier for the provider
            whose related periodic task should be deleted.
    """
    task_name = f"scan-perform-scheduled-{provider_id}"
    PeriodicTask.objects.filter(name=task_name).delete()


# Postgres Enums
@@ -227,77 +212,6 @@ def register_enum(apps, schema_editor, enum_class):  # noqa: F841
    register_adapter(enum_class, enum_adapter)


def create_index_on_partitions(
    apps,  # noqa: F841
    schema_editor,
    parent_table: str,
    index_name: str,
    columns: str,
    method: str = "BTREE",
    where: str = "",
):
    """
    Create an index on every existing partition of `parent_table`.

    Args:
        parent_table: The name of the root table (e.g. "findings").
        index_name: A short name for the index (will be prefixed per-partition).
        columns: The parenthesized column list, e.g. "tenant_id, scan_id, status".
        method: The index method—BTREE, GIN, etc. Defaults to BTREE.
        where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
    """
    with connection.cursor() as cursor:
        cursor.execute(
            """
            SELECT inhrelid::regclass::text
            FROM pg_inherits
            WHERE inhparent = %s::regclass
            """,
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]

    where_sql = f" WHERE {where}" if where else ""
    for partition in partitions:
        idx_name = f"{partition.replace('.', '_')}_{index_name}"
        sql = (
            f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
            f"ON {partition} USING {method} ({columns})"
            f"{where_sql};"
        )
        schema_editor.execute(sql)


def drop_index_on_partitions(
    apps,  # noqa: F841
    schema_editor,
    parent_table: str,
    index_name: str,
):
    """
    Drop the per-partition indexes that were created by create_index_on_partitions.

    Args:
        parent_table: The name of the root table (e.g. "findings").
        index_name: The same short name used when creating them.
    """
    with connection.cursor() as cursor:
        cursor.execute(
            """
            SELECT inhrelid::regclass::text
            FROM pg_inherits
            WHERE inhparent = %s::regclass
            """,
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]

    for partition in partitions:
        idx_name = f"{partition.replace('.', '_')}_{index_name}"
        sql = f"DROP INDEX CONCURRENTLY IF EXISTS {idx_name};"
        schema_editor.execute(sql)


# Postgres enum definition for member role


@@ -404,15 +318,3 @@ class InvitationStateEnum(EnumType):
class InvitationStateEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("invitation_state", *args, **kwargs)


# Postgres enum definition for Integration type


class IntegrationTypeEnum(EnumType):
    enum_type_name = "integration_type"


class IntegrationTypeEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("integration_type", *args, **kwargs)
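A hypothetical migration wiring for the two partition-index helpers above (table, index, and dependency names are illustrative, not taken from this diff); `atomic = False` matters because `CREATE INDEX CONCURRENTLY` cannot run inside a transaction:

```python
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False  # CREATE INDEX CONCURRENTLY must run outside a transaction

    dependencies = [("api", "0016_example")]  # hypothetical predecessor

    operations = [
        migrations.RunPython(
            # RunPython passes (apps, schema_editor); partial fills the rest.
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="scan_status_idx",
                columns="tenant_id, scan_id, status",
                where="status = 'FAIL'",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="scan_status_idx",
            ),
        ),
    ]
```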
@@ -24,7 +24,6 @@ from api.db_utils import (
from api.models import (
    ComplianceOverview,
    Finding,
    Integration,
    Invitation,
    Membership,
    PermissionChoices,
|
||||
pass
|
||||
|
||||
|
||||
class CommonFindingFilters(FilterSet):
|
||||
# We filter providers from the scan in findings
|
||||
provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
|
||||
provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
|
||||
provider_uid__icontains = CharFilter(
|
||||
field_name="scan__provider__uid", lookup_expr="icontains"
|
||||
)
|
||||
provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
|
||||
provider_alias__in = CharInFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="in"
|
||||
)
|
||||
provider_alias__icontains = CharFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
|
||||
uid = CharFilter(field_name="uid")
|
||||
delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
|
||||
status = ChoiceFilter(choices=StatusChoices.choices)
|
||||
severity = ChoiceFilter(choices=SeverityChoices)
|
||||
impact = ChoiceFilter(choices=SeverityChoices)
|
||||
muted = BooleanFilter(
|
||||
help_text="If this filter is not provided, muted and non-muted findings will be returned."
|
||||
)
|
||||
|
||||
resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
|
||||
|
||||
region = CharFilter(method="filter_resource_region")
|
||||
region__in = CharInFilter(field_name="resource_regions", lookup_expr="overlap")
|
||||
region__icontains = CharFilter(
|
||||
field_name="resource_regions", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
service = CharFilter(method="filter_resource_service")
|
||||
service__in = CharInFilter(field_name="resource_services", lookup_expr="overlap")
|
||||
service__icontains = CharFilter(
|
||||
field_name="resource_services", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_uid = CharFilter(field_name="resources__uid")
|
||||
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
|
||||
resource_uid__icontains = CharFilter(
|
||||
field_name="resources__uid", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_name = CharFilter(field_name="resources__name")
|
||||
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
|
||||
resource_name__icontains = CharFilter(
|
||||
field_name="resources__name", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_type = CharFilter(method="filter_resource_type")
|
||||
resource_type__in = CharInFilter(field_name="resource_types", lookup_expr="overlap")
|
||||
resource_type__icontains = CharFilter(
|
||||
field_name="resources__type", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
# Temporarily disabled until we implement tag filtering in the UI
|
||||
# resource_tag_key = CharFilter(field_name="resources__tags__key")
|
||||
# resource_tag_key__in = CharInFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_key__icontains = CharFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tag_value = CharFilter(field_name="resources__tags__value")
|
||||
# resource_tag_value__in = CharInFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_value__icontains = CharFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tags = CharInFilter(
|
||||
# method="filter_resource_tag",
|
||||
# lookup_expr="in",
|
||||
# help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
|
||||
# "separated by commas.",
|
||||
# )
|
||||
|
||||
def filter_resource_service(self, queryset, name, value):
|
||||
return queryset.filter(resource_services__contains=[value])
|
||||
|
||||
def filter_resource_region(self, queryset, name, value):
|
||||
return queryset.filter(resource_regions__contains=[value])
|
||||
|
||||
def filter_resource_type(self, queryset, name, value):
|
||||
return queryset.filter(resource_types__contains=[value])
|
||||
|
||||
def filter_resource_tag(self, queryset, name, value):
|
||||
overall_query = Q()
|
||||
for key_value_pair in value:
|
||||
tag_key, tag_value = key_value_pair.split(":", 1)
|
||||
overall_query |= Q(
|
||||
resources__tags__key__icontains=tag_key,
|
||||
resources__tags__value__icontains=tag_value,
|
||||
)
|
||||
return queryset.filter(overall_query).distinct()
|
||||
|
||||
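`filter_resource_tag` above splits each `key:value` pair on the first colon and ORs a case-insensitive condition per pair into one `Q` object. The matching logic in plain Python (no Django; data and names are illustrative):

```python
def matches_any_tag(resource_tags: dict[str, str], pairs: list[str]) -> bool:
    # Mirrors filter_resource_tag: each "key:value" pair is split on the first
    # ":" only, and the per-pair conditions are OR'd together.
    for key_value_pair in pairs:
        tag_key, tag_value = key_value_pair.split(":", 1)
        if any(
            tag_key.lower() in key.lower() and tag_value.lower() in value.lower()
            for key, value in resource_tags.items()
        ):
            return True
    return False


assert matches_any_tag({"Environment": "production"}, ["env:prod", "team:sec"])
assert not matches_any_tag({"Environment": "staging"}, ["env:prod"])
```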

class TenantFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
@@ -365,7 +256,91 @@ class ResourceFilter(ProviderRelationshipFilterSet):
        return queryset.filter(tags__text_search=value)


class FindingFilter(CommonFindingFilters):
class FindingFilter(FilterSet):
    # We filter providers from the scan in findings
    provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="scan__provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(
        field_name="scan__provider__alias", lookup_expr="in"
    )
    provider_alias__icontains = CharFilter(
        field_name="scan__provider__alias", lookup_expr="icontains"
    )

    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    uid = CharFilter(field_name="uid")
    delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
    status = ChoiceFilter(choices=StatusChoices.choices)
    severity = ChoiceFilter(choices=SeverityChoices)
    impact = ChoiceFilter(choices=SeverityChoices)

    resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")

    region = CharFilter(field_name="resources__region")
    region__in = CharInFilter(field_name="resources__region", lookup_expr="in")
    region__icontains = CharFilter(
        field_name="resources__region", lookup_expr="icontains"
    )

    service = CharFilter(field_name="resources__service")
    service__in = CharInFilter(field_name="resources__service", lookup_expr="in")
    service__icontains = CharFilter(
        field_name="resources__service", lookup_expr="icontains"
    )

    resource_uid = CharFilter(field_name="resources__uid")
    resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
    resource_uid__icontains = CharFilter(
        field_name="resources__uid", lookup_expr="icontains"
    )

    resource_name = CharFilter(field_name="resources__name")
    resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
    resource_name__icontains = CharFilter(
        field_name="resources__name", lookup_expr="icontains"
    )

    resource_type = CharFilter(field_name="resources__type")
    resource_type__in = CharInFilter(field_name="resources__type", lookup_expr="in")
    resource_type__icontains = CharFilter(
        field_name="resources__type", lookup_expr="icontains"
    )

    # Temporarily disabled until we implement tag filtering in the UI
    # resource_tag_key = CharFilter(field_name="resources__tags__key")
    # resource_tag_key__in = CharInFilter(
    #     field_name="resources__tags__key", lookup_expr="in"
    # )
    # resource_tag_key__icontains = CharFilter(
    #     field_name="resources__tags__key", lookup_expr="icontains"
    # )
    # resource_tag_value = CharFilter(field_name="resources__tags__value")
    # resource_tag_value__in = CharInFilter(
    #     field_name="resources__tags__value", lookup_expr="in"
    # )
    # resource_tag_value__icontains = CharFilter(
    #     field_name="resources__tags__value", lookup_expr="icontains"
    # )
    # resource_tags = CharInFilter(
    #     method="filter_resource_tag",
    #     lookup_expr="in",
    #     help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
    #     "separated by commas.",
    # )

    scan = UUIDFilter(method="filter_scan_id")
    scan__in = UUIDInFilter(method="filter_scan_id_in")

@@ -406,15 +381,6 @@ class FindingFilter(CommonFindingFilters):
        },
    }

    def filter_resource_type(self, queryset, name, value):
        return queryset.filter(resource_types__contains=[value])

    def filter_resource_region(self, queryset, name, value):
        return queryset.filter(resource_regions__contains=[value])

    def filter_resource_service(self, queryset, name, value):
        return queryset.filter(resource_services__contains=[value])

    def filter_queryset(self, queryset):
        if not (self.data.get("scan") or self.data.get("scan__in")) and not (
            self.data.get("inserted_at")
@@ -533,6 +499,16 @@ class FindingFilter(CommonFindingFilters):

        return queryset.filter(id__lt=end)

    def filter_resource_tag(self, queryset, name, value):
        overall_query = Q()
        for key_value_pair in value:
            tag_key, tag_value = key_value_pair.split(":", 1)
            overall_query |= Q(
                resources__tags__key__icontains=tag_key,
                resources__tags__value__icontains=tag_value,
            )
        return queryset.filter(overall_query).distinct()

    @staticmethod
    def maybe_date_to_datetime(value):
        dt = value
@@ -541,31 +517,6 @@ class FindingFilter(CommonFindingFilters):
        return dt


class LatestFindingFilter(CommonFindingFilters):
    class Meta:
        model = Finding
        fields = {
            "id": ["exact", "in"],
            "uid": ["exact", "in"],
            "delta": ["exact", "in"],
            "status": ["exact", "in"],
            "severity": ["exact", "in"],
            "impact": ["exact", "in"],
            "check_id": ["exact", "in", "icontains"],
        }
        filter_overrides = {
            FindingDeltaEnumField: {
                "filter_class": CharFilter,
            },
            StatusEnumField: {
                "filter_class": CharFilter,
            },
            SeverityEnumField: {
                "filter_class": CharFilter,
            },
        }
class ProviderSecretFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
@@ -662,6 +613,12 @@ class ScanSummaryFilter(FilterSet):
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    region = CharFilter(field_name="region")
    muted_findings = BooleanFilter(method="filter_muted_findings")

    def filter_muted_findings(self, queryset, name, value):
        if not value:
            return queryset.exclude(muted__gt=0)
        return queryset

    class Meta:
        model = ScanSummary
@@ -672,6 +629,8 @@ class ScanSummaryFilter(FilterSet):


class ServiceOverviewFilter(ScanSummaryFilter):
    muted_findings = None

    def is_valid(self):
        # Check if at least one of the inserted_at filters is present
        inserted_at_filters = [
@@ -689,19 +648,3 @@ class ServiceOverviewFilter(ScanSummaryFilter):
            }
        )
        return super().is_valid()


class IntegrationFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
    integration_type__in = ChoiceInFilter(
        choices=Integration.IntegrationChoices.choices,
        field_name="integration_type",
        lookup_expr="in",
    )

    class Meta:
        model = Integration
        fields = {
            "inserted_at": ["date", "gte", "lte"],
        }
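The `muted_findings` filter above only restricts the queryset when a falsy value is passed: `false` drops scan summaries that contain any muted findings, while `true` (or omitting the filter entirely) returns everything. The same behavior sketched with plain lists (illustrative data, not from the diff):

```python
def apply_muted_findings(rows: list[dict], value: bool) -> list[dict]:
    # Mirrors ScanSummaryFilter.filter_muted_findings: a falsy value excludes
    # rows with muted > 0; a truthy value leaves the rows untouched.
    if not value:
        return [row for row in rows if not row["muted"] > 0]
    return rows


summaries = [{"scan": "a", "muted": 0}, {"scan": "b", "muted": 3}]
assert apply_muted_findings(summaries, False) == [{"scan": "a", "muted": 0}]
assert apply_muted_findings(summaries, True) == summaries
```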
@@ -12,7 +12,6 @@ from api.models import (
    Provider,
    Resource,
    ResourceFindingMapping,
    ResourceScanSummary,
    Scan,
    StatusChoices,
)
@@ -134,7 +133,6 @@ class Command(BaseCommand):
                    region=random.choice(possible_regions),
                    service=random.choice(possible_services),
                    type=random.choice(possible_types),
                    inserted_at="2024-10-01T00:00:00Z",
                )
            )

@@ -183,10 +181,6 @@ class Command(BaseCommand):
                        "servicename": assigned_resource.service,
                        "resourcetype": assigned_resource.type,
                    },
                    resource_types=[assigned_resource.type],
                    resource_regions=[assigned_resource.region],
                    resource_services=[assigned_resource.service],
                    inserted_at="2024-10-01T00:00:00Z",
                )
            )

@@ -203,22 +197,12 @@ class Command(BaseCommand):

            # Create ResourceFindingMapping
            mappings = []
            scan_resource_cache: set[tuple] = set()
            for index, finding_instance in enumerate(findings):
                resource_instance = resources[findings_resources_mapping[index]]
            for index, f in enumerate(findings):
                mappings.append(
                    ResourceFindingMapping(
                        tenant_id=tenant_id,
                        resource=resource_instance,
                        finding=finding_instance,
                    )
                )
                scan_resource_cache.add(
                    (
                        str(resource_instance.id),
                        resource_instance.service,
                        resource_instance.region,
                        resource_instance.type,
                        resource=resources[findings_resources_mapping[index]],
                        finding=f,
                    )
                )

@@ -236,38 +220,6 @@ class Command(BaseCommand):
                    "Resource-finding mappings created successfully.\n\n"
                )
            )

            with rls_transaction(tenant_id):
                scan.progress = 99
                scan.save()

            self.stdout.write(self.style.WARNING("Creating finding filter values..."))
            resource_scan_summaries = [
                ResourceScanSummary(
                    tenant_id=tenant_id,
                    scan_id=str(scan.id),
                    resource_id=resource_id,
                    service=service,
                    region=region,
                    resource_type=resource_type,
                )
                for resource_id, service, region, resource_type in scan_resource_cache
            ]
            num_batches = ceil(len(resource_scan_summaries) / batch_size)
            with rls_transaction(tenant_id):
                for i in tqdm(
                    range(0, len(resource_scan_summaries), batch_size),
                    total=num_batches,
                ):
                    with rls_transaction(tenant_id):
                        ResourceScanSummary.objects.bulk_create(
                            resource_scan_summaries[i : i + batch_size],
                            ignore_conflicts=True,
                        )

            self.stdout.write(
                self.style.SUCCESS("Finding filter values created successfully.\n\n")
            )
        except Exception as e:
            self.stdout.write(self.style.ERROR(f"Failed to populate test data: {e}"))
            scan_state = "failed"
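The removed block above wrote `ResourceScanSummary` rows with `bulk_create` in fixed-size slices. The slicing arithmetic on its own (a sketch with plain lists; the batch size here is arbitrary):

```python
from math import ceil


def batched(items: list, batch_size: int) -> list[list]:
    # Same slice pattern as the bulk_create loop above: items[i : i + batch_size].
    num_batches = ceil(len(items) / batch_size)
    batches = [items[i : i + batch_size] for i in range(0, len(items), batch_size)]
    assert len(batches) == num_batches
    return batches


assert batched(list(range(10)), 4) == [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]
```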
@@ -1,35 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46

from functools import partial

from django.db import migrations

from api.db_utils import IntegrationTypeEnum, PostgresEnumMigration, register_enum
from api.models import Integration

IntegrationTypeEnumMigration = PostgresEnumMigration(
    enum_name="integration_type",
    enum_values=tuple(
        integration_type[0]
        for integration_type in Integration.IntegrationChoices.choices
    ),
)


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0012_scan_report_output"),
    ]

    operations = [
        migrations.RunPython(
            IntegrationTypeEnumMigration.create_enum_type,
            reverse_code=IntegrationTypeEnumMigration.drop_enum_type,
        ),
        migrations.RunPython(
            partial(register_enum, enum_class=IntegrationTypeEnum),
            reverse_code=migrations.RunPython.noop,
        ),
    ]
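The deleted migration above created a Postgres enum type for the integration choices and registered a psycopg2 adapter for it. Roughly the DDL such an enum migration boils down to, sketched as Python string-building (an assumption about the effect of `PostgresEnumMigration`, whose implementation this diff does not show; the five values come from the `IntegrationChoices` listed in the next migration):

```python
# Sketch of the DDL a Postgres enum migration reduces to; not the real
# PostgresEnumMigration implementation.
INTEGRATION_CHOICES = ["amazon_s3", "saml", "aws_security_hub", "jira", "slack"]

create_enum_sql = (
    "CREATE TYPE integration_type AS ENUM ("
    + ", ".join(f"'{value}'" for value in INTEGRATION_CHOICES)
    + ");"
)
drop_enum_sql = "DROP TYPE integration_type;"

print(create_enum_sql)
print(drop_enum_sql)
```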
@@ -1,131 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46

import uuid

import django.db.models.deletion
from django.db import migrations, models

import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0013_integrations_enum"),
    ]

    operations = [
        migrations.CreateModel(
            name="Integration",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("enabled", models.BooleanField(default=False)),
                ("connected", models.BooleanField(blank=True, null=True)),
                (
                    "connection_last_checked_at",
                    models.DateTimeField(blank=True, null=True),
                ),
                (
                    "integration_type",
                    api.db_utils.IntegrationTypeEnumField(
                        choices=[
                            ("amazon_s3", "Amazon S3"),
                            ("saml", "SAML"),
                            ("aws_security_hub", "AWS Security Hub"),
                            ("jira", "JIRA"),
                            ("slack", "Slack"),
                        ]
                    ),
                ),
                ("configuration", models.JSONField(default=dict)),
                ("_credentials", models.BinaryField(db_column="credentials")),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={"db_table": "integrations", "abstract": False},
        ),
        migrations.AddConstraint(
            model_name="integration",
            constraint=RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_integration",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.CreateModel(
            name="IntegrationProviderRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "integration",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="api.integration",
                    ),
                ),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.provider"
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "integration_provider_mappings",
                "constraints": [
                    models.UniqueConstraint(
                        fields=("integration_id", "provider_id"),
                        name="unique_integration_provider_rel",
                    ),
                ],
            },
        ),
        migrations.AddConstraint(
            model_name="IntegrationProviderRelationship",
            constraint=RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_integrationproviderrelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="integration",
            name="providers",
            field=models.ManyToManyField(
                blank=True,
                related_name="integrations",
                through="api.IntegrationProviderRelationship",
                to="api.provider",
            ),
        ),
    ]
@@ -1,26 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-25 11:29

from django.db import migrations, models

import api.db_utils


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0014_integrations"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="muted",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="finding",
            name="status",
            field=api.db_utils.StatusEnumField(
                choices=[("FAIL", "Fail"), ("PASS", "Pass"), ("MANUAL", "Manual")]
            ),
        ),
    ]
@@ -1,32 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-31 10:46

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0015_finding_muted"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="compliance",
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
        migrations.AddField(
            model_name="resource",
            name="details",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="resource",
            name="metadata",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="resource",
            name="partition",
            field=models.TextField(blank=True, null=True),
        ),
    ]
@@ -1,32 +0,0 @@
# Generated by Django 5.1.7 on 2025-04-16 08:47

from django.db import migrations

import api.db_utils


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0016_finding_compliance_resource_details_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="provider",
            name="provider",
            field=api.db_utils.ProviderEnumField(
                choices=[
                    ("aws", "AWS"),
                    ("azure", "Azure"),
                    ("gcp", "GCP"),
                    ("kubernetes", "Kubernetes"),
                    ("m365", "M365"),
                ],
                default="aws",
            ),
        ),
        migrations.RunSQL(
            "ALTER TYPE provider ADD VALUE IF NOT EXISTS 'm365';",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
@@ -1,81 +0,0 @@
# Generated by Django 5.1.7 on 2025-05-05 10:01

import uuid

import django.db.models.deletion
import uuid6
from django.db import migrations, models

import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0017_m365_provider"),
    ]

    operations = [
        migrations.CreateModel(
            name="ResourceScanSummary",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("scan_id", models.UUIDField(db_index=True, default=uuid6.uuid7)),
                ("resource_id", models.UUIDField(db_index=True, default=uuid.uuid4)),
                ("service", models.CharField(max_length=100)),
                ("region", models.CharField(max_length=100)),
                ("resource_type", models.CharField(max_length=100)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "resource_scan_summaries",
                "indexes": [
                    models.Index(
                        fields=["tenant_id", "scan_id", "service"],
                        name="rss_tenant_scan_svc_idx",
                    ),
                    models.Index(
                        fields=["tenant_id", "scan_id", "region"],
                        name="rss_tenant_scan_reg_idx",
                    ),
                    models.Index(
                        fields=["tenant_id", "scan_id", "resource_type"],
                        name="rss_tenant_scan_type_idx",
                    ),
                    models.Index(
                        fields=["tenant_id", "scan_id", "region", "service"],
                        name="rss_tenant_scan_reg_svc_idx",
                    ),
                    models.Index(
                        fields=["tenant_id", "scan_id", "service", "resource_type"],
                        name="rss_tenant_scan_svc_type_idx",
                    ),
                    models.Index(
                        fields=["tenant_id", "scan_id", "region", "resource_type"],
                        name="rss_tenant_scan_reg_type_idx",
                    ),
                ],
                "unique_together": {("tenant_id", "scan_id", "resource_id")},
            },
        ),
        migrations.AddConstraint(
            model_name="resourcescansummary",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_resourcescansummary",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
    ]
@@ -1,42 +0,0 @@
import django.contrib.postgres.fields
import django.contrib.postgres.indexes
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0018_resource_scan_summaries"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="resource_regions",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(max_length=100),
                blank=True,
                null=True,
                size=None,
            ),
        ),
        migrations.AddField(
            model_name="finding",
            name="resource_services",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(max_length=100),
                blank=True,
                null=True,
                size=None,
            ),
        ),
        migrations.AddField(
            model_name="finding",
            name="resource_types",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(max_length=100),
                blank=True,
                null=True,
                size=None,
            ),
        ),
    ]
@@ -1,86 +0,0 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0019_finding_denormalize_resource_fields"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_service_idx",
                columns="resource_services",
                method="GIN",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_service_idx",
            ),
        ),
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_region_idx",
                columns="resource_regions",
                method="GIN",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_region_idx",
            ),
        ),
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_rtype_idx",
                columns="resource_types",
                method="GIN",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_rtype_idx",
            ),
        ),
        migrations.RunPython(
            partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="findings_uid_idx",
            ),
            reverse_code=partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="findings_uid_idx",
                columns="uid",
                method="BTREE",
            ),
        ),
        migrations.RunPython(
            partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="findings_filter_idx",
            ),
            reverse_code=partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="findings_filter_idx",
                columns="scan_id, impact, severity, status, check_id, delta",
                method="BTREE",
            ),
        ),
    ]
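
# --- Illustrative sketch (not part of the diff): one plausible shape for the
# partition-index helpers referenced above. Assumes they enumerate the child
# partitions of a partitioned table and run DDL on each; the real
# api.db_utils implementation may differ.
from django.db import connection


def create_index_on_partitions_sketch(
    apps, schema_editor, parent_table, index_name, columns, method="BTREE"
):
    with connection.cursor() as cursor:
        # List every child partition of the parent table.
        cursor.execute(
            "SELECT inhrelid::regclass::text FROM pg_inherits "
            "WHERE inhparent = %s::regclass;",
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]
    for partition in partitions:
        with connection.cursor() as cursor:
            # One index per partition; `atomic = False` lets each DDL statement
            # commit on its own.
            cursor.execute(
                f"CREATE INDEX IF NOT EXISTS {partition}_{index_name} "
                f"ON {partition} USING {method} ({columns});"
            )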
@@ -1,37 +0,0 @@
import django.contrib.postgres.indexes
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0020_findings_new_performance_indexes_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="finding",
            index=django.contrib.postgres.indexes.GinIndex(
                fields=["resource_services"], name="gin_find_service_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="finding",
            index=django.contrib.postgres.indexes.GinIndex(
                fields=["resource_regions"], name="gin_find_region_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="finding",
            index=django.contrib.postgres.indexes.GinIndex(
                fields=["resource_types"], name="gin_find_rtype_idx"
            ),
        ),
        migrations.RemoveIndex(
            model_name="finding",
            name="findings_uid_idx",
        ),
        migrations.RemoveIndex(
            model_name="finding",
            name="findings_filter_idx",
        ),
    ]
@@ -1,38 +0,0 @@
# Generated by Django 5.1.8 on 2025-05-12 10:04

from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0021_findings_new_performance_indexes_parent"),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        AddIndexConcurrently(
            model_name="scan",
            index=models.Index(
                condition=models.Q(("state", "completed")),
                fields=["tenant_id", "provider_id", "state", "-inserted_at"],
                name="scans_prov_state_ins_desc_idx",
            ),
        ),
        AddIndexConcurrently(
            model_name="scansummary",
            index=models.Index(
                fields=["tenant_id", "scan_id", "service"],
                name="ss_tenant_scan_service_idx",
            ),
        ),
        AddIndexConcurrently(
            model_name="scansummary",
            index=models.Index(
                fields=["tenant_id", "scan_id", "severity"],
                name="ss_tenant_scan_severity_idx",
            ),
        ),
    ]
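
# --- Note (not part of the diff): AddIndexConcurrently wraps PostgreSQL's
# CREATE INDEX CONCURRENTLY, which cannot run inside a transaction block;
# that is why these migrations declare `atomic = False`. Minimal pattern:
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class ExampleMigration(migrations.Migration):
    atomic = False  # required for concurrent index creation

    dependencies = []

    operations = [
        AddIndexConcurrently(
            model_name="scan",
            index=models.Index(fields=["tenant_id"], name="example_tenant_idx"),
        ),
    ]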
@@ -1,28 +0,0 @@
# Generated by Django 5.1.8 on 2025-05-12 10:18

from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0022_scan_summaries_performance_indexes"),
    ]

    operations = [
        AddIndexConcurrently(
            model_name="resource",
            index=models.Index(
                fields=["tenant_id", "id"], name="resources_tenant_id_idx"
            ),
        ),
        AddIndexConcurrently(
            model_name="resource",
            index=models.Index(
                fields=["tenant_id", "provider_id"],
                name="resources_tenant_provider_idx",
            ),
        ),
    ]
@@ -1,29 +0,0 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0023_resources_lookup_optimization"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_uid_inserted_idx",
                columns="tenant_id, uid, inserted_at DESC",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_uid_inserted_idx",
            ),
        )
    ]
@@ -1,17 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0024_findings_uid_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "uid", "-inserted_at"],
                name="find_tenant_uid_inserted_idx",
            ),
        ),
    ]
@@ -1,14 +0,0 @@
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0025_findings_uid_index_parent"),
    ]

    operations = [
        migrations.RunSQL(
            "ALTER TYPE provider_secret_type ADD VALUE IF NOT EXISTS 'service_account';",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
@@ -1,13 +1,10 @@
import json
import re
import time
from uuid import UUID, uuid4

from config.env import env
from cryptography.fernet import Fernet
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.indexes import GinIndex
from django.contrib.postgres.search import SearchVector, SearchVectorField
from django.core.validators import MinLengthValidator
@@ -24,7 +21,6 @@ from uuid6 import uuid7
from api.db_utils import (
    CustomUserManager,
    FindingDeltaEnumField,
    IntegrationTypeEnumField,
    InvitationStateEnumField,
    MemberRoleEnumField,
    ProviderEnumField,
@@ -62,6 +58,7 @@ class StatusChoices(models.TextChoices):
    FAIL = "FAIL", _("Fail")
    PASS = "PASS", _("Pass")
    MANUAL = "MANUAL", _("Manual")
    MUTED = "MUTED", _("Muted")


class StateChoices(models.TextChoices):
@@ -194,7 +191,6 @@ class Provider(RowLevelSecurityProtectedModel):
        AZURE = "azure", _("Azure")
        GCP = "gcp", _("GCP")
        KUBERNETES = "kubernetes", _("Kubernetes")
        M365 = "m365", _("M365")

    @staticmethod
    def validate_aws_uid(value):
@@ -218,19 +214,6 @@ class Provider(RowLevelSecurityProtectedModel):
                pointer="/data/attributes/uid",
            )

    @staticmethod
    def validate_m365_uid(value):
        if not re.match(
            r"""^(?!-)[A-Za-z0-9](?:[A-Za-z0-9-]{0,61}[A-Za-z0-9])?(?:\.(?!-)[A-Za-z0-9]"""
            r"""(?:[A-Za-z0-9-]{0,61}[A-Za-z0-9])?)*\.[A-Za-z]{2,}$""",
            value,
        ):
            raise ModelValidationError(
                detail="M365 domain ID must be a valid domain.",
                code="m365-uid",
                pointer="/data/attributes/uid",
            )

    @staticmethod
    def validate_gcp_uid(value):
        if not re.match(r"^[a-z][a-z0-9-]{5,29}$", value):
@@ -244,7 +227,7 @@ class Provider(RowLevelSecurityProtectedModel):
    @staticmethod
    def validate_kubernetes_uid(value):
        if not re.match(
            r"^[a-zA-Z0-9][a-zA-Z0-9._@:\/-]{1,250}$",
            r"^[a-z0-9][A-Za-z0-9_.:\/-]{1,250}$",
            value,
        ):
            raise ModelValidationError(
@@ -354,42 +337,6 @@ class ProviderGroupMembership(RowLevelSecurityProtectedModel):
        resource_name = "provider_groups-provider"


class TaskManager(models.Manager):
    def get_with_retry(
        self,
        id: str,
        max_retries: int = None,
        delay_seconds: float = None,
    ):
        """
        Retry fetching a Task by ID in case it hasn't been created yet.

        Args:
            id (str): The Celery task ID (expected to match Task model PK).
            max_retries (int, optional): Number of retry attempts. Defaults to env TASK_RETRY_ATTEMPTS or 5.
            delay_seconds (float, optional): Delay between retries in seconds. Defaults to env TASK_RETRY_DELAY_SECONDS or 0.1.

        Returns:
            Task: The retrieved Task instance.

        Raises:
            Task.DoesNotExist: If the task is not found after all retries.
        """
        max_retries = max_retries or env.int("TASK_RETRY_ATTEMPTS", default=5)
        delay_seconds = delay_seconds or env.float(
            "TASK_RETRY_DELAY_SECONDS", default=0.1
        )

        for _attempt in range(max_retries):
            try:
                return self.get(id=id)
            except self.model.DoesNotExist:
                time.sleep(delay_seconds)
        raise self.model.DoesNotExist(
            f"Task with ID {id} not found after {max_retries} retries."
        )
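
# --- Example (not part of the diff): how a caller uses get_with_retry. ---
# Hypothetical task_id; the row for a freshly published Celery task may not be
# visible yet, so we poll briefly instead of failing on the first miss.
# (`Task` is the model defined just below.)
def wait_for_task(task_id: str):
    try:
        return Task.objects.get_with_retry(task_id, max_retries=3, delay_seconds=0.5)
    except Task.DoesNotExist:
        return None  # still not visible after ~1.5 seconds of polling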


class Task(RowLevelSecurityProtectedModel):
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
@@ -402,8 +349,6 @@ class Task(RowLevelSecurityProtectedModel):
        blank=True,
    )

    objects = TaskManager()

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "tasks"

@@ -470,7 +415,6 @@ class Scan(RowLevelSecurityProtectedModel):
        PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
    )
    output_location = models.CharField(blank=True, null=True, max_length=200)

    # TODO: mutelist foreign key

    class Meta(RowLevelSecurityProtectedModel.Meta):
@@ -493,11 +437,6 @@ class Scan(RowLevelSecurityProtectedModel):
                fields=["tenant_id", "provider_id", "state", "inserted_at"],
                name="scans_prov_state_insert_idx",
            ),
            models.Index(
                fields=["tenant_id", "provider_id", "state", "-inserted_at"],
                condition=Q(state=StateChoices.COMPLETED),
                name="scans_prov_state_ins_desc_idx",
            ),
        ]

    class JSONAPIMeta:
@@ -579,11 +518,6 @@ class Resource(RowLevelSecurityProtectedModel):
        editable=False,
    )

    metadata = models.TextField(blank=True, null=True)
    details = models.TextField(blank=True, null=True)
    partition = models.TextField(blank=True, null=True)

    # Relationships
    tags = models.ManyToManyField(
        ResourceTag,
        verbose_name="Tags associated with the resource, by provider",
@@ -624,11 +558,6 @@ class Resource(RowLevelSecurityProtectedModel):
                name="resource_tenant_metadata_idx",
            ),
            GinIndex(fields=["text_search"], name="gin_resources_search_idx"),
            models.Index(fields=["tenant_id", "id"], name="resources_tenant_id_idx"),
            models.Index(
                fields=["tenant_id", "provider_id"],
                name="resources_tenant_provider_idx",
            ),
        ]

        constraints = [
@@ -726,23 +655,6 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
    tags = models.JSONField(default=dict, null=True, blank=True)
    check_id = models.CharField(max_length=100, blank=False, null=False)
    check_metadata = models.JSONField(default=dict, null=False)
    muted = models.BooleanField(default=False, null=False)
    compliance = models.JSONField(default=dict, null=True, blank=True)

    # Denormalize resource data for performance
    resource_regions = ArrayField(
        models.CharField(max_length=100), blank=True, null=True
    )
    resource_services = ArrayField(
        models.CharField(max_length=100),
        blank=True,
        null=True,
    )
    resource_types = ArrayField(
        models.CharField(max_length=100),
        blank=True,
        null=True,
    )

    # Relationships
    scan = models.ForeignKey(to=Scan, related_name="findings", on_delete=models.CASCADE)
@@ -784,6 +696,18 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
        ]

        indexes = [
            models.Index(fields=["uid"], name="findings_uid_idx"),
            models.Index(
                fields=[
                    "scan_id",
                    "impact",
                    "severity",
                    "status",
                    "check_id",
                    "delta",
                ],
                name="findings_filter_idx",
            ),
            models.Index(fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"),
            GinIndex(fields=["text_search"], name="gin_findings_search_idx"),
            models.Index(fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"),
@@ -795,42 +719,19 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
                condition=Q(delta="new"),
                name="find_delta_new_idx",
            ),
            models.Index(
                fields=["tenant_id", "uid", "-inserted_at"],
                name="find_tenant_uid_inserted_idx",
            ),
            GinIndex(fields=["resource_services"], name="gin_find_service_idx"),
            GinIndex(fields=["resource_regions"], name="gin_find_region_idx"),
            GinIndex(fields=["resource_types"], name="gin_find_rtype_idx"),
        ]

    class JSONAPIMeta:
        resource_name = "findings"

    def add_resources(self, resources: list[Resource] | None):
        if not resources:
            return

        self.resource_regions = self.resource_regions or []
        self.resource_services = self.resource_services or []
        self.resource_types = self.resource_types or []

        # Deduplication
        regions = set(self.resource_regions)
        services = set(self.resource_services)
        types = set(self.resource_types)

        # Add new relationships with the tenant_id field
        for resource in resources:
            ResourceFindingMapping.objects.update_or_create(
                resource=resource, finding=self, tenant_id=self.tenant_id
            )
            regions.add(resource.region)
            services.add(resource.service)
            types.add(resource.type)

        self.resource_regions = list(regions)
        self.resource_services = list(services)
        self.resource_types = list(types)
        # Save the instance
        self.save()


@@ -890,7 +791,6 @@ class ProviderSecret(RowLevelSecurityProtectedModel):
    class TypeChoices(models.TextChoices):
        STATIC = "static", _("Key-value pairs")
        ROLE = "role", _("Role assumption")
        SERVICE_ACCOUNT = "service_account", _("GCP Service Account Key")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
@@ -1233,142 +1133,8 @@ class ScanSummary(RowLevelSecurityProtectedModel):
            models.Index(
                fields=["tenant_id", "scan_id"],
                name="scan_summaries_tenant_scan_idx",
            ),
            models.Index(
                fields=["tenant_id", "scan_id", "service"],
                name="ss_tenant_scan_service_idx",
            ),
            models.Index(
                fields=["tenant_id", "scan_id", "severity"],
                name="ss_tenant_scan_severity_idx",
            ),
            )
        ]

    class JSONAPIMeta:
        resource_name = "scan-summaries"


class Integration(RowLevelSecurityProtectedModel):
    class IntegrationChoices(models.TextChoices):
        S3 = "amazon_s3", _("Amazon S3")
        SAML = "saml", _("SAML")
        AWS_SECURITY_HUB = "aws_security_hub", _("AWS Security Hub")
        JIRA = "jira", _("JIRA")
        SLACK = "slack", _("Slack")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    enabled = models.BooleanField(default=False)
    connected = models.BooleanField(null=True, blank=True)
    connection_last_checked_at = models.DateTimeField(null=True, blank=True)
    integration_type = IntegrationTypeEnumField(choices=IntegrationChoices.choices)
    configuration = models.JSONField(default=dict)
    _credentials = models.BinaryField(db_column="credentials")

    providers = models.ManyToManyField(
        Provider,
        related_name="integrations",
        through="IntegrationProviderRelationship",
        blank=True,
    )

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "integrations"

        constraints = [
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "integrations"

    @property
    def credentials(self):
        if isinstance(self._credentials, memoryview):
            encrypted_bytes = self._credentials.tobytes()
        elif isinstance(self._credentials, str):
            encrypted_bytes = self._credentials.encode()
        else:
            encrypted_bytes = self._credentials
        decrypted_data = fernet.decrypt(encrypted_bytes)
        return json.loads(decrypted_data.decode())

    @credentials.setter
    def credentials(self, value):
        encrypted_data = fernet.encrypt(json.dumps(value).encode())
        self._credentials = encrypted_data
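
# --- Example (not part of the diff): round-tripping Integration credentials. ---
# The property above stores credentials as Fernet-encrypted JSON in a binary
# column; `fernet` is the module-level Fernet instance built from the app's
# configured key. A self-contained stand-in:
import json as _json

from cryptography.fernet import Fernet as _Fernet

_fernet = _Fernet(_Fernet.generate_key())  # stand-in for the configured key
_ciphertext = _fernet.encrypt(_json.dumps({"api_key": "demo"}).encode())
assert _json.loads(_fernet.decrypt(_ciphertext).decode()) == {"api_key": "demo"}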


class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    integration = models.ForeignKey(Integration, on_delete=models.CASCADE)
    provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "integration_provider_mappings"
        constraints = [
            models.UniqueConstraint(
                fields=["integration_id", "provider_id"],
                name="unique_integration_provider_rel",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]


class ResourceScanSummary(RowLevelSecurityProtectedModel):
    scan_id = models.UUIDField(default=uuid7, db_index=True)
    resource_id = models.UUIDField(default=uuid4, db_index=True)
    service = models.CharField(max_length=100)
    region = models.CharField(max_length=100)
    resource_type = models.CharField(max_length=100)

    class Meta:
        db_table = "resource_scan_summaries"
        unique_together = (("tenant_id", "scan_id", "resource_id"),)

        indexes = [
            # Single-dimension lookups:
            models.Index(
                fields=["tenant_id", "scan_id", "service"],
                name="rss_tenant_scan_svc_idx",
            ),
            models.Index(
                fields=["tenant_id", "scan_id", "region"],
                name="rss_tenant_scan_reg_idx",
            ),
            models.Index(
                fields=["tenant_id", "scan_id", "resource_type"],
                name="rss_tenant_scan_type_idx",
            ),
            # Two-dimension cross-filters:
            models.Index(
                fields=["tenant_id", "scan_id", "region", "service"],
                name="rss_tenant_scan_reg_svc_idx",
            ),
            models.Index(
                fields=["tenant_id", "scan_id", "service", "resource_type"],
                name="rss_tenant_scan_svc_type_idx",
            ),
            models.Index(
                fields=["tenant_id", "scan_id", "region", "resource_type"],
                name="rss_tenant_scan_reg_type_idx",
            ),
        ]

        constraints = [
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

@@ -2,7 +2,8 @@ from typing import Any
from uuid import uuid4

from django.core.exceptions import ValidationError
from django.db import DEFAULT_DB_ALIAS, models
from django.db import DEFAULT_DB_ALIAS
from django.db import models
from django.db.backends.ddl_references import Statement, Table

from api.db_utils import DB_USER, POSTGRES_TENANT_VAR
@@ -58,11 +59,11 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
    drop_sql_query = """
        ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
        ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
        REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
        REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
    """

    drop_policy_sql_query = """
        DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} ON %(table_name)s;
        DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
    """

    def __init__(
@@ -87,7 +88,9 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
            f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
        )

        full_create_sql_query = f"{self.rls_sql_query}{policy_queries}{grant_queries}"
        full_create_sql_query = (
            f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
        )

        table_name = model._meta.db_table
        if self.partition_name:
@@ -104,20 +107,16 @@ class RowLevelSecurityConstraint(models.BaseConstraint):

    def remove_sql(self, model: Any, schema_editor: Any) -> Any:
        field_column = schema_editor.quote_name(self.target_field)
        raw_table_name = model._meta.db_table
        table_name = raw_table_name
        if self.partition_name:
            raw_table_name = f"{raw_table_name}_{self.partition_name}"
            table_name = raw_table_name

        full_drop_sql_query = (
            f"{self.drop_sql_query}"
            f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
            f"{''.join([self.drop_policy_sql_query.format(statement) for statement in self.statements])}"
        )
        table_name = model._meta.db_table
        if self.partition_name:
            table_name = f"{table_name}_{self.partition_name}"
        return Statement(
            full_drop_sql_query,
            table_name=Table(table_name, schema_editor.quote_name),
            raw_table_name=raw_table_name,
            field_column=field_column,
            db_user=DB_USER,
            partition_name=self.partition_name,

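# --- Illustrative sketch (not part of the diff): the tenant-pinning pattern
# these policies depend on. Names are assumed (e.g. the "api.tenant_id" GUC);
# each request runs inside one transaction with the tenant pinned, so policies
# like USING (tenant_id = current_setting('api.tenant_id')::uuid) can match.
from contextlib import contextmanager

from django.db import connection, transaction


@contextmanager
def rls_transaction_sketch(tenant_id: str):
    with transaction.atomic():
        with connection.cursor() as cursor:
            # `true` makes the setting transaction-local, so it resets on
            # commit or rollback.
            cursor.execute(
                "SELECT set_config('api.tenant_id', %s, true);", [tenant_id]
            )
        yield
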
@@ -1,12 +1,12 @@
from celery import states
from celery.signals import before_task_publish
from config.celery import celery_app
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django_celery_beat.models import PeriodicTask
from django_celery_results.backends.database import DatabaseBackend

from api.db_utils import delete_related_daily_task
from api.models import Provider
from config.celery import celery_app


def create_task_result_on_publish(sender=None, headers=None, **kwargs):  # noqa: F841
@@ -31,4 +31,5 @@ before_task_publish.connect(
@receiver(post_delete, sender=Provider)
def delete_provider_scan_task(sender, instance, **kwargs):  # noqa: F841
    # Delete the associated periodic task when the provider is deleted
    delete_related_daily_task(instance.id)
    task_name = f"scan-perform-scheduled-{instance.id}"
    PeriodicTask.objects.filter(name=task_name).delete()

File diff suppressed because it is too large
@@ -131,10 +131,9 @@ class TestBatchDelete:
        return provider_count

    @pytest.mark.django_db
    def test_batch_delete(self, tenants_fixture, create_test_providers):
        tenant_id = str(tenants_fixture[0].id)
    def test_batch_delete(self, create_test_providers):
        _, summary = batch_delete(
            tenant_id, Provider.objects.all(), batch_size=create_test_providers // 2
            Provider.objects.all(), batch_size=create_test_providers // 2
        )
        assert Provider.objects.all().count() == 0
        assert summary == {"api.Provider": create_test_providers}

@@ -1,9 +1,6 @@
import uuid
from unittest import mock

import pytest

from api.models import Resource, ResourceTag, Task
from api.models import Resource, ResourceTag


@pytest.mark.django_db
@@ -95,63 +92,3 @@ class TestResourceModel:

        assert len(resource.tags.filter(tenant_id=tenant_id)) == 0
        assert resource.get_tags(tenant_id=tenant_id) == {}


# @pytest.mark.django_db
# class TestFindingModel:
#     def test_add_finding_with_long_uid(
#         self, providers_fixture, scans_fixture, resources_fixture
#     ):
#         provider, *_ = providers_fixture
#         tenant_id = provider.tenant_id

#         long_uid = "1" * 500
#         _ = Finding.objects.create(
#             tenant_id=tenant_id,
#             uid=long_uid,
#             delta=Finding.DeltaChoices.NEW,
#             check_metadata={},
#             status=StatusChoices.PASS,
#             status_extended="",
#             severity="high",
#             impact="high",
#             raw_result={},
#             check_id="test_check",
#             scan=scans_fixture[0],
#             first_seen_at=None,
#             muted=False,
#             compliance={},
#         )
#         assert Finding.objects.filter(uid=long_uid).exists()


@pytest.mark.django_db
class TestTaskManager:
    def test_get_with_retry_success(self):
        task_id = uuid.uuid4()
        call_counter = {"count": 0}

        def side_effect(*args, **kwargs):
            if call_counter["count"] < 2:
                call_counter["count"] += 1
                raise Task.DoesNotExist()
            return Task(id=task_id)

        with mock.patch.object(Task.objects, "get", side_effect=side_effect):
            task = Task.objects.get_with_retry(
                task_id, max_retries=5, delay_seconds=0.01
            )

        assert task.id == task_id
        assert call_counter["count"] == 2

    def test_get_with_retry_fail(self):
        non_existent_id = uuid.uuid4()

        with mock.patch.object(Task.objects, "get", side_effect=Task.DoesNotExist):
            with pytest.raises(Task.DoesNotExist) as excinfo:
                Task.objects.get_with_retry(
                    non_existent_id, max_retries=3, delay_seconds=0.01
                )

        assert str(non_existent_id) in str(excinfo.value)

@@ -1,19 +1,7 @@
from unittest.mock import ANY, Mock, patch

import pytest
from django.urls import reverse
from rest_framework import status

from api.models import (
    Membership,
    ProviderGroup,
    ProviderGroupMembership,
    Role,
    RoleProviderGroupRelationship,
    User,
    UserRoleRelationship,
)
from api.v1.serializers import TokenSerializer
from unittest.mock import patch, ANY, Mock


@pytest.mark.django_db
@@ -316,96 +304,3 @@ class TestProviderViewSet:
            reverse("provider-connection", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN


@pytest.mark.django_db
class TestLimitedVisibility:
    TEST_EMAIL = "rbac@rbac.com"
    TEST_PASSWORD = "thisisapassword123"

    @pytest.fixture
    def limited_admin_user(
        self, django_db_setup, django_db_blocker, tenants_fixture, providers_fixture
    ):
        with django_db_blocker.unblock():
            tenant = tenants_fixture[0]
            provider = providers_fixture[0]
            user = User.objects.create_user(
                name="testing",
                email=self.TEST_EMAIL,
                password=self.TEST_PASSWORD,
            )
            Membership.objects.create(
                user=user,
                tenant=tenant,
                role=Membership.RoleChoices.OWNER,
            )

            role = Role.objects.create(
                name="limited_visibility",
                tenant=tenant,
                manage_users=True,
                manage_account=True,
                manage_billing=True,
                manage_providers=True,
                manage_integrations=True,
                manage_scans=True,
                unlimited_visibility=False,
            )
            UserRoleRelationship.objects.create(
                user=user,
                role=role,
                tenant=tenant,
            )

            provider_group = ProviderGroup.objects.create(
                name="limited_visibility_group",
                tenant=tenant,
            )
            ProviderGroupMembership.objects.create(
                tenant=tenant,
                provider=provider,
                provider_group=provider_group,
            )

            RoleProviderGroupRelationship.objects.create(
                tenant=tenant, role=role, provider_group=provider_group
            )

            return user

    @pytest.fixture
    def authenticated_client_rbac_limited(
        self, limited_admin_user, tenants_fixture, client
    ):
        client.user = limited_admin_user
        tenant_id = tenants_fixture[0].id
        serializer = TokenSerializer(
            data={
                "type": "tokens",
                "email": self.TEST_EMAIL,
                "password": self.TEST_PASSWORD,
                "tenant_id": tenant_id,
            }
        )
        serializer.is_valid(raise_exception=True)
        access_token = serializer.validated_data["access"]
        client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
        return client

    def test_integrations(
        self, authenticated_client_rbac_limited, integrations_fixture, providers_fixture
    ):
        # Integration 2 is related to provider1 and provider 2
        # This user cannot see provider 2
        integration = integrations_fixture[1]

        response = authenticated_client_rbac_limited.get(
            reverse("integration-detail", kwargs={"pk": integration.id})
        )

        assert response.status_code == status.HTTP_200_OK
        assert integration.providers.count() == 2
        assert (
            response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
        )

@@ -19,7 +19,6 @@ from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from prowler.providers.m365.m365_provider import M365Provider


class TestMergeDicts:
@@ -105,7 +104,6 @@ class TestReturnProwlerProvider:
            (Provider.ProviderChoices.GCP.value, GcpProvider),
            (Provider.ProviderChoices.AZURE.value, AzureProvider),
            (Provider.ProviderChoices.KUBERNETES.value, KubernetesProvider),
            (Provider.ProviderChoices.M365.value, M365Provider),
        ],
    )
    def test_return_prowler_provider(self, provider_type, expected_provider):
@@ -178,10 +176,6 @@ class TestGetProwlerProviderKwargs:
                Provider.ProviderChoices.KUBERNETES.value,
                {"context": "provider_uid"},
            ),
            (
                Provider.ProviderChoices.M365.value,
                {},
            ),
        ],
    )
    def test_get_prowler_provider_kwargs(self, provider_type, expected_extra_kwargs):

File diff suppressed because it is too large
@@ -1,20 +1,16 @@
from datetime import datetime, timezone

from allauth.socialaccount.providers.oauth2.client import OAuth2Client
from django.contrib.postgres.aggregates import ArrayAgg
from django.db.models import Subquery
from rest_framework.exceptions import NotFound, ValidationError

from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Invitation, Provider, Resource
from api.v1.serializers import FindingMetadataSerializer
from api.models import Invitation, Provider
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.common.models import Connection
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from prowler.providers.m365.m365_provider import M365Provider


class CustomOAuth2Client(OAuth2Client):
@@ -55,14 +51,14 @@ def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:

def return_prowler_provider(
    provider: Provider,
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider]:
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider]:
    """Return the Prowler provider class based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: The corresponding provider class.
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: The corresponding provider class.

    Raises:
        ValueError: If the provider type specified in `provider.provider` is not supported.
@@ -76,8 +72,6 @@ def return_prowler_provider(
            prowler_provider = AzureProvider
        case Provider.ProviderChoices.KUBERNETES.value:
            prowler_provider = KubernetesProvider
        case Provider.ProviderChoices.M365.value:
            prowler_provider = M365Provider
        case _:
            raise ValueError(f"Provider type {provider.provider} not supported")
    return prowler_provider
@@ -110,15 +104,15 @@ def get_prowler_provider_kwargs(provider: Provider) -> dict:

def initialize_prowler_provider(
    provider: Provider,
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider:
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider:
    """Initialize a Prowler provider instance based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
        (`AwsProvider`, `AzureProvider`, `GcpProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: An instance of the corresponding provider class
        (`AwsProvider`, `AzureProvider`, `GcpProvider`, or `KubernetesProvider`) initialized with the
        provider's secrets.
    """
    prowler_provider = return_prowler_provider(provider)
@@ -136,12 +130,10 @@ def prowler_provider_connection_test(provider: Provider) -> Connection:
        Connection: A connection object representing the result of the connection test for the specified provider.
    """
    prowler_provider = return_prowler_provider(provider)

    try:
        prowler_provider_kwargs = provider.secret.secret
    except Provider.secret.RelatedObjectDoesNotExist as secret_error:
        return Connection(is_connected=False, error=secret_error)

    return prowler_provider.test_connection(
        **prowler_provider_kwargs, provider_id=provider.uid, raise_on_exception=False
    )
@@ -208,33 +200,3 @@ def validate_invitation(
        )

    return invitation


# ToRemove after removing the fallback mechanism in /findings/metadata
def get_findings_metadata_no_aggregations(tenant_id: str, filtered_queryset):
    filtered_ids = filtered_queryset.order_by().values("id")

    relevant_resources = Resource.all_objects.filter(
        tenant_id=tenant_id, findings__id__in=Subquery(filtered_ids)
    ).only("service", "region", "type")

    aggregation = relevant_resources.aggregate(
        services=ArrayAgg("service", flat=True),
        regions=ArrayAgg("region", flat=True),
        resource_types=ArrayAgg("type", flat=True),
    )

    services = sorted(set(aggregation["services"] or []))
    regions = sorted({region for region in aggregation["regions"] or [] if region})
    resource_types = sorted(set(aggregation["resource_types"] or []))

    result = {
        "services": services,
        "regions": regions,
        "resource_types": resource_types,
    }

    serializer = FindingMetadataSerializer(data=result)
    serializer.is_valid(raise_exception=True)

    return serializer.data

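# --- Example (not part of the diff): exercising the metadata fallback above. ---
# Hypothetical call site; `queryset` can be any Finding queryset already
# filtered for the tenant (manager and model names here are illustrative).
def findings_filter_values(tenant_id: str, queryset) -> dict:
    # Returns {"services": [...], "regions": [...], "resource_types": [...]}
    return get_findings_metadata_no_aggregations(tenant_id, queryset)
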
@@ -1,33 +0,0 @@
from rest_framework.response import Response


class PaginateByPkMixin:
    """
    Mixin to paginate on a list of PKs (cheaper than heavy JOINs),
    re-fetch the full objects with the desired select/prefetch,
    re-sort them to preserve DB ordering, then serialize + return.
    """

    def paginate_by_pk(
        self,
        request,  # noqa: F841
        base_queryset,
        manager,
        select_related: list[str] | None = None,
        prefetch_related: list[str] | None = None,
    ) -> Response:
        pk_list = base_queryset.values_list("id", flat=True)
        page = self.paginate_queryset(pk_list)
        if page is None:
            return Response(self.get_serializer(base_queryset, many=True).data)

        queryset = manager.filter(id__in=page)
        if select_related:
            queryset = queryset.select_related(*select_related)
        if prefetch_related:
            queryset = queryset.prefetch_related(*prefetch_related)

        queryset = sorted(queryset, key=lambda obj: page.index(obj.id))

        serialized = self.get_serializer(queryset, many=True).data
        return self.get_paginated_response(serialized)
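
# --- Example (not part of the diff): wiring the mixin into a viewset. ---
# Hypothetical viewset; model and relation names are illustrative.
from rest_framework import viewsets


class ExampleFindingViewSet(PaginateByPkMixin, viewsets.ReadOnlyModelViewSet):
    def list(self, request, *args, **kwargs):
        base_queryset = self.filter_queryset(self.get_queryset())
        # Paginate the cheap PK list first, then hydrate only the current page.
        return self.paginate_by_pk(
            request,
            base_queryset,
            manager=base_queryset.model.objects,
            select_related=["scan"],
            prefetch_related=["resources"],
        )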
@@ -1,122 +0,0 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError


class BaseValidateSerializer(serializers.Serializer):
    def validate(self, data):
        if hasattr(self, "initial_data"):
            initial_data = set(self.initial_data.keys()) - {"id", "type"}
            unknown_keys = initial_data - set(self.fields.keys())
            if unknown_keys:
                raise ValidationError(f"Invalid fields: {unknown_keys}")
        return data


# Integrations


class S3ConfigSerializer(BaseValidateSerializer):
    bucket_name = serializers.CharField()
    output_directory = serializers.CharField()

    class Meta:
        resource_name = "integrations"


class AWSCredentialSerializer(BaseValidateSerializer):
    role_arn = serializers.CharField(required=False)
    external_id = serializers.CharField(required=False)
    role_session_name = serializers.CharField(required=False)
    session_duration = serializers.IntegerField(
        required=False, min_value=900, max_value=43200
    )
    aws_access_key_id = serializers.CharField(required=False)
    aws_secret_access_key = serializers.CharField(required=False)
    aws_session_token = serializers.CharField(required=False)

    class Meta:
        resource_name = "integrations"


@extend_schema_field(
    {
        "oneOf": [
            {
                "type": "object",
                "title": "AWS Credentials",
                "properties": {
                    "role_arn": {
                        "type": "string",
                        "description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
                        "assumption.",
                    },
                    "external_id": {
                        "type": "string",
                        "description": "An identifier to enhance security for role assumption.",
                    },
                    "aws_access_key_id": {
                        "type": "string",
                        "description": "The AWS access key ID. Only required if the environment lacks pre-configured "
                        "AWS credentials.",
                    },
                    "aws_secret_access_key": {
                        "type": "string",
                        "description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
                        "no AWS credentials are pre-configured.",
                    },
                    "aws_session_token": {
                        "type": "string",
                        "description": "The session token for temporary credentials, if applicable.",
                    },
                    "session_duration": {
                        "type": "integer",
                        "minimum": 900,
                        "maximum": 43200,
                        "default": 3600,
                        "description": "The duration (in seconds) for the role session.",
                    },
                    "role_session_name": {
                        "type": "string",
                        "description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
                        "The regex used to validate this parameter is a string of characters consisting of "
                        "upper- and lower-case alphanumeric characters with no spaces. You can also include "
                        "underscores or any of the following characters: =,.@-\n\n"
                        "Examples:\n"
                        "- MySession123\n"
                        "- User_Session-1\n"
                        "- Test.Session@2",
                        "pattern": "^[a-zA-Z0-9=,.@_-]+$",
                    },
                },
            },
        ]
    }
)
class IntegrationCredentialField(serializers.JSONField):
    pass


@extend_schema_field(
    {
        "oneOf": [
            {
                "type": "object",
                "title": "Amazon S3",
                "properties": {
                    "bucket_name": {
                        "type": "string",
                        "description": "The name of the S3 bucket where files will be stored.",
                    },
                    "output_directory": {
                        "type": "string",
                        "description": "The directory path within the bucket where files will be saved.",
                    },
                },
                "required": ["bucket_name", "output_directory"],
            },
        ]
    }
)
class IntegrationConfigField(serializers.JSONField):
    pass
@@ -1,183 +0,0 @@
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework_json_api import serializers
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"title": "AWS Static Credentials",
|
||||
"properties": {
|
||||
"aws_access_key_id": {
|
||||
"type": "string",
|
||||
"description": "The AWS access key ID. Required for environments where no IAM role is being "
|
||||
"assumed and direct AWS access is needed.",
|
||||
},
|
||||
"aws_secret_access_key": {
|
||||
"type": "string",
|
||||
"description": "The AWS secret access key. Must accompany 'aws_access_key_id' to authorize "
|
||||
"access to AWS resources.",
|
||||
},
|
||||
"aws_session_token": {
|
||||
"type": "string",
|
||||
"description": "The session token associated with temporary credentials. Only needed for "
|
||||
"session-based or temporary AWS access.",
|
||||
},
|
||||
},
|
||||
"required": ["aws_access_key_id", "aws_secret_access_key"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "AWS Assume Role",
|
||||
"properties": {
|
||||
"role_arn": {
|
||||
"type": "string",
|
||||
"description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
|
||||
"assumption.",
|
||||
},
|
||||
"external_id": {
|
||||
"type": "string",
|
||||
"description": "An identifier to enhance security for role assumption.",
|
||||
},
|
||||
"aws_access_key_id": {
|
||||
"type": "string",
|
||||
"description": "The AWS access key ID. Only required if the environment lacks pre-configured "
|
||||
"AWS credentials.",
|
||||
},
|
||||
"aws_secret_access_key": {
|
||||
"type": "string",
|
||||
"description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
|
||||
"no AWS credentials are pre-configured.",
|
||||
},
|
||||
"aws_session_token": {
|
||||
"type": "string",
|
||||
"description": "The session token for temporary credentials, if applicable.",
|
||||
},
|
||||
"session_duration": {
|
||||
"type": "integer",
|
||||
"minimum": 900,
|
||||
"maximum": 43200,
|
||||
"default": 3600,
|
||||
"description": "The duration (in seconds) for the role session.",
|
||||
},
|
||||
"role_session_name": {
|
||||
"type": "string",
|
||||
"description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
|
||||
"The regex used to validate this parameter is a string of characters consisting of "
|
||||
"upper- and lower-case alphanumeric characters with no spaces. You can also include "
|
||||
"underscores or any of the following characters: =,.@-\n\n"
|
||||
"Examples:\n"
|
||||
"- MySession123\n"
|
||||
"- User_Session-1\n"
|
||||
"- Test.Session@2",
|
||||
"pattern": "^[a-zA-Z0-9=,.@_-]+$",
|
||||
},
|
||||
},
|
||||
"required": ["role_arn", "external_id"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Azure Static Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure application (client) ID for authentication in Azure AD.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the application (client) ID, providing "
|
||||
"secure access.",
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure tenant ID, representing the directory where the application is "
|
||||
"registered.",
|
||||
},
|
||||
},
|
||||
"required": ["client_id", "client_secret", "tenant_id"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "M365 Static Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure application (client) ID for authentication in Azure AD.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the application (client) ID, providing "
|
||||
"secure access.",
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure tenant ID, representing the directory where the application is "
|
||||
"registered.",
|
||||
},
|
||||
"user": {
|
||||
"type": "email",
|
||||
"description": "User microsoft email address.",
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"description": "User password.",
|
||||
},
|
||||
},
|
||||
"required": [
|
||||
"client_id",
|
||||
"client_secret",
|
||||
"tenant_id",
|
||||
"user",
|
||||
"password",
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GCP Static Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The client ID from Google Cloud, used to identify the application for GCP "
|
||||
"access.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the GCP client ID, required for secure "
|
||||
"access.",
|
||||
},
|
||||
"refresh_token": {
|
||||
"type": "string",
|
||||
"description": "A refresh token that allows the application to obtain new access tokens for "
|
||||
"extended use.",
|
||||
},
|
||||
},
|
||||
"required": ["client_id", "client_secret", "refresh_token"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GCP Service Account Key",
|
||||
"properties": {
|
||||
"service_account_key": {
|
||||
"type": "object",
|
||||
"description": "The service account key for GCP.",
|
||||
}
|
||||
},
|
||||
"required": ["service_account_key"],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Kubernetes Static Credentials",
|
||||
"properties": {
|
||||
"kubeconfig_content": {
|
||||
"type": "string",
|
||||
"description": "The content of the Kubernetes kubeconfig file, encoded as a string.",
|
||||
}
|
||||
},
|
||||
"required": ["kubeconfig_content"],
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
class ProviderSecretField(serializers.JSONField):
|
||||
pass
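
# Illustrative sketch: a payload that satisfies the "AWS Assume Role" variant
# documented in the schema above. All values are placeholders.
aws_assume_role_secret = {
    "role_arn": "arn:aws:iam::123456789012:role/ProwlerScanRole",
    "external_id": "unique-external-id",  # required together with role_arn
    "session_duration": 3600,  # integer, 900-43200 seconds, default 3600
    "role_session_name": "MySession123",  # must match ^[a-zA-Z0-9=,.@_-]+$
}
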
@@ -16,8 +16,6 @@ from rest_framework_simplejwt.tokens import RefreshToken
from api.models import (
ComplianceOverview,
Finding,
Integration,
IntegrationProviderRelationship,
Invitation,
InvitationRoleRelationship,
Membership,
@@ -36,13 +34,6 @@ from api.models import (
UserRoleRelationship,
)
from api.rls import Tenant
from api.v1.serializer_utils.integrations import (
AWSCredentialSerializer,
IntegrationConfigField,
IntegrationCredentialField,
S3ConfigSerializer,
)
from api.v1.serializer_utils.providers import ProviderSecretField

# Tokens

@@ -852,10 +843,6 @@ class ScanSerializer(RLSSerializer):
"url",
]

included_serializers = {
"provider": "api.v1.serializers.ProviderIncludeSerializer",
}


class ScanIncludeSerializer(RLSSerializer):
trigger = serializers.ChoiceField(
@@ -960,15 +947,6 @@ class ScanReportSerializer(serializers.Serializer):
fields = ["id"]


class ScanComplianceReportSerializer(serializers.Serializer):
id = serializers.CharField(source="scan")
name = serializers.CharField()

class Meta:
resource_name = "scan-reports"
fields = ["id", "name"]


class ResourceTagSerializer(RLSSerializer):
"""
Serializer for the ResourceTag model
@@ -1101,7 +1079,6 @@ class FindingSerializer(RLSSerializer):
"inserted_at",
"updated_at",
"first_seen_at",
"muted",
"url",
# Relationships
"scan",
@@ -1151,16 +1128,12 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
serializer = GCPProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.KUBERNETES.value:
serializer = KubernetesProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.M365.value:
serializer = M365ProviderSecret(data=secret)
else:
raise serializers.ValidationError(
{"provider": f"Provider type not supported {provider_type}"}
)
elif secret_type == ProviderSecret.TypeChoices.ROLE:
serializer = AWSRoleAssumptionProviderSecret(data=secret)
elif secret_type == ProviderSecret.TypeChoices.SERVICE_ACCOUNT:
serializer = GCPServiceAccountProviderSecret(data=secret)
else:
raise serializers.ValidationError(
{"secret_type": f"Secret type not supported: {secret_type}"}
@@ -1194,17 +1167,6 @@ class AzureProviderSecret(serializers.Serializer):
resource_name = "provider-secrets"


class M365ProviderSecret(serializers.Serializer):
client_id = serializers.CharField()
client_secret = serializers.CharField()
tenant_id = serializers.CharField()
user = serializers.EmailField()
password = serializers.CharField()

class Meta:
resource_name = "provider-secrets"


class GCPProviderSecret(serializers.Serializer):
client_id = serializers.CharField()
client_secret = serializers.CharField()
@@ -1214,13 +1176,6 @@ class GCPProviderSecret(serializers.Serializer):
resource_name = "provider-secrets"


class GCPServiceAccountProviderSecret(serializers.Serializer):
service_account_key = serializers.JSONField()

class Meta:
resource_name = "provider-secrets"


class KubernetesProviderSecret(serializers.Serializer):
kubeconfig_content = serializers.CharField()
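
# Illustrative sketch: validating one secret variant with the DRF serializers
# defined above; every field value here is a placeholder.
secret = {
    "client_id": "00000000-0000-0000-0000-000000000000",
    "client_secret": "placeholder-secret",
    "tenant_id": "11111111-1111-1111-1111-111111111111",
    "user": "user@example.com",
    "password": "placeholder-password",
}
M365ProviderSecret(data=secret).is_valid(raise_exception=True)
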
@@ -1243,6 +1198,141 @@ class AWSRoleAssumptionProviderSecret(serializers.Serializer):
resource_name = "provider-secrets"


@extend_schema_field(
{
"oneOf": [
{
"type": "object",
"title": "AWS Static Credentials",
"properties": {
"aws_access_key_id": {
"type": "string",
"description": "The AWS access key ID. Required for environments where no IAM role is being "
"assumed and direct AWS access is needed.",
},
"aws_secret_access_key": {
"type": "string",
"description": "The AWS secret access key. Must accompany 'aws_access_key_id' to authorize "
"access to AWS resources.",
},
"aws_session_token": {
"type": "string",
"description": "The session token associated with temporary credentials. Only needed for "
"session-based or temporary AWS access.",
},
},
"required": ["aws_access_key_id", "aws_secret_access_key"],
},
{
"type": "object",
"title": "AWS Assume Role",
"properties": {
"role_arn": {
"type": "string",
"description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
"assumption.",
},
"external_id": {
"type": "string",
"description": "An identifier to enhance security for role assumption.",
},
"aws_access_key_id": {
"type": "string",
"description": "The AWS access key ID. Only required if the environment lacks pre-configured "
"AWS credentials.",
},
"aws_secret_access_key": {
"type": "string",
"description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
"no AWS credentials are pre-configured.",
},
"aws_session_token": {
"type": "string",
"description": "The session token for temporary credentials, if applicable.",
},
"session_duration": {
"type": "integer",
"minimum": 900,
"maximum": 43200,
"default": 3600,
"description": "The duration (in seconds) for the role session.",
},
"role_session_name": {
"type": "string",
"description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
"The regex used to validate this parameter is a string of characters consisting of "
"upper- and lower-case alphanumeric characters with no spaces. You can also include "
"underscores or any of the following characters: =,.@-\n\n"
"Examples:\n"
"- MySession123\n"
"- User_Session-1\n"
"- Test.Session@2",
"pattern": "^[a-zA-Z0-9=,.@_-]+$",
},
},
"required": ["role_arn", "external_id"],
},
{
"type": "object",
"title": "Azure Static Credentials",
"properties": {
"client_id": {
"type": "string",
"description": "The Azure application (client) ID for authentication in Azure AD.",
},
"client_secret": {
"type": "string",
"description": "The client secret associated with the application (client) ID, providing "
"secure access.",
},
"tenant_id": {
"type": "string",
"description": "The Azure tenant ID, representing the directory where the application is "
"registered.",
},
},
"required": ["client_id", "client_secret", "tenant_id"],
},
{
"type": "object",
"title": "GCP Static Credentials",
"properties": {
"client_id": {
"type": "string",
"description": "The client ID from Google Cloud, used to identify the application for GCP "
"access.",
},
"client_secret": {
"type": "string",
"description": "The client secret associated with the GCP client ID, required for secure "
"access.",
},
"refresh_token": {
"type": "string",
"description": "A refresh token that allows the application to obtain new access tokens for "
"extended use.",
},
},
"required": ["client_id", "client_secret", "refresh_token"],
},
{
"type": "object",
"title": "Kubernetes Static Credentials",
"properties": {
"kubeconfig_content": {
"type": "string",
"description": "The content of the Kubernetes kubeconfig file, encoded as a string.",
}
},
"required": ["kubeconfig_content"],
},
]
}
)
class ProviderSecretField(serializers.JSONField):
pass
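
# Illustrative sketch: at runtime ProviderSecretField behaves like a plain
# JSONField; the @extend_schema_field decorator above only enriches the
# generated OpenAPI documentation with the oneOf payload variants.
field = ProviderSecretField()
payload = {"kubeconfig_content": "apiVersion: v1"}
assert field.run_validation(payload) == payload
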

class ProviderSecretSerializer(RLSSerializer):
"""
Serializer for the ProviderSecret model.
@@ -1516,8 +1606,8 @@ class RoleSerializer(RLSSerializer, BaseWriteSerializer):
"manage_account",
# Disable for the first release
# "manage_billing",
# "manage_integrations",
# /Disable for the first release
"manage_integrations",
"manage_providers",
"manage_scans",
"permission_state",
@@ -1799,13 +1889,6 @@ class ComplianceOverviewFullSerializer(ComplianceOverviewSerializer):
return obj.requirements


class ComplianceOverviewMetadataSerializer(serializers.Serializer):
regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)

class Meta:
resource_name = "compliance-overviews-metadata"


# Overviews


@@ -1930,201 +2013,3 @@ class ScheduleDailyCreateSerializer(serializers.Serializer):
if unknown_keys:
raise ValidationError(f"Invalid fields: {unknown_keys}")
return data


# Integrations


class BaseWriteIntegrationSerializer(BaseWriteSerializer):
@staticmethod
def validate_integration_data(
integration_type: str,
providers: list[Provider], # noqa
configuration: dict,
credentials: dict,
):
if integration_type == Integration.IntegrationChoices.S3:
config_serializer = S3ConfigSerializer
credentials_serializers = [AWSCredentialSerializer]
# TODO: This will be required for AWS Security Hub
# if providers and not all(
# provider.provider == Provider.ProviderChoices.AWS
# for provider in providers
# ):
# raise serializers.ValidationError(
# {"providers": "All providers must be AWS for the S3 integration."}
# )
else:
raise serializers.ValidationError(
{
"integration_type": f"Integration type not supported yet: {integration_type}"
}
)

config_serializer(data=configuration).is_valid(raise_exception=True)

for cred_serializer in credentials_serializers:
try:
cred_serializer(data=credentials).is_valid(raise_exception=True)
break
except ValidationError:
continue
else:
raise ValidationError(
{"credentials": "Invalid credentials for the integration type."}
)
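
# Illustrative sketch: the credentials check above relies on Python's
# for/else -- the else branch runs only when the loop finishes without
# hitting `break`, i.e. when no serializer accepted the credentials.
for candidate in ("does-not-match", "matches"):
    if candidate == "matches":
        break
else:
    raise RuntimeError("no candidate matched")  # not reached in this example
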

class IntegrationSerializer(RLSSerializer):
"""
Serializer for the Integration model.
"""

providers = serializers.ResourceRelatedField(
queryset=Provider.objects.all(), many=True
)

class Meta:
model = Integration
fields = [
"id",
"inserted_at",
"updated_at",
"enabled",
"connected",
"connection_last_checked_at",
"integration_type",
"configuration",
"providers",
"url",
]

included_serializers = {
"providers": "api.v1.serializers.ProviderIncludeSerializer",
}

def to_representation(self, instance):
representation = super().to_representation(instance)
allowed_providers = self.context.get("allowed_providers")
if allowed_providers:
allowed_provider_ids = {str(provider.id) for provider in allowed_providers}
representation["providers"] = [
provider
for provider in representation["providers"]
if provider["id"] in allowed_provider_ids
]
return representation


class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
credentials = IntegrationCredentialField(write_only=True)
configuration = IntegrationConfigField()
providers = serializers.ResourceRelatedField(
queryset=Provider.objects.all(), many=True, required=False
)

class Meta:
model = Integration
fields = [
"inserted_at",
"updated_at",
"enabled",
"connected",
"connection_last_checked_at",
"integration_type",
"configuration",
"credentials",
"providers",
]
extra_kwargs = {
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"connected": {"read_only": True},
"enabled": {"read_only": True},
"connection_last_checked_at": {"read_only": True},
}

def validate(self, attrs):
integration_type = attrs.get("integration_type")
providers = attrs.get("providers")
configuration = attrs.get("configuration")
credentials = attrs.get("credentials")

validated_attrs = super().validate(attrs)
self.validate_integration_data(
integration_type, providers, configuration, credentials
)
return validated_attrs

def create(self, validated_data):
tenant_id = self.context.get("tenant_id")

providers = validated_data.pop("providers", [])
integration = Integration.objects.create(tenant_id=tenant_id, **validated_data)

through_model_instances = [
IntegrationProviderRelationship(
integration=integration,
provider=provider,
tenant_id=tenant_id,
)
for provider in providers
]
IntegrationProviderRelationship.objects.bulk_create(through_model_instances)

return integration


class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
credentials = IntegrationCredentialField(write_only=True, required=False)
configuration = IntegrationConfigField(required=False)
providers = serializers.ResourceRelatedField(
queryset=Provider.objects.all(), many=True, required=False
)

class Meta:
model = Integration
fields = [
"inserted_at",
"updated_at",
"enabled",
"connected",
"connection_last_checked_at",
"integration_type",
"configuration",
"credentials",
"providers",
]
extra_kwargs = {
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"connected": {"read_only": True},
"connection_last_checked_at": {"read_only": True},
"integration_type": {"read_only": True},
}

def validate(self, attrs):
integration_type = self.instance.integration_type
providers = attrs.get("providers")
configuration = attrs.get("configuration") or self.instance.configuration
credentials = attrs.get("credentials") or self.instance.credentials

validated_attrs = super().validate(attrs)
self.validate_integration_data(
integration_type, providers, configuration, credentials
)
return validated_attrs

def update(self, instance, validated_data):
tenant_id = self.context.get("tenant_id")
if validated_data.get("providers") is not None:
instance.providers.clear()
new_relationships = [
IntegrationProviderRelationship(
integration=instance, provider=provider, tenant_id=tenant_id
)
for provider in validated_data["providers"]
]
IntegrationProviderRelationship.objects.bulk_create(new_relationships)

return super().update(instance, validated_data)

@@ -10,7 +10,6 @@ from api.v1.views import (
FindingViewSet,
GithubSocialLoginView,
GoogleSocialLoginView,
IntegrationViewSet,
InvitationAcceptViewSet,
InvitationViewSet,
MembershipViewSet,
@@ -48,7 +47,6 @@ router.register(
)
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")
router.register(r"integrations", IntegrationViewSet, basename="integration")

tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(

File diff suppressed because it is too large

@@ -50,9 +50,9 @@ class RLSTask(Task):

tenant_id = kwargs.get("tenant_id")
with rls_transaction(tenant_id):
APITask.objects.update_or_create(
APITask.objects.create(
id=task_result_instance.task_id,
tenant_id=tenant_id,
defaults={"task_runner_task": task_result_instance},
task_runner_task=task_result_instance,
)
return result
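
# Illustrative sketch: why the two variants above differ. update_or_create()
# keys on the lookup kwargs and only writes `defaults`, so a retried Celery
# task updates the existing APITask row in place, whereas a second create()
# with the same primary key would raise IntegrityError.
obj, created = APITask.objects.update_or_create(
    id="00000000-0000-0000-0000-000000000000",  # placeholder task id
    tenant_id="00000000-0000-0000-0000-000000000000",  # placeholder tenant id
    defaults={"task_runner_task": task_result_instance},
)
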
@@ -2,9 +2,10 @@ import json
import logging
from enum import StrEnum

from config.env import env
from django_guid.log_filters import CorrelationId

from config.env import env


class BackendLogger(StrEnum):
GUNICORN = "gunicorn"
@@ -38,9 +39,9 @@ class NDJSONFormatter(logging.Formatter):
"funcName": record.funcName,
"process": record.process,
"thread": record.thread,
"transaction_id": (
record.transaction_id if hasattr(record, "transaction_id") else None
),
"transaction_id": record.transaction_id
if hasattr(record, "transaction_id")
else None,
}

# Add REST API extra fields

@@ -111,7 +111,6 @@ SPECTACULAR_SETTINGS = {
"PREPROCESSING_HOOKS": [
"drf_spectacular_jsonapi.hooks.fix_nested_path_parameters",
],
"TITLE": "API Reference - Prowler",
}

WSGI_APPLICATION = "config.wsgi.application"
@@ -237,10 +236,3 @@ DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY = env.str(
)
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN = env.str("DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN", "")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION = env.str("DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION", "")

# HTTP Security Headers
SECURE_CONTENT_TYPE_NOSNIFF = True
X_FRAME_OPTIONS = "DENY"
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"

DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)

@@ -12,8 +12,6 @@ IGNORED_EXCEPTIONS = [
"UnauthorizedOperation",
"AuthFailure",
"InvalidClientTokenId",
"AWSInvalidProviderIdError",
"InternalServerErrorException",
"AccessDenied",
"No Shodan API Key", # Shodan Check
"RequestLimitExceeded", # For now we don't want to log the RequestLimitExceeded errors
@@ -35,13 +33,6 @@ IGNORED_EXCEPTIONS = [
"ValidationException",
"AWSSecretAccessKeyInvalidError",
"InvalidAction",
"InvalidRequestException",
"RequestExpired",
"ConnectionClosedError",
"MaxRetryError",
"AWSAccessKeyIDInvalidError",
"AWSSessionTokenExpiredError",
"EndpointConnectionError", # AWS Service is not available in a region
"Pool is closed", # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
# Authentication Errors from GCP
"ClientAuthenticationError",
@@ -50,8 +41,6 @@ IGNORED_EXCEPTIONS = [
"Permission denied to get service",
"API has not been used in project",
"HttpError 404 when requesting",
"HttpError 403 when requesting",
"HttpError 400 when requesting",
"GCPNoAccesibleProjectsError",
# Authentication Errors from Azure
"ClientAuthenticationError",
@@ -60,7 +49,6 @@ IGNORED_EXCEPTIONS = [
"AzureNotValidClientIdError",
"AzureNotValidClientSecretError",
"AzureNotValidTenantIdError",
"AzureInvalidProviderIdError",
"AzureTenantIdAndClientSecretNotBelongingToClientIdError",
"AzureTenantIdAndClientIdNotBelongingToClientSecretError",
"AzureClientIdAndClientSecretNotBelongingToTenantIdError",
@@ -97,6 +85,4 @@ sentry_sdk.init(
# possible.
"continuous_profiling_auto_start": True,
},
attach_stacktrace=True,
ignore_errors=IGNORED_EXCEPTIONS,
)

@@ -1,13 +1,13 @@
from config.env import env

# Provider Oauth settings
GOOGLE_OAUTH_CLIENT_ID = env("SOCIAL_GOOGLE_OAUTH_CLIENT_ID", default="")
GOOGLE_OAUTH_CLIENT_SECRET = env("SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET", default="")
GOOGLE_OAUTH_CALLBACK_URL = env("SOCIAL_GOOGLE_OAUTH_CALLBACK_URL", default="")
# Google Oauth settings
GOOGLE_OAUTH_CLIENT_ID = env("DJANGO_GOOGLE_OAUTH_CLIENT_ID", default="")
GOOGLE_OAUTH_CLIENT_SECRET = env("DJANGO_GOOGLE_OAUTH_CLIENT_SECRET", default="")
GOOGLE_OAUTH_CALLBACK_URL = env("DJANGO_GOOGLE_OAUTH_CALLBACK_URL", default="")

GITHUB_OAUTH_CLIENT_ID = env("SOCIAL_GITHUB_OAUTH_CLIENT_ID", default="")
GITHUB_OAUTH_CLIENT_SECRET = env("SOCIAL_GITHUB_OAUTH_CLIENT_SECRET", default="")
GITHUB_OAUTH_CALLBACK_URL = env("SOCIAL_GITHUB_OAUTH_CALLBACK_URL", default="")
GITHUB_OAUTH_CLIENT_ID = env("DJANGO_GITHUB_OAUTH_CLIENT_ID", default="")
GITHUB_OAUTH_CLIENT_SECRET = env("DJANGO_GITHUB_OAUTH_CLIENT_SECRET", default="")
GITHUB_OAUTH_CALLBACK_URL = env("DJANGO_GITHUB_OAUTH_CALLBACK_URL", default="")

# Allauth settings
ACCOUNT_LOGIN_METHODS = {"email"} # Use Email / Password authentication

@@ -10,14 +10,11 @@ from django.urls import reverse
from django_celery_results.models import TaskResult
from rest_framework import status
from rest_framework.test import APIClient
from tasks.jobs.backfill import backfill_resource_scan_summaries

from api.db_utils import rls_transaction
from api.models import (
ComplianceOverview,
Finding,
Integration,
IntegrationProviderRelationship,
Invitation,
Membership,
Provider,
@@ -656,7 +653,6 @@ def findings_fixture(scans_fixture, resources_fixture):
"Description": "test description orange juice",
},
first_seen_at="2024-01-02T00:00:00Z",
muted=True,
)

finding2.add_resources([resource2])
@@ -881,95 +877,6 @@ def scan_summaries_fixture(tenants_fixture, providers_fixture):
)


@pytest.fixture
def integrations_fixture(providers_fixture):
provider1, provider2, *_ = providers_fixture
tenant_id = provider1.tenant_id
integration1 = Integration.objects.create(
tenant_id=tenant_id,
enabled=True,
connected=True,
integration_type="amazon_s3",
configuration={"key": "value"},
credentials={"psswd": "1234"},
)
IntegrationProviderRelationship.objects.create(
tenant_id=tenant_id,
integration=integration1,
provider=provider1,
)

integration2 = Integration.objects.create(
tenant_id=tenant_id,
enabled=True,
connected=True,
integration_type="amazon_s3",
configuration={"key": "value"},
credentials={"psswd": "1234"},
)
IntegrationProviderRelationship.objects.create(
tenant_id=tenant_id,
integration=integration2,
provider=provider1,
)
IntegrationProviderRelationship.objects.create(
tenant_id=tenant_id,
integration=integration2,
provider=provider2,
)

return integration1, integration2


@pytest.fixture
def backfill_scan_metadata_fixture(scans_fixture, findings_fixture):
for scan_instance in scans_fixture:
tenant_id = scan_instance.tenant_id
scan_id = scan_instance.id
backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)


@pytest.fixture(scope="function")
def latest_scan_finding(authenticated_client, providers_fixture, resources_fixture):
provider = providers_fixture[0]
tenant_id = str(providers_fixture[0].tenant_id)
resource = resources_fixture[0]
scan = Scan.objects.create(
name="latest completed scan",
provider=provider,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.COMPLETED,
tenant_id=tenant_id,
)
finding = Finding.objects.create(
tenant_id=tenant_id,
uid="test_finding_uid_1",
scan=scan,
delta="new",
status=Status.FAIL,
status_extended="test status extended ",
impact=Severity.critical,
impact_extended="test impact extended one",
severity=Severity.critical,
raw_result={
"status": Status.FAIL,
"impact": Severity.critical,
"severity": Severity.critical,
},
tags={"test": "dev-qa"},
check_id="test_check_id",
check_metadata={
"CheckId": "test_check_id",
"Description": "test description apple sauce",
},
first_seen_at="2024-01-02T00:00:00Z",
)

finding.add_resources([resource])
backfill_resource_scan_summaries(tenant_id, str(scan.id))
return finding


def get_authorization_header(access_token: str) -> dict:
return {"Authorization": f"Bearer {access_token}"}
@@ -1,61 +0,0 @@
from api.db_utils import rls_transaction
from api.models import (
Resource,
ResourceFindingMapping,
ResourceScanSummary,
Scan,
StateChoices,
)


def backfill_resource_scan_summaries(tenant_id: str, scan_id: str):
with rls_transaction(tenant_id):
if ResourceScanSummary.objects.filter(
tenant_id=tenant_id, scan_id=scan_id
).exists():
return {"status": "already backfilled"}

with rls_transaction(tenant_id):
if not Scan.objects.filter(
tenant_id=tenant_id,
id=scan_id,
state__in=(StateChoices.COMPLETED, StateChoices.FAILED),
).exists():
return {"status": "scan is not completed"}

resource_ids_qs = (
ResourceFindingMapping.objects.filter(
tenant_id=tenant_id, finding__scan_id=scan_id
)
.values_list("resource_id", flat=True)
.distinct()
)

resource_ids = list(resource_ids_qs)

if not resource_ids:
return {"status": "no resources to backfill"}

resources_qs = Resource.objects.filter(
tenant_id=tenant_id, id__in=resource_ids
).only("id", "service", "region", "type")

summaries = []
for resource in resources_qs.iterator():
summaries.append(
ResourceScanSummary(
tenant_id=tenant_id,
scan_id=scan_id,
resource_id=str(resource.id),
service=resource.service,
region=resource.region,
resource_type=resource.type,
)
)

for i in range(0, len(summaries), 500):
ResourceScanSummary.objects.bulk_create(
summaries[i : i + 500], ignore_conflicts=True
)

return {"status": "backfilled", "inserted": len(summaries)}
@@ -1,5 +1,5 @@
from celery.utils.log import get_task_logger
from django.db import DatabaseError
from django.db import transaction

from api.db_router import MainRouter
from api.db_utils import batch_delete, rls_transaction
@@ -8,12 +8,11 @@ from api.models import Finding, Provider, Resource, Scan, ScanSummary, Tenant
logger = get_task_logger(__name__)


def delete_provider(tenant_id: str, pk: str):
def delete_provider(pk: str):
"""
Gracefully deletes an instance of a provider along with its related data.

Args:
tenant_id (str): Tenant ID the resources belong to.
pk (str): The primary key of the Provider instance to delete.

Returns:
@@ -23,31 +22,33 @@ def delete_provider(tenant_id: str, pk: str):
Raises:
Provider.DoesNotExist: If no instance with the provided primary key exists.
"""
with rls_transaction(tenant_id):
instance = Provider.all_objects.get(pk=pk)
deletion_summary = {}
deletion_steps = [
("Scan Summaries", ScanSummary.all_objects.filter(scan__provider=instance)),
("Findings", Finding.all_objects.filter(scan__provider=instance)),
("Resources", Resource.all_objects.filter(provider=instance)),
("Scans", Scan.all_objects.filter(provider=instance)),
]
instance = Provider.all_objects.get(pk=pk)
deletion_summary = {}

for step_name, queryset in deletion_steps:
try:
_, step_summary = batch_delete(tenant_id, queryset)
deletion_summary.update(step_summary)
except DatabaseError as db_error:
logger.error(f"Error deleting {step_name}: {db_error}")
raise
with transaction.atomic():
# Delete Scan Summaries
scan_summaries_qs = ScanSummary.all_objects.filter(scan__provider=instance)
_, scans_summ_summary = batch_delete(scan_summaries_qs)
deletion_summary.update(scans_summ_summary)

try:
with rls_transaction(tenant_id):
_, provider_summary = instance.delete()
# Delete Findings
findings_qs = Finding.all_objects.filter(scan__provider=instance)
_, findings_summary = batch_delete(findings_qs)
deletion_summary.update(findings_summary)

# Delete Resources
resources_qs = Resource.all_objects.filter(provider=instance)
_, resources_summary = batch_delete(resources_qs)
deletion_summary.update(resources_summary)

# Delete Scans
scans_qs = Scan.all_objects.filter(provider=instance)
_, scans_summary = batch_delete(scans_qs)
deletion_summary.update(scans_summary)

provider_deleted_count, provider_summary = instance.delete()
deletion_summary.update(provider_summary)
except DatabaseError as db_error:
logger.error(f"Error deleting Provider: {db_error}")
raise

return deletion_summary
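
# Illustrative sketch: both variants above share the same deletion order --
# child rows first, provider last -- so the final instance.delete() never has
# to cascade over the bulk of a tenant's findings in a single query.
DELETION_ORDER = ("ScanSummary", "Finding", "Resource", "Scan", "Provider")
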

@@ -65,8 +66,9 @@ def delete_tenant(pk: str):
deletion_summary = {}

for provider in Provider.objects.using(MainRouter.admin_db).filter(tenant_id=pk):
summary = delete_provider(pk, provider.id)
deletion_summary.update(summary)
with rls_transaction(pk):
summary = delete_provider(provider.id)
deletion_summary.update(summary)

Tenant.objects.using(MainRouter.admin_db).filter(id=pk).delete()


@@ -13,41 +13,6 @@ from prowler.config.config import (
json_ocsf_file_suffix,
output_file_timestamp,
)
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
AWSWellArchitected,
)
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS
from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_kubernetes import (
KubernetesISO27001,
)
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
AzureMitreAttack,
)
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_gcp import GCPMitreAttack
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_aws import (
ProwlerThreatScoreAWS,
)
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_azure import (
ProwlerThreatScoreAzure,
)
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_gcp import (
ProwlerThreatScoreGCP,
)
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_m365 import (
ProwlerThreatScoreM365,
)
from prowler.lib.outputs.csv.csv import CSV
from prowler.lib.outputs.html.html import HTML
from prowler.lib.outputs.ocsf.ocsf import OCSF
@@ -55,44 +20,6 @@ from prowler.lib.outputs.ocsf.ocsf import OCSF
logger = get_task_logger(__name__)


COMPLIANCE_CLASS_MAP = {
"aws": [
(lambda name: name.startswith("cis_"), AWSCIS),
(lambda name: name == "mitre_attack_aws", AWSMitreAttack),
(lambda name: name.startswith("ens_"), AWSENS),
(
lambda name: name.startswith("aws_well_architected_framework"),
AWSWellArchitected,
),
(lambda name: name.startswith("iso27001_"), AWSISO27001),
(lambda name: name.startswith("kisa"), AWSKISAISMSP),
(lambda name: name == "prowler_threatscore_aws", ProwlerThreatScoreAWS),
],
"azure": [
(lambda name: name.startswith("cis_"), AzureCIS),
(lambda name: name == "mitre_attack_azure", AzureMitreAttack),
(lambda name: name.startswith("ens_"), AzureENS),
(lambda name: name.startswith("iso27001_"), AzureISO27001),
(lambda name: name == "prowler_threatscore_azure", ProwlerThreatScoreAzure),
],
"gcp": [
(lambda name: name.startswith("cis_"), GCPCIS),
(lambda name: name == "mitre_attack_gcp", GCPMitreAttack),
(lambda name: name.startswith("ens_"), GCPENS),
(lambda name: name.startswith("iso27001_"), GCPISO27001),
(lambda name: name == "prowler_threatscore_gcp", ProwlerThreatScoreGCP),
],
"kubernetes": [
(lambda name: name.startswith("cis_"), KubernetesCIS),
(lambda name: name.startswith("iso27001_"), KubernetesISO27001),
],
"m365": [
(lambda name: name.startswith("cis_"), M365CIS),
(lambda name: name == "prowler_threatscore_m365", ProwlerThreatScoreM365),
],
}


# Predefined mapping for output formats and their configurations
OUTPUT_FORMATS_MAPPING = {
"csv": {
@@ -116,17 +43,13 @@ def _compress_output_files(output_directory: str) -> str:
str: The full path to the newly created ZIP archive.
"""
zip_path = f"{output_directory}.zip"
parent_dir = os.path.dirname(output_directory)
zip_path_abs = os.path.abspath(zip_path)

with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
for foldername, _, filenames in os.walk(parent_dir):
for filename in filenames:
file_path = os.path.join(foldername, filename)
if os.path.abspath(file_path) == zip_path_abs:
continue
arcname = os.path.relpath(file_path, start=parent_dir)
zipf.write(file_path, arcname)
for suffix in [config["suffix"] for config in OUTPUT_FORMATS_MAPPING.values()]:
zipf.write(
f"{output_directory}{suffix}",
f"output/{output_directory.split('/')[-1]}{suffix}",
)

return zip_path

@@ -179,38 +102,25 @@ def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str:
Raises:
botocore.exceptions.ClientError: If the upload attempt to S3 fails for any reason.
"""
bucket = base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET
if not bucket:
return None
if not base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET:
return

try:
s3 = get_s3_client()

# Upload the ZIP file (outputs) to the S3 bucket
zip_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
s3_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
s3.upload_file(
Filename=zip_path,
Bucket=bucket,
Key=zip_key,
Bucket=base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET,
Key=s3_key,
)

# Upload the compliance directory to the S3 bucket
compliance_dir = os.path.join(os.path.dirname(zip_path), "compliance")
for filename in os.listdir(compliance_dir):
local_path = os.path.join(compliance_dir, filename)
if not os.path.isfile(local_path):
continue
file_key = f"{tenant_id}/{scan_id}/compliance/{filename}"
s3.upload_file(Filename=local_path, Bucket=bucket, Key=file_key)

return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{zip_key}"
return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{s3_key}"
except (ClientError, NoCredentialsError, ParamValidationError, ValueError) as e:
logger.error(f"S3 upload failed: {str(e)}")


def _generate_output_directory(
output_directory, prowler_provider: object, tenant_id: str, scan_id: str
) -> tuple[str, str]:
) -> str:
"""
Generate a file system path for the output directory of a prowler scan.

@@ -235,8 +145,7 @@ def _generate_output_directory(

Example:
>>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
'/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56',
'/tmp/tenant-1234/aws/scan-5678/compliance/prowler-output-2023-02-15T12:34:56'
'/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56'
"""
path = (
f"{output_directory}/{tenant_id}/{scan_id}/prowler-output-"
@@ -244,10 +153,4 @@ def _generate_output_directory(
)
os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)

compliance_path = (
f"{output_directory}/{tenant_id}/{scan_id}/compliance/prowler-output-"
f"{prowler_provider}-{output_file_timestamp}"
)
os.makedirs("/".join(compliance_path.split("/")[:-1]), exist_ok=True)

return path, compliance_path
return path
@@ -1,4 +1,3 @@
import json
import time
from copy import deepcopy
from datetime import datetime, timezone
@@ -7,7 +6,6 @@ from celery.utils.log import get_task_logger
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
from django.db import IntegrityError, OperationalError
from django.db.models import Case, Count, IntegerField, Sum, When
from tasks.utils import CustomEncoder

from api.compliance import (
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
@@ -19,7 +17,6 @@ from api.models import (
Finding,
Provider,
Resource,
ResourceScanSummary,
ResourceTag,
Scan,
ScanSummary,
@@ -122,9 +119,7 @@ def perform_prowler_scan(
check_status_by_region = {}
exception = None
unique_resources = set()
scan_resource_cache: set[tuple[str, str, str, str]] = set()
start_time = time.time()
exc = None

with rls_transaction(tenant_id):
provider_instance = Provider.objects.get(pk=provider_id)
@@ -140,7 +135,7 @@ def perform_prowler_scan(
provider_instance.connected = True
except Exception as e:
provider_instance.connected = False
exc = ValueError(
raise ValueError(
f"Provider {provider_instance.provider} is not connected: {e}"
)
finally:
@@ -149,11 +144,6 @@ def perform_prowler_scan(
)
provider_instance.save()

# If the provider is not connected, raise an exception outside the transaction.
# If raised within the transaction, the transaction will be rolled back and the provider will not be marked as not connected.
if exc:
raise exc

prowler_scan = ProwlerScan(provider=prowler_provider, checks=checks_to_execute)

resource_cache = {}
@@ -201,17 +191,6 @@ def perform_prowler_scan(
if resource_instance.type != finding.resource_type:
resource_instance.type = finding.resource_type
updated_fields.append("type")
if resource_instance.metadata != finding.resource_metadata:
resource_instance.metadata = json.dumps(
finding.resource_metadata, cls=CustomEncoder
)
updated_fields.append("metadata")
if resource_instance.details != finding.resource_details:
resource_instance.details = finding.resource_details
updated_fields.append("details")
if resource_instance.partition != finding.partition:
resource_instance.partition = finding.partition
updated_fields.append("partition")
if updated_fields:
with rls_transaction(tenant_id):
resource_instance.save(update_fields=updated_fields)
@@ -288,8 +267,6 @@ def perform_prowler_scan(
check_id=finding.check_id,
scan=scan_instance,
first_seen_at=last_first_seen_at,
muted=finding.muted,
compliance=finding.compliance,
)
finding_instance.add_resources([resource_instance])

@@ -303,16 +280,6 @@ def perform_prowler_scan(
continue
region_dict[finding.check_id] = finding.status.value

# Update scan resource summaries
scan_resource_cache.add(
(
str(resource_instance.id),
resource_instance.service,
resource_instance.region,
resource_instance.type,
)
)

# Update scan progress
with rls_transaction(tenant_id):
scan_instance.progress = progress
@@ -332,90 +299,66 @@ def perform_prowler_scan(
scan_instance.unique_resource_count = len(unique_resources)
scan_instance.save()

if exception is None:
try:
regions = prowler_provider.get_regions()
except AttributeError:
regions = set()

compliance_template = PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE[
provider_instance.provider
]
compliance_overview_by_region = {
region: deepcopy(compliance_template) for region in regions
}

for region, check_status in check_status_by_region.items():
compliance_data = compliance_overview_by_region.setdefault(
region, deepcopy(compliance_template)
)
for check_name, status in check_status.items():
generate_scan_compliance(
compliance_data,
provider_instance.provider,
check_name,
status,
)

# Prepare compliance overview objects
compliance_overview_objects = []
for region, compliance_data in compliance_overview_by_region.items():
for compliance_id, compliance in compliance_data.items():
compliance_overview_objects.append(
ComplianceOverview(
tenant_id=tenant_id,
scan=scan_instance,
region=region,
compliance_id=compliance_id,
framework=compliance["framework"],
version=compliance["version"],
description=compliance["description"],
requirements=compliance["requirements"],
requirements_passed=compliance["requirements_status"]["passed"],
requirements_failed=compliance["requirements_status"]["failed"],
requirements_manual=compliance["requirements_status"]["manual"],
total_requirements=compliance["total_requirements"],
)
)
try:
with rls_transaction(tenant_id):
ComplianceOverview.objects.bulk_create(
compliance_overview_objects, batch_size=100
)
except Exception as overview_exception:
import sentry_sdk

sentry_sdk.capture_exception(overview_exception)
logger.error(
f"Error storing compliance overview for scan {scan_id}: {overview_exception}"
)
if exception is not None:
raise exception

try:
regions = prowler_provider.get_regions()
except AttributeError:
regions = set()

compliance_template = PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE[
provider_instance.provider
]
compliance_overview_by_region = {
region: deepcopy(compliance_template) for region in regions
}

for region, check_status in check_status_by_region.items():
compliance_data = compliance_overview_by_region.setdefault(
region, deepcopy(compliance_template)
)
for check_name, status in check_status.items():
generate_scan_compliance(
compliance_data,
provider_instance.provider,
check_name,
status,
)

# Prepare compliance overview objects
compliance_overview_objects = []
for region, compliance_data in compliance_overview_by_region.items():
for compliance_id, compliance in compliance_data.items():
compliance_overview_objects.append(
ComplianceOverview(
tenant_id=tenant_id,
scan=scan_instance,
region=region,
compliance_id=compliance_id,
framework=compliance["framework"],
version=compliance["version"],
description=compliance["description"],
requirements=compliance["requirements"],
requirements_passed=compliance["requirements_status"]["passed"],
requirements_failed=compliance["requirements_status"]["failed"],
requirements_manual=compliance["requirements_status"]["manual"],
total_requirements=compliance["total_requirements"],
)
)
try:
with rls_transaction(tenant_id):
ComplianceOverview.objects.bulk_create(
compliance_overview_objects, batch_size=500
)
except Exception as overview_exception:
import sentry_sdk

sentry_sdk.capture_exception(overview_exception)
logger.error(
f"Error storing compliance overview for scan {scan_id}: {overview_exception}"
)

try:
resource_scan_summaries = [
ResourceScanSummary(
tenant_id=tenant_id,
scan_id=scan_id,
resource_id=resource_id,
service=service,
region=region,
resource_type=resource_type,
)
for resource_id, service, region, resource_type in scan_resource_cache
]
with rls_transaction(tenant_id):
ResourceScanSummary.objects.bulk_create(
resource_scan_summaries, batch_size=500, ignore_conflicts=True
)
except Exception as filter_exception:
import sentry_sdk

sentry_sdk.capture_exception(filter_exception)
logger.error(
f"Error storing filter values for scan {scan_id}: {filter_exception}"
)

serializer = ScanTaskSerializer(instance=scan_instance)
return serializer.data
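
# Illustrative sketch: the per-region template copy used above. deepcopy()
# keeps each region's compliance counters independent of the shared template.
from copy import deepcopy

template = {"requirements_status": {"passed": 0, "failed": 0, "manual": 0}}
by_region = {region: deepcopy(template) for region in ("eu-west-1", "us-east-1")}
by_region["eu-west-1"]["requirements_status"]["passed"] += 1
assert by_region["us-east-1"]["requirements_status"]["passed"] == 0
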
@@ -459,21 +402,21 @@ def aggregate_findings(tenant_id: str, scan_id: str):
).annotate(
fail=Sum(
Case(
When(status="FAIL", muted=False, then=1),
When(status="FAIL", then=1),
default=0,
output_field=IntegerField(),
)
),
_pass=Sum(
Case(
When(status="PASS", muted=False, then=1),
When(status="PASS", then=1),
default=0,
output_field=IntegerField(),
)
),
muted_count=Sum(
muted=Sum(
Case(
When(muted=True, then=1),
When(status="MUTED", then=1),
default=0,
output_field=IntegerField(),
)
@@ -481,63 +424,63 @@
total=Count("id"),
new=Sum(
Case(
When(delta="new", muted=False, then=1),
When(delta="new", then=1),
default=0,
output_field=IntegerField(),
)
),
changed=Sum(
Case(
When(delta="changed", muted=False, then=1),
When(delta="changed", then=1),
default=0,
output_field=IntegerField(),
)
),
unchanged=Sum(
Case(
When(delta__isnull=True, muted=False, then=1),
When(delta__isnull=True, then=1),
default=0,
output_field=IntegerField(),
)
),
fail_new=Sum(
Case(
When(delta="new", status="FAIL", muted=False, then=1),
When(delta="new", status="FAIL", then=1),
default=0,
output_field=IntegerField(),
)
),
fail_changed=Sum(
Case(
When(delta="changed", status="FAIL", muted=False, then=1),
When(delta="changed", status="FAIL", then=1),
default=0,
output_field=IntegerField(),
)
),
pass_new=Sum(
Case(
When(delta="new", status="PASS", muted=False, then=1),
When(delta="new", status="PASS", then=1),
default=0,
output_field=IntegerField(),
)
),
pass_changed=Sum(
Case(
When(delta="changed", status="PASS", muted=False, then=1),
When(delta="changed", status="PASS", then=1),
default=0,
output_field=IntegerField(),
)
),
muted_new=Sum(
Case(
When(delta="new", muted=True, then=1),
When(delta="new", status="MUTED", then=1),
default=0,
output_field=IntegerField(),
)
),
muted_changed=Sum(
Case(
When(delta="changed", muted=True, then=1),
When(delta="changed", status="MUTED", then=1),
default=0,
output_field=IntegerField(),
)
@@ -555,7 +498,7 @@ def aggregate_findings(tenant_id: str, scan_id: str):
region=agg["resources__region"],
fail=agg["fail"],
_pass=agg["_pass"],
muted=agg["muted_count"],
muted=agg["muted"],
total=agg["total"],
new=agg["new"],
changed=agg["changed"],
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
|
||||
@@ -7,11 +6,9 @@ from celery.utils.log import get_task_logger
|
||||
from config.celery import RLSTask
|
||||
from config.django.base import DJANGO_FINDINGS_BATCH_SIZE, DJANGO_TMP_OUTPUT_DIRECTORY
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from tasks.jobs.backfill import backfill_resource_scan_summaries
|
||||
from tasks.jobs.connection import check_provider_connection
|
||||
from tasks.jobs.deletion import delete_provider, delete_tenant
|
||||
from tasks.jobs.export import (
|
||||
COMPLIANCE_CLASS_MAP,
|
||||
OUTPUT_FORMATS_MAPPING,
|
||||
_compress_output_files,
|
||||
_generate_output_directory,
|
||||
@@ -20,14 +17,10 @@ from tasks.jobs.export import (
|
||||
from tasks.jobs.scan import aggregate_findings, perform_prowler_scan
|
||||
from tasks.utils import batched, get_next_execution_datetime
|
||||
|
||||
from api.compliance import get_compliance_frameworks
|
||||
from api.db_utils import rls_transaction
|
||||
from api.decorators import set_tenant
|
||||
from api.models import Finding, Provider, Scan, ScanSummary, StateChoices
|
||||
from api.utils import initialize_prowler_provider
|
||||
from api.v1.serializers import ScanTaskSerializer
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
|
||||
from prowler.lib.outputs.finding import Finding as FindingOutput

logger = get_task_logger(__name__)

@@ -50,10 +43,9 @@ def check_provider_connection_task(provider_id: str):
    return check_provider_connection(provider_id=provider_id)


@shared_task(
    base=RLSTask, name="provider-deletion", queue="deletion", autoretry_for=(Exception,)
)
def delete_provider_task(provider_id: str, tenant_id: str):
@shared_task(base=RLSTask, name="provider-deletion", queue="deletion")
@set_tenant
def delete_provider_task(provider_id: str):
    """
    Task to delete a specific Provider instance.

@@ -61,7 +53,6 @@ def delete_provider_task(provider_id: str, tenant_id: str):

    Args:
        provider_id (str): The primary key of the `Provider` instance to be deleted.
        tenant_id (str): Tenant ID the provider belongs to.

    Returns:
        tuple: A tuple containing:
@@ -69,7 +60,7 @@ def delete_provider_task(provider_id: str, tenant_id: str):
            - A dictionary with the count of deleted instances per model,
              including related models if cascading deletes were triggered.
    """
    return delete_provider(tenant_id=tenant_id, pk=provider_id)
    return delete_provider(pk=provider_id)


@shared_task(base=RLSTask, name="scan-perform", queue="scans")
@@ -135,43 +126,6 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
    periodic_task_instance = PeriodicTask.objects.get(
        name=f"scan-perform-scheduled-{provider_id}"
    )

    executed_scan = Scan.objects.filter(
        tenant_id=tenant_id,
        provider_id=provider_id,
        task__task_runner_task__task_id=task_id,
    ).order_by("completed_at")

    if (
        Scan.objects.filter(
            tenant_id=tenant_id,
            provider_id=provider_id,
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.EXECUTING,
            scheduler_task_id=periodic_task_instance.id,
            scheduled_at__date=datetime.now(timezone.utc).date(),
        ).exists()
        or executed_scan.exists()
    ):
        # Duplicated task execution due to visibility timeout or scan is already running
        logger.warning(f"Duplicated scheduled scan for provider {provider_id}.")
        try:
            affected_scan = executed_scan.first()
            if not affected_scan:
                raise ValueError(
                    "Error retrieving affected scan details after detecting duplicated scheduled "
                    "scan."
                )
            # Return the affected scan details to avoid losing data
            serializer = ScanTaskSerializer(instance=affected_scan)
        except Exception as duplicated_scan_exception:
            logger.error(
                f"Duplicated scheduled scan for provider {provider_id}. Error retrieving affected scan details: "
                f"{str(duplicated_scan_exception)}"
            )
            raise duplicated_scan_exception
        return serializer.data

    next_scan_datetime = get_next_execution_datetime(task_id, provider_id)
    scan_instance, _ = Scan.objects.get_or_create(
        tenant_id=tenant_id,
@@ -179,11 +133,7 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
        trigger=Scan.TriggerChoices.SCHEDULED,
        state__in=(StateChoices.SCHEDULED, StateChoices.AVAILABLE),
        scheduler_task_id=periodic_task_instance.id,
        defaults={
            "state": StateChoices.SCHEDULED,
            "name": "Daily scheduled scan",
            "scheduled_at": next_scan_datetime - timedelta(days=1),
        },
        defaults={"state": StateChoices.SCHEDULED},
    )

    scan_instance.task_id = task_id
@@ -224,7 +174,7 @@ def perform_scan_summary_task(tenant_id: str, scan_id: str):
    return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(name="tenant-deletion", queue="deletion", autoretry_for=(Exception,))
@shared_task(name="tenant-deletion", queue="deletion")
def delete_tenant_task(tenant_id: str):
    return delete_tenant(pk=tenant_id)

@@ -251,123 +201,84 @@ def generate_outputs(scan_id: str, provider_id: str, tenant_id: str):
        scan_id (str): The scan identifier.
        provider_id (str): The provider id to be used in generating outputs.
    """
    # Check if the scan has findings
    if not ScanSummary.objects.filter(scan_id=scan_id).exists():
        logger.info(f"No findings found for scan {scan_id}")
        return {"upload": False}
    # Initialize the prowler provider
    prowler_provider = initialize_prowler_provider(Provider.objects.get(id=provider_id))

    provider_obj = Provider.objects.get(id=provider_id)
    prowler_provider = initialize_prowler_provider(provider_obj)
    provider_uid = provider_obj.uid
    provider_type = provider_obj.provider
    # Get the provider UID
    provider_uid = Provider.objects.get(id=provider_id).uid

    frameworks_bulk = Compliance.get_bulk(provider_type)
    frameworks_avail = get_compliance_frameworks(provider_type)
    out_dir, comp_dir = _generate_output_directory(
    # Generate and ensure the output directory exists
    output_directory = _generate_output_directory(
        DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
    )

    def get_writer(writer_map, name, factory, is_last):
        """
        Return existing writer_map[name] or create via factory().
        In both cases set `.close_file = is_last`.
        """
        initialization = False
        if name not in writer_map:
            writer_map[name] = factory()
            initialization = True
        w = writer_map[name]
        w.close_file = is_last

        return w, initialization
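
A minimal sketch of the caching contract this helper implements, reusing the `get_writer` logic above with a hypothetical `DummyWriter` output class: the factory runs only for the first batch (its constructor ingests that batch), later batches go through `transform()`, and `close_file` is set on every call so only the final batch closes the file.

class DummyWriter:
    """Hypothetical output class mirroring the writer interface used above."""

    def __init__(self, findings):
        self._data = list(findings)  # constructor ingests the first batch
        self.close_file = False

    def transform(self, findings):
        self._data.extend(findings)

    def batch_write_data_to_file(self):
        print(f"writing {len(self._data)} rows (close_file={self.close_file})")
        self._data.clear()


writers = {}
for batch, is_last in [([1, 2], False), ([3], True)]:  # (batch, is_last) pairs
    writer, created = get_writer(writers, "csv", lambda b=batch: DummyWriter(b), is_last)
    if not created:
        writer.transform(batch)
    writer.batch_write_data_to_file()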

    # Define auxiliary variables
    output_writers = {}
    compliance_writers = {}

    scan_summary = FindingOutput._transform_findings_stats(
        ScanSummary.objects.filter(scan_id=scan_id)
    )

    qs = Finding.all_objects.filter(scan_id=scan_id).order_by("uid").iterator()
    for batch, is_last in batched(qs, DJANGO_FINDINGS_BATCH_SIZE):
        fos = [FindingOutput.transform_api_finding(f, prowler_provider) for f in batch]
    # Retrieve findings queryset
    findings_qs = Finding.all_objects.filter(scan_id=scan_id).order_by("uid")

        # Outputs
        for mode, cfg in OUTPUT_FORMATS_MAPPING.items():
            cls = cfg["class"]
            suffix = cfg["suffix"]
            extra = cfg.get("kwargs", {}).copy()
    # Process findings in batches
    for batch, is_last_batch in batched(
        findings_qs.iterator(), DJANGO_FINDINGS_BATCH_SIZE
    ):
        finding_outputs = [
            FindingOutput.transform_api_finding(finding, prowler_provider)
            for finding in batch
        ]

        # Generate output files
        for mode, config in OUTPUT_FORMATS_MAPPING.items():
            kwargs = dict(config.get("kwargs", {}))
            if mode == "html":
                extra.update(provider=prowler_provider, stats=scan_summary)
                kwargs["provider"] = prowler_provider
                kwargs["stats"] = scan_summary

            writer, initialization = get_writer(
                output_writers,
                cls,
                lambda cls=cls, fos=fos, suffix=suffix: cls(
                    findings=fos,
                    file_path=out_dir,
                    file_extension=suffix,
            writer_class = config["class"]
            if writer_class in output_writers:
                writer = output_writers[writer_class]
                writer.transform(finding_outputs)
                writer.close_file = is_last_batch
            else:
                writer = writer_class(
                    findings=finding_outputs,
                    file_path=output_directory,
                    file_extension=config["suffix"],
                    from_cli=False,
                ),
                is_last,
            )
            if not initialization:
                writer.transform(fos)
            writer.batch_write_data_to_file(**extra)
            writer._data.clear()
                )
                writer.close_file = is_last_batch
                output_writers[writer_class] = writer

        # Compliance CSVs
        for name in frameworks_avail:
            compliance_obj = frameworks_bulk[name]
            # Write the current batch using the writer
            writer.batch_write_data_to_file(**kwargs)

            klass = GenericCompliance
            for condition, cls in COMPLIANCE_CLASS_MAP.get(provider_type, []):
                if condition(name):
                    klass = cls
                    break
            # TODO: Refactor the output classes to avoid this manual reset
            writer._data = []

            filename = f"{comp_dir}_{name}.csv"
    # Compress output files
    output_directory = _compress_output_files(output_directory)

            writer, initialization = get_writer(
                compliance_writers,
                name,
                lambda klass=klass, fos=fos: klass(
                    findings=fos,
                    compliance=compliance_obj,
                    file_path=filename,
                    from_cli=False,
                ),
                is_last,
            )
            if not initialization:
                writer.transform(fos, compliance_obj, name)
            writer.batch_write_data_to_file()
            writer._data.clear()
    # Save to configured storage
    uploaded = _upload_to_s3(tenant_id, output_directory, scan_id)

    compressed = _compress_output_files(out_dir)
    upload_uri = _upload_to_s3(tenant_id, compressed, scan_id)

    if upload_uri:
    if uploaded:
        # Remove the local files after upload
        try:
            rmtree(Path(compressed).parent, ignore_errors=True)
        except Exception as e:
            rmtree(Path(output_directory).parent, ignore_errors=True)
        except FileNotFoundError as e:
            logger.error(f"Error deleting output files: {e}")
        final_location, did_upload = upload_uri, True

        output_directory = uploaded
        uploaded = True
    else:
        final_location, did_upload = compressed, False
        uploaded = False

    Scan.all_objects.filter(id=scan_id).update(output_location=final_location)
    logger.info(f"Scan outputs at {final_location}")
    return {"upload": did_upload}
    # Update the scan instance with the output path
    Scan.all_objects.filter(id=scan_id).update(output_location=output_directory)

    logger.info(f"Scan output files generated, output location: {output_directory}")


@shared_task(name="backfill-scan-resource-summaries", queue="backfill")
def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):
    """
    Tries to backfill the resource scan summaries table for a given scan.

    Args:
        tenant_id (str): The tenant identifier.
        scan_id (str): The scan identifier.
    """
    return backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)
    return {"upload": uploaded}

@@ -1,79 +0,0 @@
from uuid import uuid4

import pytest
from tasks.jobs.backfill import backfill_resource_scan_summaries

from api.models import ResourceScanSummary, Scan, StateChoices


@pytest.mark.django_db
class TestBackfillResourceScanSummaries:
    @pytest.fixture(scope="function")
    def resource_scan_summary_data(self, scans_fixture):
        scan = scans_fixture[0]
        return ResourceScanSummary.objects.create(
            tenant_id=scan.tenant_id,
            scan_id=scan.id,
            resource_id=str(uuid4()),
            service="aws",
            region="us-east-1",
            resource_type="instance",
        )

    @pytest.fixture(scope="function")
    def get_not_completed_scans(self, providers_fixture):
        provider_id = providers_fixture[0].id
        tenant_id = providers_fixture[0].tenant_id
        scan_1 = Scan.objects.create(
            tenant_id=tenant_id,
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.EXECUTING,
            provider_id=provider_id,
        )
        scan_2 = Scan.objects.create(
            tenant_id=tenant_id,
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.AVAILABLE,
            provider_id=provider_id,
        )
        return scan_1, scan_2

    def test_already_backfilled(self, resource_scan_summary_data):
        tenant_id = resource_scan_summary_data.tenant_id
        scan_id = resource_scan_summary_data.scan_id

        result = backfill_resource_scan_summaries(tenant_id, scan_id)

        assert result == {"status": "already backfilled"}

    def test_not_completed_scan(self, get_not_completed_scans):
        for scan_instance in get_not_completed_scans:
            tenant_id = scan_instance.tenant_id
            scan_id = scan_instance.id
            result = backfill_resource_scan_summaries(tenant_id, scan_id)

            assert result == {"status": "scan is not completed"}

    def test_successful_backfill_inserts_one_summary(
        self, resources_fixture, findings_fixture
    ):
        tenant_id = findings_fixture[0].tenant_id
        scan_id = findings_fixture[0].scan_id

        # This scan affects the first two resources
        resources = resources_fixture[:2]

        result = backfill_resource_scan_summaries(tenant_id, scan_id)
        assert result == {"status": "backfilled", "inserted": len(resources)}

        # Verify correct values
        summaries = ResourceScanSummary.objects.filter(
            tenant_id=tenant_id, scan_id=scan_id
        )
        assert summaries.count() == len(resources)
        for resource in resources:
            summary = summaries.get(resource_id=resource.id)
            assert summary.resource_id == resource.id
            assert summary.service == resource.service
            assert summary.region == resource.region
            assert summary.resource_type == resource.type
@@ -9,19 +9,17 @@ from api.models import Provider, Tenant
class TestDeleteProvider:
    def test_delete_provider_success(self, providers_fixture):
        instance = providers_fixture[0]
        tenant_id = str(instance.tenant_id)
        result = delete_provider(tenant_id, instance.id)
        result = delete_provider(instance.id)

        assert result
        with pytest.raises(ObjectDoesNotExist):
            Provider.objects.get(pk=instance.id)

    def test_delete_provider_does_not_exist(self, tenants_fixture):
        tenant_id = str(tenants_fixture[0].id)
    def test_delete_provider_does_not_exist(self):
        non_existent_pk = "babf6796-cfcc-4fd3-9dcf-88d012247645"

        with pytest.raises(ObjectDoesNotExist):
            delete_provider(tenant_id, non_existent_pk)
            delete_provider(non_existent_pk)


@pytest.mark.django_db
@@ -1,147 +0,0 @@
import os
import zipfile
from pathlib import Path
from unittest.mock import MagicMock, patch

import pytest
from botocore.exceptions import ClientError
from tasks.jobs.export import (
    _compress_output_files,
    _generate_output_directory,
    _upload_to_s3,
    get_s3_client,
)


@pytest.mark.django_db
class TestOutputs:
    def test_compress_output_files_creates_zip(self, tmpdir):
        base_tmp = Path(str(tmpdir.mkdir("compress_output")))
        output_dir = base_tmp / "output"
        output_dir.mkdir()
        file_path = output_dir / "result.csv"
        file_path.write_text("data")

        zip_path = _compress_output_files(str(output_dir))

        assert zip_path.endswith(".zip")
        assert os.path.exists(zip_path)
        with zipfile.ZipFile(zip_path, "r") as zipf:
            assert "output/result.csv" in zipf.namelist()

    @patch("tasks.jobs.export.boto3.client")
    @patch("tasks.jobs.export.settings")
    def test_get_s3_client_success(self, mock_settings, mock_boto_client):
        mock_settings.DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID = "test"
        mock_settings.DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY = "test"
        mock_settings.DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN = "token"
        mock_settings.DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION = "eu-west-1"

        client_mock = MagicMock()
        mock_boto_client.return_value = client_mock

        client = get_s3_client()
        assert client is not None
        client_mock.list_buckets.assert_called()

    @patch("tasks.jobs.export.boto3.client")
    @patch("tasks.jobs.export.settings")
    def test_get_s3_client_fallback(self, mock_settings, mock_boto_client):
        mock_boto_client.side_effect = [
            ClientError({"Error": {"Code": "403"}}, "ListBuckets"),
            MagicMock(),
        ]
        client = get_s3_client()
        assert client is not None

    @patch("tasks.jobs.export.get_s3_client")
    @patch("tasks.jobs.export.base")
    def test_upload_to_s3_success(self, mock_base, mock_get_client, tmpdir):
        mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = "test-bucket"

        base_tmp = Path(str(tmpdir.mkdir("upload_success")))
        zip_path = base_tmp / "outputs.zip"
        zip_path.write_bytes(b"dummy")

        compliance_dir = base_tmp / "compliance"
        compliance_dir.mkdir()
        (compliance_dir / "report.csv").write_text("ok")

        client_mock = MagicMock()
        mock_get_client.return_value = client_mock

        result = _upload_to_s3("tenant-id", str(zip_path), "scan-id")

        expected_uri = "s3://test-bucket/tenant-id/scan-id/outputs.zip"
        assert result == expected_uri
        assert client_mock.upload_file.call_count == 2

    @patch("tasks.jobs.export.get_s3_client")
    @patch("tasks.jobs.export.base")
    def test_upload_to_s3_missing_bucket(self, mock_base, mock_get_client):
        mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = ""
        result = _upload_to_s3("tenant", "/tmp/fake.zip", "scan")
        assert result is None

    @patch("tasks.jobs.export.get_s3_client")
    @patch("tasks.jobs.export.base")
    def test_upload_to_s3_skips_non_files(self, mock_base, mock_get_client, tmpdir):
        mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = "test-bucket"
        base_tmp = Path(str(tmpdir.mkdir("upload_skips_non_files")))

        zip_path = base_tmp / "results.zip"
        zip_path.write_bytes(b"zip")

        compliance_dir = base_tmp / "compliance"
        compliance_dir.mkdir()
        (compliance_dir / "subdir").mkdir()

        client_mock = MagicMock()
        mock_get_client.return_value = client_mock

        result = _upload_to_s3("tenant", str(zip_path), "scan")

        expected_uri = "s3://test-bucket/tenant/scan/results.zip"
        assert result == expected_uri
        client_mock.upload_file.assert_called_once()

    @patch(
        "tasks.jobs.export.get_s3_client",
        side_effect=ClientError({"Error": {}}, "Upload"),
    )
    @patch("tasks.jobs.export.base")
    @patch("tasks.jobs.export.logger.error")
    def test_upload_to_s3_failure_logs_error(
        self, mock_logger, mock_base, mock_get_client, tmpdir
    ):
        mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = "bucket"

        base_tmp = Path(str(tmpdir.mkdir("upload_failure_logs")))
        zip_path = base_tmp / "zipfile.zip"
        zip_path.write_bytes(b"zip")

        compliance_dir = base_tmp / "compliance"
        compliance_dir.mkdir()
        (compliance_dir / "report.csv").write_text("csv")

        _upload_to_s3("tenant", str(zip_path), "scan")
        mock_logger.assert_called()

    def test_generate_output_directory_creates_paths(self, tmpdir):
        from prowler.config.config import output_file_timestamp

        base_tmp = Path(str(tmpdir.mkdir("generate_output")))
        base_dir = str(base_tmp)
        tenant_id = "t1"
        scan_id = "s1"
        provider = "aws"

        path, compliance = _generate_output_directory(
            base_dir, provider, tenant_id, scan_id
        )

        assert os.path.isdir(os.path.dirname(path))
        assert os.path.isdir(os.path.dirname(compliance))

        assert path.endswith(f"{provider}-{output_file_timestamp}")
        assert compliance.endswith(f"{provider}-{output_file_timestamp}")
@@ -1,6 +1,4 @@
import json
import uuid
from datetime import datetime
from unittest.mock import MagicMock, patch

import pytest
@@ -9,7 +7,6 @@ from tasks.jobs.scan import (
    _store_resources,
    perform_prowler_scan,
)
from tasks.utils import CustomEncoder

from api.models import (
    Finding,
@@ -110,13 +107,7 @@ class TestPerformScan:
        finding.service_name = "service_name"
        finding.resource_type = "resource_type"
        finding.resource_tags = {"tag1": "value1", "tag2": "value2"}
        finding.muted = False
        finding.raw = {}
        finding.resource_metadata = {"test": "metadata"}
        finding.resource_details = {"details": "test"}
        finding.partition = "partition"
        finding.muted = True
        finding.compliance = {"compliance1": "PASS"}

        # Mock the ProwlerScan instance
        mock_prowler_scan_instance = MagicMock()
@@ -154,8 +145,6 @@ class TestPerformScan:
        assert scan_finding.severity == finding.severity
        assert scan_finding.check_id == finding.check_id
        assert scan_finding.raw_result == finding.raw
        assert scan_finding.muted
        assert scan_finding.compliance == finding.compliance

        assert scan_resource.tenant == tenant
        assert scan_resource.uid == finding.resource_uid
@@ -163,11 +152,6 @@ class TestPerformScan:
        assert scan_resource.service == finding.service_name
        assert scan_resource.type == finding.resource_type
        assert scan_resource.name == finding.resource_name
        assert scan_resource.metadata == json.dumps(
            finding.resource_metadata, cls=CustomEncoder
        )
        assert scan_resource.details == f"{finding.resource_details}"
        assert scan_resource.partition == finding.partition

        # Assert that the resource tags have been created and associated
        tags = scan_resource.tags.all()
@@ -207,10 +191,6 @@ class TestPerformScan:
        scan.refresh_from_db()
        assert scan.state == StateChoices.FAILED

        provider.refresh_from_db()
        assert provider.connected is False
        assert isinstance(provider.connection_last_checked_at, datetime)

    @pytest.mark.parametrize(
        "last_status, new_status, expected_delta",
        [
@@ -1,415 +0,0 @@
import uuid
from pathlib import Path
from unittest.mock import MagicMock, patch

import pytest
from tasks.tasks import generate_outputs


@pytest.mark.django_db
class TestGenerateOutputs:
    def setup_method(self):
        self.scan_id = str(uuid.uuid4())
        self.provider_id = str(uuid.uuid4())
        self.tenant_id = str(uuid.uuid4())

    def test_no_findings_returns_early(self):
        with patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter:
            mock_filter.return_value.exists.return_value = False

            result = generate_outputs(
                scan_id=self.scan_id,
                provider_id=self.provider_id,
                tenant_id=self.tenant_id,
            )

            assert result == {"upload": False}
            mock_filter.assert_called_once_with(scan_id=self.scan_id)

    @patch("tasks.tasks.rmtree")
    @patch("tasks.tasks._upload_to_s3")
    @patch("tasks.tasks._compress_output_files")
    @patch("tasks.tasks.get_compliance_frameworks")
    @patch("tasks.tasks.Compliance.get_bulk")
    @patch("tasks.tasks.initialize_prowler_provider")
    @patch("tasks.tasks.Provider.objects.get")
    @patch("tasks.tasks.ScanSummary.objects.filter")
    @patch("tasks.tasks.Finding.all_objects.filter")
    def test_generate_outputs_happy_path(
        self,
        mock_finding_filter,
        mock_scan_summary_filter,
        mock_provider_get,
        mock_initialize_provider,
        mock_compliance_get_bulk,
        mock_get_available_frameworks,
        mock_compress,
        mock_upload,
        mock_rmtree,
    ):
        mock_scan_summary_filter.return_value.exists.return_value = True

        mock_provider = MagicMock()
        mock_provider.uid = "provider-uid"
        mock_provider.provider = "aws"
        mock_provider_get.return_value = mock_provider

        prowler_provider = MagicMock()
        mock_initialize_provider.return_value = prowler_provider

        mock_compliance_get_bulk.return_value = {"cis": MagicMock()}
        mock_get_available_frameworks.return_value = ["cis"]

        dummy_finding = MagicMock(uid="f1")
        mock_finding_filter.return_value.order_by.return_value.iterator.return_value = [
            [dummy_finding],
            True,
        ]

        mock_transformed_stats = {"some": "stats"}
        with (
            patch(
                "tasks.tasks.FindingOutput._transform_findings_stats",
                return_value=mock_transformed_stats,
            ),
            patch(
                "tasks.tasks.FindingOutput.transform_api_finding",
                return_value={"transformed": "f1"},
            ),
            patch(
                "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                {
                    "json": {
                        "class": MagicMock(name="JSONWriter"),
                        "suffix": ".json",
                        "kwargs": {},
                    }
                },
            ),
            patch(
                "tasks.tasks.COMPLIANCE_CLASS_MAP",
                {"aws": [(lambda x: True, MagicMock(name="CSVCompliance"))]},
            ),
            patch(
                "tasks.tasks._generate_output_directory",
                return_value=("out-dir", "comp-dir"),
            ),
            patch("tasks.tasks.Scan.all_objects.filter") as mock_scan_update,
        ):
            mock_compress.return_value = "/tmp/zipped.zip"
            mock_upload.return_value = "s3://bucket/zipped.zip"

            result = generate_outputs(
                scan_id=self.scan_id,
                provider_id=self.provider_id,
                tenant_id=self.tenant_id,
            )

            assert result == {"upload": True}
            mock_scan_update.return_value.update.assert_called_once_with(
                output_location="s3://bucket/zipped.zip"
            )
            mock_rmtree.assert_called_once_with(
                Path("/tmp/zipped.zip").parent, ignore_errors=True
            )

    def test_generate_outputs_fails_upload(self):
        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.tasks.Provider.objects.get"),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch("tasks.tasks.Compliance.get_bulk"),
            patch("tasks.tasks.get_compliance_frameworks"),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
                "tasks.tasks._generate_output_directory", return_value=("out", "comp")
            ),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch("tasks.tasks.FindingOutput.transform_api_finding"),
            patch(
                "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                {
                    "json": {
                        "class": MagicMock(name="Writer"),
                        "suffix": ".json",
                        "kwargs": {},
                    }
                },
            ),
            patch(
                "tasks.tasks.COMPLIANCE_CLASS_MAP",
                {"aws": [(lambda x: True, MagicMock())]},
            ),
            patch("tasks.tasks._compress_output_files", return_value="/tmp/compressed"),
            patch("tasks.tasks._upload_to_s3", return_value=None),
            patch("tasks.tasks.Scan.all_objects.filter") as mock_scan_update,
        ):
            mock_filter.return_value.exists.return_value = True
            mock_findings.return_value.order_by.return_value.iterator.return_value = [
                [MagicMock()],
                True,
            ]

            result = generate_outputs(
                scan_id="scan",
                provider_id="provider",
                tenant_id=self.tenant_id,
            )

            assert result == {"upload": False}
            mock_scan_update.return_value.update.assert_called_once()

    def test_generate_outputs_triggers_html_extra_update(self):
        mock_finding_output = MagicMock()
        mock_finding_output.compliance = {"cis": ["requirement-1", "requirement-2"]}

        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.tasks.Provider.objects.get"),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch("tasks.tasks.Compliance.get_bulk", return_value={"cis": MagicMock()}),
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
                "tasks.tasks._generate_output_directory", return_value=("out", "comp")
            ),
            patch(
                "tasks.tasks.FindingOutput._transform_findings_stats",
                return_value={"some": "stats"},
            ),
            patch(
                "tasks.tasks.FindingOutput.transform_api_finding",
                return_value=mock_finding_output,
            ),
            patch("tasks.tasks._compress_output_files", return_value="/tmp/compressed"),
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/f.zip"),
            patch("tasks.tasks.Scan.all_objects.filter"),
        ):
            mock_filter.return_value.exists.return_value = True
            mock_findings.return_value.order_by.return_value.iterator.return_value = [
                [MagicMock()],
                True,
            ]

            html_writer_mock = MagicMock()
            with (
                patch(
                    "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                    {
                        "html": {
                            "class": lambda *args, **kwargs: html_writer_mock,
                            "suffix": ".html",
                            "kwargs": {},
                        }
                    },
                ),
                patch(
                    "tasks.tasks.COMPLIANCE_CLASS_MAP",
                    {"aws": [(lambda x: True, MagicMock())]},
                ),
            ):
                generate_outputs(
                    scan_id=self.scan_id,
                    provider_id=self.provider_id,
                    tenant_id=self.tenant_id,
                )
                html_writer_mock.batch_write_data_to_file.assert_called_once()

    def test_transform_called_only_on_second_batch(self):
        raw1 = MagicMock()
        raw2 = MagicMock()

        tf1 = MagicMock()
        tf1.compliance = {}
        tf2 = MagicMock()
        tf2.compliance = {}

        writer_instances = []

        class TrackingWriter:
            def __init__(self, findings, file_path, file_extension, from_cli):
                self.transform_called = 0
                self.batch_write_data_to_file = MagicMock()
                self._data = []
                self.close_file = False
                writer_instances.append(self)

            def transform(self, fos):
                self.transform_called += 1

        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_summary,
            patch("tasks.tasks.Provider.objects.get"),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch("tasks.tasks.Compliance.get_bulk"),
            patch("tasks.tasks.get_compliance_frameworks", return_value=[]),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch(
                "tasks.tasks.FindingOutput.transform_api_finding",
                side_effect=[tf1, tf2],
            ),
            patch(
                "tasks.tasks._generate_output_directory",
                return_value=("outdir", "compdir"),
            ),
            patch("tasks.tasks._compress_output_files", return_value="outdir.zip"),
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/outdir.zip"),
            patch("tasks.tasks.rmtree"),
            patch("tasks.tasks.Scan.all_objects.filter"),
            patch(
                "tasks.tasks.batched",
                return_value=[
                    ([raw1], False),
                    ([raw2], True),
                ],
            ),
        ):
            mock_summary.return_value.exists.return_value = True

            with patch(
                "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                {
                    "json": {
                        "class": TrackingWriter,
                        "suffix": ".json",
                        "kwargs": {},
                    }
                },
            ):
                result = generate_outputs(
                    scan_id=self.scan_id,
                    provider_id=self.provider_id,
                    tenant_id=self.tenant_id,
                )

        assert result == {"upload": True}
        assert len(writer_instances) == 1
        writer = writer_instances[0]
        assert writer.transform_called == 1

    def test_compliance_transform_called_on_second_batch(self):
        raw1 = MagicMock()
        raw2 = MagicMock()
        compliance_obj = MagicMock()
        writer_instances = []

        class TrackingComplianceWriter:
            def __init__(self, *args, **kwargs):
                self.transform_calls = []
                self._data = []
                writer_instances.append(self)

            def transform(self, fos, comp_obj, name):
                self.transform_calls.append((fos, comp_obj, name))

            def batch_write_data_to_file(self):
                pass

        two_batches = [
            ([raw1], False),
            ([raw2], True),
        ]

        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_summary,
            patch(
                "tasks.tasks.Provider.objects.get",
                return_value=MagicMock(uid="UID", provider="aws"),
            ),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch(
                "tasks.tasks.Compliance.get_bulk", return_value={"cis": compliance_obj}
            ),
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch(
                "tasks.tasks._generate_output_directory",
                return_value=("outdir", "compdir"),
            ),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch(
                "tasks.tasks.FindingOutput.transform_api_finding",
                side_effect=lambda f, prov: f,
            ),
            patch("tasks.tasks._compress_output_files", return_value="outdir.zip"),
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/outdir.zip"),
            patch("tasks.tasks.rmtree"),
            patch(
                "tasks.tasks.Scan.all_objects.filter",
                return_value=MagicMock(update=lambda **kw: None),
            ),
            patch("tasks.tasks.batched", return_value=two_batches),
            patch("tasks.tasks.OUTPUT_FORMATS_MAPPING", {}),
            patch(
                "tasks.tasks.COMPLIANCE_CLASS_MAP",
                {"aws": [(lambda name: True, TrackingComplianceWriter)]},
            ),
        ):
            mock_summary.return_value.exists.return_value = True

            result = generate_outputs(
                scan_id=self.scan_id,
                provider_id=self.provider_id,
                tenant_id=self.tenant_id,
            )

        assert len(writer_instances) == 1
        writer = writer_instances[0]
        assert writer.transform_calls == [([raw2], compliance_obj, "cis")]
        assert result == {"upload": True}

    def test_generate_outputs_logs_rmtree_exception(self, caplog):
        mock_finding_output = MagicMock()
        mock_finding_output.compliance = {"cis": ["requirement-1", "requirement-2"]}

        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.tasks.Provider.objects.get"),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch("tasks.tasks.Compliance.get_bulk", return_value={"cis": MagicMock()}),
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
                "tasks.tasks._generate_output_directory", return_value=("out", "comp")
            ),
            patch(
                "tasks.tasks.FindingOutput._transform_findings_stats",
                return_value={"some": "stats"},
            ),
            patch(
                "tasks.tasks.FindingOutput.transform_api_finding",
                return_value=mock_finding_output,
            ),
            patch("tasks.tasks._compress_output_files", return_value="/tmp/compressed"),
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/file.zip"),
            patch("tasks.tasks.Scan.all_objects.filter"),
            patch("tasks.tasks.rmtree", side_effect=Exception("Test deletion error")),
        ):
            mock_filter.return_value.exists.return_value = True
            mock_findings.return_value.order_by.return_value.iterator.return_value = [
                [MagicMock()],
                True,
            ]

            with (
                patch(
                    "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                    {
                        "json": {
                            "class": lambda *args, **kwargs: MagicMock(),
                            "suffix": ".json",
                            "kwargs": {},
                        }
                    },
                ),
                patch(
                    "tasks.tasks.COMPLIANCE_CLASS_MAP",
                    {"aws": [(lambda x: True, MagicMock())]},
                ),
            ):
                with caplog.at_level("ERROR"):
                    generate_outputs(
                        scan_id=self.scan_id,
                        provider_id=self.provider_id,
                        tenant_id=self.tenant_id,
                    )
                assert "Error deleting output files" in caplog.text
@@ -1,32 +1,9 @@
import json
from datetime import datetime, timedelta, timezone
from enum import Enum

from django_celery_beat.models import PeriodicTask
from django_celery_results.models import TaskResult


class CustomEncoder(json.JSONEncoder):
    def default(self, o):
        # Enum serialization
        if isinstance(o, Enum):
            return o.value
        # Datetime and timedelta serialization
        if isinstance(o, datetime):
            return o.isoformat(timespec="seconds")
        if isinstance(o, timedelta):
            return o.total_seconds()

        # Custom object serialization
        try:
            return super().default(o)
        except TypeError:
            try:
                return o.__dict__
            except AttributeError:
                return str(o)
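
A quick sketch of the encoder's behavior, assuming `CustomEncoder` exactly as defined above (values invented for illustration):

import json
from datetime import datetime, timedelta

payload = {
    "when": datetime(2025, 4, 22, 10, 30, 0),
    "window": timedelta(minutes=90),
}
print(json.dumps(payload, cls=CustomEncoder))
# {"when": "2025-04-22T10:30:00", "window": 5400.0}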


def get_next_execution_datetime(task_id: int, provider_id: str) -> datetime:
    task_instance = TaskResult.objects.get(task_id=task_id)
    try:
@@ -1,156 +0,0 @@
#!/usr/bin/env python3
import argparse
import os
import re
import subprocess
import sys
from pathlib import Path

import matplotlib.pyplot as plt
import pandas as pd

plt.style.use("ggplot")


def run_locust(
    locust_file: str,
    host: str,
    users: int,
    hatch_rate: int,
    run_time: str,
    csv_prefix: Path,
) -> Path:
    artifacts_dir = Path("artifacts")
    artifacts_dir.mkdir(parents=True, exist_ok=True)

    cmd = [
        "locust",
        "-f",
        f"scenarios/{locust_file}",
        "--headless",
        "-u",
        str(users),
        "-r",
        str(hatch_rate),
        "-t",
        run_time,
        "--host",
        host,
        "--csv",
        str(artifacts_dir / csv_prefix.name),
    ]
    print(f"Running Locust: {' '.join(cmd)}")
    process = subprocess.run(cmd)
    if process.returncode:
        sys.exit("Locust execution failed")

    stats_file = artifacts_dir / f"{csv_prefix.stem}_stats.csv"
    if not stats_file.exists():
        sys.exit(f"Stats CSV not found: {stats_file}")
    return stats_file


def load_percentiles(csv_path: Path) -> pd.DataFrame:
    df = pd.read_csv(csv_path)
    mapping = {"50%": "p50", "75%": "p75", "90%": "p90", "95%": "p95"}
    available = [col for col in mapping if col in df.columns]
    renamed = {col: mapping[col] for col in available}
    df = df.rename(columns=renamed).set_index("Name")[renamed.values()]
    return df.drop(index=["Aggregated"], errors="ignore")
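
A sketch of what `load_percentiles` yields from a Locust `*_stats.csv` (numbers invented; `pd.read_csv` also accepts an in-memory buffer, which makes the helper easy to exercise):

import io

sample = io.StringIO(
    "Name,50%,75%,90%,95%\n"
    "/findings,120,180,240,310\n"
    "Aggregated,130,190,260,330\n"
)
df = load_percentiles(sample)
print(df)
# roughly:
#            p50  p75  p90  p95
# Name
# /findings  120  180  240  310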


def sanitize_label(label: str) -> str:
    text = re.sub(r"[^\w]+", "_", label.strip().lower())
    return text.strip("_")


def plot_multi_comparison(metrics: dict[str, pd.DataFrame]) -> None:
    common = sorted(set.intersection(*(set(df.index) for df in metrics.values())))
    percentiles = list(next(iter(metrics.values())).columns)
    groups = len(metrics)
    width = 0.8 / groups

    for endpoint in common:
        fig, ax = plt.subplots(figsize=(10, 5), dpi=100)
        for idx, (label, df) in enumerate(metrics.items()):
            series = df.loc[endpoint]
            positions = [
                i + (idx - groups / 2) * width + width / 2
                for i in range(len(percentiles))
            ]
            bars = ax.bar(positions, series.values, width, label=label)
            for bar in bars:
                height = bar.get_height()
                ax.annotate(
                    f"{int(height)}",
                    xy=(bar.get_x() + bar.get_width() / 2, height),
                    xytext=(0, 3),
                    textcoords="offset points",
                    ha="center",
                    va="bottom",
                    fontsize=8,
                )

        ax.set_xticks(range(len(percentiles)))
        ax.set_xticklabels(percentiles)
        ax.set_ylabel("Latency (ms)")
        ax.set_title(endpoint, fontsize=12)
        ax.grid(True, axis="y", linestyle="--", alpha=0.7)

        fig.tight_layout()
        fig.subplots_adjust(right=0.75)
        ax.legend(loc="center left", bbox_to_anchor=(1, 0.5), framealpha=0.9)

        output = Path("artifacts") / f"comparison_{sanitize_label(endpoint)}.png"
        plt.savefig(output)
        plt.close(fig)
        print(f"Saved chart: {output}")


def main() -> None:
    parser = argparse.ArgumentParser(description="Run Locust and compare metrics")
    parser.add_argument("--locustfile", required=True, help="Locust file in scenarios/")
    parser.add_argument("--host", required=True, help="Target host URL")
    parser.add_argument(
        "--users", type=int, default=10, help="Number of simulated users"
    )
    parser.add_argument("--rate", type=int, default=1, help="Hatch rate per second")
    parser.add_argument("--time", default="1m", help="Test duration (e.g. 30s, 1m)")
    parser.add_argument(
        "--metrics-dir", default="baselines", help="Directory with CSV baselines"
    )
    parser.add_argument("--version", default="current", help="Test version")
    args = parser.parse_args()

    metrics_dir = Path(args.metrics_dir)
    os.makedirs(metrics_dir, exist_ok=True)

    metrics_data: dict[str, pd.DataFrame] = {}
    for csv_file in sorted(metrics_dir.glob("*.csv")):
        metrics_data[csv_file.stem] = load_percentiles(csv_file)

    current_prefix = Path(args.version)
    current_csv = run_locust(
        locust_file=args.locustfile,
        host=args.host,
        users=args.users,
        hatch_rate=args.rate,
        run_time=args.time,
        csv_prefix=current_prefix,
    )
    metrics_data[args.version] = load_percentiles(current_csv)

    for endpoint in sorted(
        set.intersection(*(set(df.index) for df in metrics_data.values()))
    ):
        parts = [endpoint]
        for label, df in metrics_data.items():
            s = df.loc[endpoint]
            parts.append(f"{label}: p50 {s.p50}, p75 {s.p75}, p90 {s.p90}, p95 {s.p95}")
        print(" | ".join(parts))

    plot_multi_comparison(metrics_data)


if __name__ == "__main__":
    main()
@@ -1,3 +0,0 @@
locust==2.34.1
matplotlib==3.10.1
pandas==2.2.3
@@ -1,216 +0,0 @@
from locust import events, task
from utils.config import (
    FINDINGS_UI_SORT_VALUES,
    L_PROVIDER_NAME,
    M_PROVIDER_NAME,
    S_PROVIDER_NAME,
    TARGET_INSERTED_AT,
)
from utils.helpers import (
    APIUserBase,
    get_api_token,
    get_auth_headers,
    get_next_resource_filter,
    get_resource_filters_pairs,
    get_scan_id_from_provider_name,
    get_sort_value,
)

GLOBAL = {
    "token": None,
    "scan_ids": {},
    "resource_filters": None,
    "large_resource_filters": None,
}


@events.test_start.add_listener
def on_test_start(environment, **kwargs):
    GLOBAL["token"] = get_api_token(environment.host)

    GLOBAL["scan_ids"]["small"] = get_scan_id_from_provider_name(
        environment.host, GLOBAL["token"], S_PROVIDER_NAME
    )
    GLOBAL["scan_ids"]["medium"] = get_scan_id_from_provider_name(
        environment.host, GLOBAL["token"], M_PROVIDER_NAME
    )
    GLOBAL["scan_ids"]["large"] = get_scan_id_from_provider_name(
        environment.host, GLOBAL["token"], L_PROVIDER_NAME
    )

    GLOBAL["resource_filters"] = get_resource_filters_pairs(
        environment.host, GLOBAL["token"]
    )
    GLOBAL["large_resource_filters"] = get_resource_filters_pairs(
        environment.host, GLOBAL["token"], GLOBAL["scan_ids"]["large"]
    )


class APIUser(APIUserBase):
    def on_start(self):
        self.token = GLOBAL["token"]
        self.s_scan_id = GLOBAL["scan_ids"]["small"]
        self.m_scan_id = GLOBAL["scan_ids"]["medium"]
        self.l_scan_id = GLOBAL["scan_ids"]["large"]
        self.available_resource_filters = GLOBAL["resource_filters"]
        self.available_resource_filters_large_scan = GLOBAL["large_resource_filters"]

    @task
    def findings_default(self):
        name = "/findings"
        page_number = self._next_page(name)
        endpoint = (
            f"/findings?page[number]={page_number}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
            f"&filter[inserted_at]={TARGET_INSERTED_AT}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def findings_default_include(self):
        name = "/findings?include"
        page = self._next_page(name)
        endpoint = (
            f"/findings?page[number]={page}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
            f"&filter[inserted_at]={TARGET_INSERTED_AT}"
            f"&include=scan.provider,resources"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def findings_metadata(self):
        endpoint = f"/findings/metadata?" f"filter[inserted_at]={TARGET_INSERTED_AT}"
        self.client.get(
            endpoint, headers=get_auth_headers(self.token), name="/findings/metadata"
        )

    @task
    def findings_scan_small(self):
        name = "/findings?filter[scan_id] - 50k"
        page_number = self._next_page(name)
        endpoint = (
            f"/findings?page[number]={page_number}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
            f"&filter[scan]={self.s_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def findings_metadata_scan_small(self):
        endpoint = f"/findings/metadata?" f"&filter[scan]={self.s_scan_id}"
        self.client.get(
            endpoint,
            headers=get_auth_headers(self.token),
            name="/findings/metadata?filter[scan_id] - 50k",
        )

    @task(2)
    def findings_scan_medium(self):
        name = "/findings?filter[scan_id] - 250k"
        page_number = self._next_page(name)
        endpoint = (
            f"/findings?page[number]={page_number}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
            f"&filter[scan]={self.m_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def findings_metadata_scan_medium(self):
        endpoint = f"/findings/metadata?" f"&filter[scan]={self.m_scan_id}"
        self.client.get(
            endpoint,
            headers=get_auth_headers(self.token),
            name="/findings/metadata?filter[scan_id] - 250k",
        )

    @task
    def findings_scan_large(self):
        name = "/findings?filter[scan_id] - 500k"
        page_number = self._next_page(name)
        endpoint = (
            f"/findings?page[number]={page_number}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
            f"&filter[scan]={self.l_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def findings_scan_large_include(self):
        name = "/findings?filter[scan_id]&include - 500k"
        page_number = self._next_page(name)
        endpoint = (
            f"/findings?page[number]={page_number}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
            f"&filter[scan]={self.l_scan_id}"
            f"&include=scan.provider,resources"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def findings_metadata_scan_large(self):
        endpoint = f"/findings/metadata?" f"&filter[scan]={self.l_scan_id}"
        self.client.get(
            endpoint,
            headers=get_auth_headers(self.token),
            name="/findings/metadata?filter[scan_id] - 500k",
        )

    @task(2)
    def findings_resource_filter(self):
        name = "/findings?filter[resource_filter]&include"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )

        endpoint = (
            f"/findings?filter[{filter_name}]={filter_value}"
            f"&filter[inserted_at]={TARGET_INSERTED_AT}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
            f"&include=scan.provider,resources"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def findings_metadata_resource_filter(self):
        name = "/findings/metadata?filter[resource_filter]"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )

        endpoint = (
            f"/findings/metadata?filter[{filter_name}]={filter_value}"
            f"&filter[inserted_at]={TARGET_INSERTED_AT}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def findings_metadata_resource_filter_scan_large(self):
        name = "/findings/metadata?filter[resource_filter]&filter[scan_id] - 500k"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )

        endpoint = (
            f"/findings/metadata?filter[{filter_name}]={filter_value}"
            f"&filter[scan]={self.l_scan_id}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(2)
    def findings_resource_filter_large_scan_include(self):
        name = "/findings?filter[resource_filter][scan]&include - 500k"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )

        endpoint = (
            f"/findings?filter[{filter_name}]={filter_value}"
            f"&{get_sort_value(FINDINGS_UI_SORT_VALUES)}"
            f"&filter[scan]={self.l_scan_id}"
            f"&include=scan.provider,resources"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
@@ -1,19 +0,0 @@
import os

USER_EMAIL = os.environ.get("USER_EMAIL")
USER_PASSWORD = os.environ.get("USER_PASSWORD")

BASE_HEADERS = {"Content-Type": "application/vnd.api+json"}

FINDINGS_UI_SORT_VALUES = ["severity", "status", "-inserted_at"]
TARGET_INSERTED_AT = os.environ.get("TARGET_INSERTED_AT", "2025-04-22")

FINDINGS_RESOURCE_METADATA = {
    "regions": "region",
    "resource_types": "resource_type",
    "services": "service",
}

S_PROVIDER_NAME = "provider-50k"
M_PROVIDER_NAME = "provider-250k"
L_PROVIDER_NAME = "provider-500k"
@@ -1,168 +0,0 @@
import random
from collections import defaultdict
from threading import Lock

import requests
from locust import HttpUser, between
from utils.config import (
    BASE_HEADERS,
    FINDINGS_RESOURCE_METADATA,
    TARGET_INSERTED_AT,
    USER_EMAIL,
    USER_PASSWORD,
)

_global_page_counters = defaultdict(int)
_page_lock = Lock()


class APIUserBase(HttpUser):
    """
    Base class for API user simulation in Locust performance tests.

    Attributes:
        abstract (bool): Indicates this is an abstract user class.
        wait_time: Time between task executions, randomized between 1 and 5 seconds.
    """

    abstract = True
    wait_time = between(1, 5)

    def _next_page(self, endpoint_name: str) -> int:
        """
        Returns the next page number for a given endpoint. Thread-safe.

        Args:
            endpoint_name (str): Name of the API endpoint being paginated.

        Returns:
            int: The next page number for the given endpoint.
        """
        with _page_lock:
            _global_page_counters[endpoint_name] += 1
            return _global_page_counters[endpoint_name]
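
A small sketch of the guarantee `_next_page` provides: callers racing on the same endpoint each observe a distinct page number. Plain threads are used here for illustration; under Locust the users are cooperative greenlets, but the contract is the same.

from collections import defaultdict
from threading import Lock, Thread

counters = defaultdict(int)
lock = Lock()


def next_page(name: str) -> int:
    with lock:
        counters[name] += 1
        return counters[name]


pages = []
threads = [Thread(target=lambda: pages.append(next_page("/findings"))) for _ in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert sorted(pages) == list(range(1, 9))  # every caller got a unique page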


def get_next_resource_filter(available_values: dict) -> tuple:
    """
    Randomly selects a filter type and value from available options.

    Args:
        available_values (dict): Dictionary with filter types as keys and list of possible values.

    Returns:
        tuple: A (filter_type, filter_value) pair randomly selected.
    """
    filter_type = random.choice(list(available_values.keys()))
    filter_value = random.choice(available_values[filter_type])
    return filter_type, filter_value


def get_auth_headers(token: str) -> dict:
    """
    Returns the headers for the API requests.

    Args:
        token (str): The token to be included in the headers.

    Returns:
        dict: The headers for the API requests.
    """
    return {
        "Authorization": f"Bearer {token}",
        **BASE_HEADERS,
    }


def get_api_token(host: str) -> str:
    """
    Authenticates with the API and retrieves a bearer token.

    Args:
        host (str): The host URL of the API.

    Returns:
        str: The access token for authenticated requests.

    Raises:
        AssertionError: If the request fails or does not return a 200 status code.
    """
    login_payload = {
        "data": {
            "type": "tokens",
            "attributes": {"email": USER_EMAIL, "password": USER_PASSWORD},
        }
    }
    response = requests.post(f"{host}/tokens", json=login_payload, headers=BASE_HEADERS)
    assert response.status_code == 200, f"Failed to get token: {response.text}"
    return response.json()["data"]["attributes"]["access"]


def get_scan_id_from_provider_name(host: str, token: str, provider_name: str) -> str:
    """
    Retrieves the scan ID associated with a specific provider name.

    Args:
        host (str): The host URL of the API.
        token (str): Bearer token for authentication.
        provider_name (str): Name of the provider to filter scans by.

    Returns:
        str: The ID of the scan.

    Raises:
        AssertionError: If the request fails or does not return a 200 status code.
    """
    response = requests.get(
        f"{host}/scans?fields[scans]=id&filter[provider_alias]={provider_name}",
        headers=get_auth_headers(token),
    )
    assert response.status_code == 200, f"Failed to get scan: {response.text}"
    return response.json()["data"][0]["id"]


def get_resource_filters_pairs(host: str, token: str, scan_id: str = "") -> dict:
    """
    Retrieves and maps resource metadata filter values from the findings endpoint.

    Args:
        host (str): The host URL of the API.
        token (str): Bearer token for authentication.
        scan_id (str, optional): Optional scan ID to filter metadata. Defaults to using the inserted_at timestamp.

    Returns:
        dict: A dictionary of resource filter metadata.

    Raises:
        AssertionError: If the request fails or does not return a 200 status code.
    """
    metadata_filters = (
        f"filter[scan]={scan_id}"
        if scan_id
        else f"filter[inserted_at]={TARGET_INSERTED_AT}"
    )
    response = requests.get(
        f"{host}/findings/metadata?{metadata_filters}", headers=get_auth_headers(token)
    )
    assert (
        response.status_code == 200
    ), f"Failed to get resource filters values: {response.text}"
    attributes = response.json()["data"]["attributes"]
    return {
        FINDINGS_RESOURCE_METADATA[key]: values
        for key, values in attributes.items()
        if key in FINDINGS_RESOURCE_METADATA.keys()
    }


def get_sort_value(sort_values: list) -> str:
    """
    Constructs a sort query string from a list of sort keys.

    Args:
        sort_values (list): The list of sort values to include in the query.

    Returns:
        str: A formatted sort query string (e.g., "sort=created_at,-severity").
    """
    return f"sort={','.join(sort_values)}"

@@ -1,9 +1,6 @@
#!/bin/bash
# Run Prowler against All AWS Accounts in an AWS Organization

# Activate Poetry Environment
eval "$(poetry env activate)"

# Show Prowler Version
prowler -v


@@ -89,7 +89,7 @@ for accountId in $ACCOUNTS_IN_ORGS; do
    # Run Prowler
    echo -e "Assessing AWS Account: $accountId, using Role: $ROLE on $(date)"
    # remove -g cislevel for a full report and add other formats if needed
    ./prowler/prowler-cli.py --role arn:"$PARTITION":iam::"$accountId":role/"$ROLE" --compliance cis_1.5_aws -M html
    ./prowler/prowler.py --role arn:"$PARTITION":iam::"$accountId":role/"$ROLE" --compliance cis_1.5_aws -M html
    echo "Report stored locally at: prowler/output/ directory"
    TOTAL_SEC=$((SECONDS - START_TIME))
    echo -e "Completed AWS Account: $accountId, using Role: $ROLE on $(date)"
@@ -17,7 +17,7 @@ spec:
        image: toniblyx/prowler:latest
        imagePullPolicy: Always
        command:
          - "./prowler-cli.py"
          - "./prowler.py"
        args: [ "-B", "$(awsS3Bucket)" ]
        env:
          - name: AWS_ACCESS_KEY_ID

@@ -399,6 +399,7 @@ mainConfig:
      [
        "RSA-1024",
        "P-192",
        "SHA-1",
      ]

# AWS EKS Configuration

@@ -16,6 +16,7 @@ spec:
      containers:
        - name: prowler
          image: {{ .Values.image.repository }}:{{ .Values.image.tag }}
          command: ["prowler"]
          args: ["kubernetes", "-z", "-b"]
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          volumeMounts:

@@ -161,7 +161,7 @@ def update_nav_bar(pathname):
    html.Span(
        [
            html.Img(src="assets/favicon.ico", className="w-5"),
            "Subscribe to Prowler Cloud",
            "Subscribe to prowler SaaS",
        ],
        className="flex items-center gap-x-3 text-white",
    ),
Binary file not shown. (Before: 68 KiB image)

dashboard/assets/styles/dist/output.css (vendored, 5 changed lines)
@@ -1361,9 +1361,6 @@ video {
.lg\:grid-cols-4 {
  grid-template-columns: repeat(4, minmax(0, 1fr));
}
.lg\:grid-cols-5 {
  grid-template-columns: repeat(5, minmax(0, 1fr));
}

.lg\:justify-normal {
  justify-content: normal;
@@ -1406,4 +1403,4 @@ video {
.\32xl\:w-\[9\%\] {
  width: 9%;
}
}
}
Some files were not shown because too many files have changed in this diff.