Compare commits
343 Commits
improve-de
...
5.4.3
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5be859d86c | ||
|
|
5340008c8f | ||
|
|
a5aa4c30d7 | ||
|
|
56d0c2fbea | ||
|
|
6b7ef199e0 | ||
|
|
933c5063ee | ||
|
|
72a998a692 | ||
|
|
a4d1e3bb69 | ||
|
|
2a8b04cced | ||
|
|
cfc02186d4 | ||
|
|
7e432a3b69 | ||
|
|
fed8de314f | ||
|
|
ce23c4b5aa | ||
|
|
33e99ef628 | ||
|
|
72761a6ef6 | ||
|
|
2a4fdff827 | ||
|
|
8e264313bc | ||
|
|
21654b0bc0 | ||
|
|
7fa57ba3e2 | ||
|
|
ea9b6bb191 | ||
|
|
95acb7b0c8 | ||
|
|
d23edfa337 | ||
|
|
40678a5863 | ||
|
|
23aded92a3 | ||
|
|
6e56d3862d | ||
|
|
d95fccd163 | ||
|
|
7ddf860a55 | ||
|
|
3f41c75a45 | ||
|
|
04b6dbf639 | ||
|
|
ff4d16deb5 | ||
|
|
562921cd5e | ||
|
|
8f061e4fed | ||
|
|
3fb86d754a | ||
|
|
7874707310 | ||
|
|
1c934e37c7 | ||
|
|
8459cff16d | ||
|
|
57ae096395 | ||
|
|
200185de25 | ||
|
|
f8447b0f79 | ||
|
|
19289bbe20 | ||
|
|
b5b371fa0c | ||
|
|
939a623cec | ||
|
|
926f449ae6 | ||
|
|
646668c6ae | ||
|
|
8e6b92792b | ||
|
|
65c081ce38 | ||
|
|
5600131d6a | ||
|
|
dbd271980f | ||
|
|
ff532a899e | ||
|
|
0c9675ec70 | ||
|
|
d45eda2b2b | ||
|
|
0abcf80d19 | ||
|
|
c0e10fd395 | ||
|
|
a80e9b26a8 | ||
|
|
2a9cd57fb8 | ||
|
|
a0ad1a5f49 | ||
|
|
dff22dd166 | ||
|
|
58138810b9 | ||
|
|
1ecc272fe4 | ||
|
|
b784167006 | ||
|
|
cf0ec8dea0 | ||
|
|
96cae5e961 | ||
|
|
a48e5cb15f | ||
|
|
5a9ff007e0 | ||
|
|
24c45f894c | ||
|
|
5d03c85629 | ||
|
|
41dc397a7a | ||
|
|
237a9adce9 | ||
|
|
a06167f1c2 | ||
|
|
a7d58c40dd | ||
|
|
e260c46389 | ||
|
|
115169a596 | ||
|
|
5b19173c1d | ||
|
|
d3dd1644e6 | ||
|
|
8ff0c59964 | ||
|
|
285939c389 | ||
|
|
a62ae8af51 | ||
|
|
5d78b9e439 | ||
|
|
1056c270ca | ||
|
|
eeef6600b7 | ||
|
|
e142f17abe | ||
|
|
a65d858dac | ||
|
|
6235a1ba41 | ||
|
|
05007d03ee | ||
|
|
102d099947 | ||
|
|
3194675a5c | ||
|
|
14e6e4aa68 | ||
|
|
b24c3665b5 | ||
|
|
1f60878867 | ||
|
|
2dd18662d8 | ||
|
|
175360dbe6 | ||
|
|
80e24b971f | ||
|
|
78877c470a | ||
|
|
9c9b100359 | ||
|
|
10f3232294 | ||
|
|
a2e5f70f36 | ||
|
|
8d8b31c757 | ||
|
|
cba1e718b9 | ||
|
|
6c3c37fc26 | ||
|
|
b610cacd0c | ||
|
|
027a5705cb | ||
|
|
b7fbfb4360 | ||
|
|
5acf0a7e3d | ||
|
|
3a25e86e30 | ||
|
|
50f1592eb3 | ||
|
|
0f2927cb88 | ||
|
|
b4e1434052 | ||
|
|
43710783f9 | ||
|
|
16f767e7b9 | ||
|
|
42818217a0 | ||
|
|
13405594b2 | ||
|
|
b5a3852334 | ||
|
|
181ff1acb3 | ||
|
|
91e59a3279 | ||
|
|
b3fad1a765 | ||
|
|
6f90927a79 | ||
|
|
0bb4a9a3e9 | ||
|
|
80d9cde60b | ||
|
|
11196c2f83 | ||
|
|
55a0d0a1b5 | ||
|
|
4e5e1d7bd4 | ||
|
|
06a0a434ab | ||
|
|
153833fc55 | ||
|
|
fc4877975f | ||
|
|
0797efd4fd | ||
|
|
fbec99a0b7 | ||
|
|
b2cb1de95e | ||
|
|
190c2316d7 | ||
|
|
f6c352281a | ||
|
|
66dfe89936 | ||
|
|
8b3942ca49 | ||
|
|
9d35213bd5 | ||
|
|
3e586e615d | ||
|
|
a4f950e093 | ||
|
|
7c2441f6ff | ||
|
|
0a92af3eb2 | ||
|
|
666f3a0e20 | ||
|
|
06ef98b5cc | ||
|
|
79125bdd40 | ||
|
|
e8e8b085ac | ||
|
|
c9b81d003a | ||
|
|
23fa3c1e38 | ||
|
|
03fbd0baca | ||
|
|
d4fe24ef47 | ||
|
|
9c5220ee98 | ||
|
|
6491bce5a6 | ||
|
|
ca375dd79c | ||
|
|
e807573b54 | ||
|
|
c0f4c9743f | ||
|
|
5974d0b5da | ||
|
|
6244a8a5f7 | ||
|
|
5b9dae4529 | ||
|
|
a424374c44 | ||
|
|
b7fc2542e8 | ||
|
|
83a1598a1e | ||
|
|
b22b56a06b | ||
|
|
5020e4713c | ||
|
|
ee534a740e | ||
|
|
48cb45b7a8 | ||
|
|
91b74822e9 | ||
|
|
287eef5085 | ||
|
|
45d359c84a | ||
|
|
6049e5d4e8 | ||
|
|
dfd377f89e | ||
|
|
37e6c52c14 | ||
|
|
d6a7f4d88f | ||
|
|
239cda0a90 | ||
|
|
4a821e425b | ||
|
|
e1a2f0c204 | ||
|
|
c70860c733 | ||
|
|
05e71e033f | ||
|
|
5164ec2eb9 | ||
|
|
be18dac4f9 | ||
|
|
bb126c242f | ||
|
|
e27780a856 | ||
|
|
196ec51751 | ||
|
|
86abf9e64c | ||
|
|
9d8be578e3 | ||
|
|
b3aa800082 | ||
|
|
501674a778 | ||
|
|
5ff6ae79d8 | ||
|
|
e518a869ab | ||
|
|
43927a62f3 | ||
|
|
335980c8d8 | ||
|
|
ca3ee378db | ||
|
|
c05bc1068a | ||
|
|
2e3164636d | ||
|
|
c34e07fc40 | ||
|
|
6022122a61 | ||
|
|
f65f5e4b46 | ||
|
|
dee17733a0 | ||
|
|
cddda1e64e | ||
|
|
f7b873db03 | ||
|
|
792bc70d0a | ||
|
|
185491b061 | ||
|
|
3af8a43480 | ||
|
|
fd78406b29 | ||
|
|
4758b258a3 | ||
|
|
015e2b3b88 | ||
|
|
e184c9cb61 | ||
|
|
9004a01183 | ||
|
|
dd65ba3d9e | ||
|
|
bba616a18f | ||
|
|
aa0f8d2981 | ||
|
|
2511d6ffa9 | ||
|
|
27329457be | ||
|
|
7189f3d526 | ||
|
|
58e7589c9d | ||
|
|
d60f4b5ded | ||
|
|
4c2ec094f6 | ||
|
|
395ecaff5b | ||
|
|
c39506ef7d | ||
|
|
eb90d479e2 | ||
|
|
b92a73f5ea | ||
|
|
ad121f3059 | ||
|
|
70e4c5a44e | ||
|
|
b5a46b7b59 | ||
|
|
f1a97cd166 | ||
|
|
0774508093 | ||
|
|
0664ce6b94 | ||
|
|
407c779c52 | ||
|
|
c60f13f23f | ||
|
|
37d912ef01 | ||
|
|
d3de89c017 | ||
|
|
cb22af25c6 | ||
|
|
a534b94495 | ||
|
|
6262b4ff0b | ||
|
|
84ecd7ab2c | ||
|
|
1a5428445a | ||
|
|
ac8e991ca0 | ||
|
|
83a0331472 | ||
|
|
cce31e2971 | ||
|
|
0adf7d6e77 | ||
|
|
295f8b557e | ||
|
|
bb2c5c3161 | ||
|
|
0018f36a36 | ||
|
|
857de84f49 | ||
|
|
9630f2242a | ||
|
|
1fe125867c | ||
|
|
0737893240 | ||
|
|
282fe3d348 | ||
|
|
b5d83640ae | ||
|
|
2823d3ad21 | ||
|
|
00b93bfe86 | ||
|
|
84c253d887 | ||
|
|
2ab5a702c9 | ||
|
|
11d9cdf24e | ||
|
|
b1de41619b | ||
|
|
18a4881a51 | ||
|
|
de76a168c0 | ||
|
|
bcc13a742d | ||
|
|
23b584f4bf | ||
|
|
074b7c1ff5 | ||
|
|
c04e9ed914 | ||
|
|
1f1b126e79 | ||
|
|
9b0dd80f13 | ||
|
|
b0807478f2 | ||
|
|
11dfa58ecd | ||
|
|
412f396e0a | ||
|
|
8100d43ff2 | ||
|
|
bc96acef48 | ||
|
|
6e9876b61a | ||
|
|
32253ca4f7 | ||
|
|
4c6be5e283 | ||
|
|
69178fd7bd | ||
|
|
cd4432c14b | ||
|
|
0e47172aec | ||
|
|
efe18ae8b2 | ||
|
|
d5e13df4fe | ||
|
|
b0e84d74f2 | ||
|
|
28526b591c | ||
|
|
51e6a6edb1 | ||
|
|
c1b132a29e | ||
|
|
e2530e7d57 | ||
|
|
f2fcb1599b | ||
|
|
160eafa0c9 | ||
|
|
c2bc0f1368 | ||
|
|
27d27fff81 | ||
|
|
66e8f0ce18 | ||
|
|
0ceae8361b | ||
|
|
5e52fded83 | ||
|
|
0a7d07b4b6 | ||
|
|
34b5f483d7 | ||
|
|
9d04a1bc52 | ||
|
|
b25c0aaa00 | ||
|
|
652b93ea45 | ||
|
|
ccb7726511 | ||
|
|
c514a4e451 | ||
|
|
d841bd6890 | ||
|
|
ff54e10ab2 | ||
|
|
718e562741 | ||
|
|
ce05e2a939 | ||
|
|
90831d3084 | ||
|
|
2c928dead5 | ||
|
|
3ffe147664 | ||
|
|
3373b0f47c | ||
|
|
b29db04560 | ||
|
|
f964a0362e | ||
|
|
537b23dfae | ||
|
|
48cf3528b4 | ||
|
|
3c4b9d32c9 | ||
|
|
fd8361ae2c | ||
|
|
33cadaa932 | ||
|
|
cc126eb8b4 | ||
|
|
c996b3f6aa | ||
|
|
e2b406a300 | ||
|
|
c2c69da603 | ||
|
|
4e51348ff2 | ||
|
|
3257b82706 | ||
|
|
c98d764daa | ||
|
|
0448429a9f | ||
|
|
b948ac6125 | ||
|
|
3acc09ea16 | ||
|
|
82d0d0de9d | ||
|
|
f9cfc4d087 | ||
|
|
7d68ff455b | ||
|
|
ddf4881971 | ||
|
|
9ad4944142 | ||
|
|
7f33ea76a4 | ||
|
|
1140c29384 | ||
|
|
2441a62f39 | ||
|
|
c26a231fc1 | ||
|
|
2fb2315037 | ||
|
|
a9e475481a | ||
|
|
826d7c4dc3 | ||
|
|
b7f4b37f66 | ||
|
|
193d691bfe | ||
|
|
a359bc581c | ||
|
|
9a28ff025a | ||
|
|
f1c7050700 | ||
|
|
9391c27b9e | ||
|
|
4c54de092f | ||
|
|
690c482a43 | ||
|
|
ad2d857c6f | ||
|
|
07ee59d2ef | ||
|
|
bec4617d0a | ||
|
|
94916f8305 | ||
|
|
44de651be3 | ||
|
|
bdcba9c642 | ||
|
|
c172f75f1a | ||
|
|
ec492fa13a | ||
|
|
702659959c | ||
|
|
fef332a591 |
17
.env
@@ -4,7 +4,7 @@
|
||||
|
||||
#### Prowler UI Configuration ####
|
||||
PROWLER_UI_VERSION="stable"
|
||||
AUTH_URL=http://localhost:3000
|
||||
SITE_URL=http://localhost:3000
|
||||
API_BASE_URL=http://prowler-api:8080/api/v1
|
||||
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
|
||||
AUTH_TRUST_HOST=true
|
||||
@@ -33,10 +33,10 @@ VALKEY_DB=0
|
||||
# API scan settings
|
||||
|
||||
# The path to the directory where scan output should be stored
|
||||
DJANGO_TMP_OUTPUT_DIRECTORY="/tmp/prowler_api_output"
|
||||
DJANGO_TMP_OUTPUT_DIRECTORY = "/tmp/prowler_api_output"
|
||||
|
||||
# The maximum number of findings to process in a single batch
|
||||
DJANGO_FINDINGS_BATCH_SIZE=1000
|
||||
DJANGO_FINDINGS_BATCH_SIZE = 1000
|
||||
|
||||
# The AWS access key to be used when uploading scan output to an S3 bucket
|
||||
# If left empty, default AWS credentials resolution behavior will be used
|
||||
@@ -123,13 +123,4 @@ SENTRY_ENVIRONMENT=local
|
||||
SENTRY_RELEASE=local
|
||||
|
||||
#### Prowler release version ####
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.5.0
|
||||
|
||||
# Social login credentials
|
||||
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
|
||||
SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
|
||||
SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""
|
||||
|
||||
SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
|
||||
SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
|
||||
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.4.3
|
||||
|
||||
63
.github/dependabot.yml
vendored
@@ -16,17 +16,16 @@ updates:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# - package-ecosystem: "pip"
|
||||
# directory: "/api"
|
||||
# schedule:
|
||||
# interval: "daily"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: master
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "pip"
|
||||
# - "component/api"
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/api"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
- "component/api"
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
@@ -38,17 +37,16 @@ updates:
|
||||
- "dependencies"
|
||||
- "github_actions"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# - package-ecosystem: "npm"
|
||||
# directory: "/ui"
|
||||
# schedule:
|
||||
# interval: "daily"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: master
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "npm"
|
||||
# - "component/ui"
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/ui"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "npm"
|
||||
- "component/ui"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "/"
|
||||
@@ -94,18 +92,17 @@ updates:
|
||||
- "docker"
|
||||
- "v4"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# v3
|
||||
# - package-ecosystem: "pip"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "monthly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v3
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "pip"
|
||||
# - "v3"
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: v3
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
- "v3"
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
|
||||
@@ -61,7 +61,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
@@ -70,18 +70,18 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
# Comment the following line for testing
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
# Set push: false for testing
|
||||
@@ -94,7 +94,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
@@ -106,7 +106,7 @@ jobs:
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
6
.github/workflows/api-codeql.yml
vendored
@@ -44,16 +44,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/api-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
18
.github/workflows/api-pull-request.yml
vendored
@@ -71,11 +71,11 @@ jobs:
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
|
||||
uses: tj-actions/changed-files@v45
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: |
|
||||
@@ -90,11 +90,11 @@ jobs:
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==2.1.1
|
||||
pipx install poetry==1.8.5
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
@@ -103,7 +103,7 @@ jobs:
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry install
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
@@ -167,7 +167,7 @@ jobs:
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5.4.0
|
||||
uses: codecov/codecov-action@v5
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -175,11 +175,11 @@ jobs:
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Build Container
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.API_WORKING_DIR }}
|
||||
push: false
|
||||
|
||||
4
.github/workflows/backport.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
steps:
|
||||
- name: Check labels
|
||||
id: preview_label_check
|
||||
uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
|
||||
uses: docker://agilepathway/pull-request-label-checker:v1.6.55
|
||||
with:
|
||||
allow_failure: true
|
||||
prefix_mode: true
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
|
||||
- name: Backport Action
|
||||
if: steps.preview_label_check.outputs.label_check == 'success'
|
||||
uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
|
||||
uses: sorenlouv/backport-github-action@v9.5.1
|
||||
with:
|
||||
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
|
||||
|
||||
@@ -17,7 +17,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Leave PR comment with the Prowler Documentation URI
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
issue-number: ${{ env.PR_NUMBER }}
|
||||
body: |
|
||||
|
||||
2
.github/workflows/conventional-commit.yml
vendored
@@ -18,6 +18,6 @@ jobs:
|
||||
steps:
|
||||
- name: conventional-commit-check
|
||||
id: conventional-commit-check
|
||||
uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
|
||||
uses: agenthunt/conventional-commit-checker-action@v2.0.0
|
||||
with:
|
||||
pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
|
||||
|
||||
4
.github/workflows/find-secrets.yml
vendored
@@ -7,11 +7,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: TruffleHog OSS
|
||||
uses: trufflesecurity/trufflehog@ded5f45b92c00939718787ce586b520bbe795f3b # v3.88.18
|
||||
uses: trufflesecurity/trufflehog@v3.88.14
|
||||
with:
|
||||
path: ./
|
||||
base: ${{ github.event.repository.default_branch }}
|
||||
|
||||
2
.github/workflows/labeler.yml
vendored
@@ -14,4 +14,4 @@ jobs:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
- uses: actions/labeler@v5
|
||||
|
||||
@@ -59,10 +59,10 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
@@ -108,13 +108,13 @@ jobs:
|
||||
esac
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
@@ -123,11 +123,11 @@ jobs:
|
||||
AWS_REGION: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
push: true
|
||||
tags: |
|
||||
@@ -140,7 +140,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
# Use local context to get changes
|
||||
# https://github.com/docker/build-push-action#path-context
|
||||
|
||||
6
.github/workflows/sdk-codeql.yml
vendored
@@ -50,16 +50,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/sdk-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
12
.github/workflows/sdk-pull-request.yml
vendored
@@ -21,11 +21,11 @@ jobs:
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
|
||||
uses: tj-actions/changed-files@v45
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
@@ -46,11 +46,11 @@ jobs:
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==2.1.1
|
||||
pipx install poetry==1.8.5
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
@@ -58,7 +58,7 @@ jobs:
|
||||
- name: Install dependencies
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry install
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5.4.0
|
||||
uses: codecov/codecov-action@v5
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
|
||||
4
.github/workflows/sdk-pypi-release.yml
vendored
@@ -64,14 +64,14 @@ jobs:
|
||||
;;
|
||||
esac
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pipx install poetry==1.8.5
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
cache: ${{ env.CACHE }}
|
||||
|
||||
@@ -23,12 +23,12 @@ jobs:
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ env.GITHUB_BRANCH }}
|
||||
|
||||
- name: setup python
|
||||
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.9 #install the python needed
|
||||
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
pip install boto3
|
||||
|
||||
- name: Configure AWS Credentials -- DEV
|
||||
uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_DEV }}
|
||||
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
|
||||
@@ -50,13 +50,12 @@ jobs:
|
||||
|
||||
# Create pull request
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
commit-message: "feat(regions_update): Update regions for AWS services"
|
||||
branch: "aws-services-regions-updated-${{ github.sha }}"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
|
||||
title: "chore(regions_update): Changes in regions for AWS services"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
@@ -61,7 +61,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
@@ -70,18 +70,18 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
# Comment the following line for testing
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
@@ -96,7 +96,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
@@ -110,7 +110,7 @@ jobs:
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
6
.github/workflows/ui-codeql.yml
vendored
@@ -44,16 +44,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/ui-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
10
.github/workflows/ui-pull-request.yml
vendored
@@ -27,11 +27,11 @@ jobs:
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Setup Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
- name: Install dependencies
|
||||
@@ -46,11 +46,11 @@ jobs:
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Build Container
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ env.UI_WORKING_DIR }}
|
||||
# Always build using `prod` target
|
||||
|
||||
1
.gitignore
vendored
@@ -50,7 +50,6 @@ junit-reports/
|
||||
# .env
|
||||
ui/.env*
|
||||
api/.env*
|
||||
.env.local
|
||||
|
||||
# Coverage
|
||||
.coverage*
|
||||
|
||||
@@ -59,7 +59,7 @@ repos:
|
||||
args: ["--ignore=E266,W503,E203,E501,W605"]
|
||||
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 2.1.1
|
||||
rev: 1.8.0
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
name: API - poetry-check
|
||||
@@ -68,7 +68,7 @@ repos:
|
||||
|
||||
- id: poetry-lock
|
||||
name: API - poetry-lock
|
||||
args: ["--directory=./api"]
|
||||
args: ["--no-update", "--directory=./api"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-check
|
||||
@@ -78,7 +78,7 @@ repos:
|
||||
|
||||
- id: poetry-lock
|
||||
name: SDK - poetry-lock
|
||||
args: ["--directory=./"]
|
||||
args: ["--no-update", "--directory=./"]
|
||||
pass_filenames: false
|
||||
|
||||
|
||||
|
||||
24
Dockerfile
@@ -4,7 +4,7 @@ LABEL maintainer="https://github.com/prowler-cloud/prowler"
|
||||
|
||||
# Update system dependencies and install essential tools
|
||||
#hadolint ignore=DL3018
|
||||
RUN apk --no-cache upgrade && apk --no-cache add curl git gcc python3-dev musl-dev linux-headers
|
||||
RUN apk --no-cache upgrade && apk --no-cache add curl git
|
||||
|
||||
# Create non-root user
|
||||
RUN mkdir -p /home/prowler && \
|
||||
@@ -18,25 +18,21 @@ WORKDIR /home/prowler
|
||||
COPY prowler/ /home/prowler/prowler/
|
||||
COPY dashboard/ /home/prowler/dashboard/
|
||||
COPY pyproject.toml /home/prowler
|
||||
COPY README.md /home/prowler/
|
||||
COPY README.md /home/prowler
|
||||
|
||||
# Install Python dependencies
|
||||
ENV HOME='/home/prowler'
|
||||
ENV PATH="${HOME}/.local/bin:${PATH}"
|
||||
#hadolint ignore=DL3013
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
|
||||
# By default poetry does not compile Python source files to bytecode during installation.
|
||||
# This speeds up the installation process, but the first execution may take a little more
|
||||
# time because Python then compiles source files to bytecode automatically. If you want to
|
||||
# compile source files to bytecode during installation, you can use the --compile option
|
||||
RUN poetry install --compile && \
|
||||
rm -rf ~/.cache/pip
|
||||
ENV PATH="$HOME/.local/bin:$PATH"
|
||||
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
|
||||
pip install --no-cache-dir .
|
||||
|
||||
# Remove deprecated dash dependencies
|
||||
RUN pip uninstall dash-html-components -y && \
|
||||
pip uninstall dash-core-components -y
|
||||
|
||||
# Remove Prowler directory and build files
|
||||
USER 0
|
||||
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
|
||||
|
||||
USER prowler
|
||||
ENTRYPOINT ["poetry", "run", "prowler"]
|
||||
ENTRYPOINT ["prowler"]
|
||||
|
||||
10
README.md
@@ -72,9 +72,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
|
||||
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|
||||
|---|---|---|---|---|
|
||||
| AWS | 564 | 82 | 33 | 10 |
|
||||
| GCP | 77 | 13 | 6 | 3 |
|
||||
| Azure | 140 | 18 | 7 | 3 |
|
||||
| Kubernetes | 83 | 7 | 4 | 7 |
|
||||
| GCP | 77 | 13 | 5 | 3 |
|
||||
| Azure | 140 | 18 | 6 | 3 |
|
||||
| Kubernetes | 83 | 7 | 2 | 7 |
|
||||
| Microsoft365 | 5 | 2 | 1 | 0 |
|
||||
|
||||
> You can list the checks, services, compliance frameworks and categories with `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
|
||||
@@ -109,7 +109,7 @@ docker compose up -d
|
||||
**Requirements**
|
||||
|
||||
* `git` installed.
|
||||
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
|
||||
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
|
||||
|
||||
@@ -212,7 +212,7 @@ git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
eval $(poetry env activate)
|
||||
poetry install
|
||||
python prowler-cli.py -v
|
||||
python prowler.py -v
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
|
||||
|
||||
@@ -53,3 +53,6 @@ DJANGO_GOOGLE_OAUTH_CALLBACK_URL=""
|
||||
DJANGO_GITHUB_OAUTH_CLIENT_ID=""
|
||||
DJANGO_GITHUB_OAUTH_CLIENT_SECRET=""
|
||||
DJANGO_GITHUB_OAUTH_CALLBACK_URL=""
|
||||
|
||||
# Deletion Task Batch Size
|
||||
DJANGO_DELETION_BATCH_SIZE=5000
|
||||
|
||||
@@ -4,12 +4,18 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
---
|
||||
|
||||
## [v1.6.0] (Prowler UNRELEASED)
|
||||
## [v1.5.3] (Prowler v5.4.3)
|
||||
|
||||
### Added
|
||||
### Fixed
|
||||
- Added duplicated scheduled scans handling ([#7401])(https://github.com/prowler-cloud/prowler/pull/7401).
|
||||
- Added environment variable to configure the deletion task batch size ([#7423])(https://github.com/prowler-cloud/prowler/pull/7423).
|
||||
|
||||
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).
|
||||
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289).
|
||||
---
|
||||
|
||||
## [v1.5.2] (Prowler v5.4.2)
|
||||
|
||||
### Changed
|
||||
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349).
|
||||
|
||||
---
|
||||
|
||||
@@ -41,6 +47,6 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
|
||||
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
|
||||
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
|
||||
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
|
||||
- Increase the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
|
||||
|
||||
---
|
||||
|
||||
@@ -21,8 +21,7 @@ COPY src/backend/ ./backend/
|
||||
|
||||
ENV PATH="/home/prowler/.local/bin:$PATH"
|
||||
|
||||
# Add `--no-root` to avoid installing the current project as a package
|
||||
RUN poetry install --no-root && \
|
||||
RUN poetry install && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
COPY docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
|
||||
368
api/poetry.lock
generated
@@ -2,42 +2,39 @@
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
requires = ["poetry-core"]
|
||||
|
||||
[project]
|
||||
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
|
||||
dependencies = [
|
||||
"celery[pytest] (>=5.4.0,<6.0.0)",
|
||||
"dj-rest-auth[with_social,jwt] (==7.0.1)",
|
||||
"django==5.1.7",
|
||||
"django-celery-beat (>=2.7.0,<3.0.0)",
|
||||
"django-celery-results (>=2.5.1,<3.0.0)",
|
||||
"django-cors-headers==4.4.0",
|
||||
"django-environ==0.11.2",
|
||||
"django-filter==24.3",
|
||||
"django-guid==3.5.0",
|
||||
"django-postgres-extra (>=2.0.8,<3.0.0)",
|
||||
"djangorestframework==3.15.2",
|
||||
"djangorestframework-jsonapi==7.0.2",
|
||||
"djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
|
||||
"drf-nested-routers (>=0.94.1,<1.0.0)",
|
||||
"drf-spectacular==0.27.2",
|
||||
"drf-spectacular-jsonapi==0.5.1",
|
||||
"gunicorn==23.0.0",
|
||||
"prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
|
||||
"psycopg2-binary==2.9.9",
|
||||
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
|
||||
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
|
||||
"uuid6==2024.7.10"
|
||||
]
|
||||
[tool.poetry]
|
||||
authors = ["Prowler Team"]
|
||||
description = "Prowler's API (Django/DRF)"
|
||||
license = "Apache-2.0"
|
||||
name = "prowler-api"
|
||||
package-mode = false
|
||||
# Needed for the SDK compatibility
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.6.0"
|
||||
version = "1.5.3"
|
||||
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
[tool.poetry.dependencies]
|
||||
celery = {extras = ["pytest"], version = "^5.4.0"}
|
||||
dj-rest-auth = {extras = ["with_social", "jwt"], version = "7.0.1"}
|
||||
django = "5.1.5"
|
||||
django-celery-beat = "^2.7.0"
|
||||
django-celery-results = "^2.5.1"
|
||||
django-cors-headers = "4.4.0"
|
||||
django-environ = "0.11.2"
|
||||
django-filter = "24.3"
|
||||
django-guid = "3.5.0"
|
||||
django-postgres-extra = "^2.0.8"
|
||||
djangorestframework = "3.15.2"
|
||||
djangorestframework-jsonapi = "7.0.2"
|
||||
djangorestframework-simplejwt = "^5.3.1"
|
||||
drf-nested-routers = "^0.94.1"
|
||||
drf-spectacular = "0.27.2"
|
||||
drf-spectacular-jsonapi = "0.5.1"
|
||||
gunicorn = "23.0.0"
|
||||
prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "v5.4"}
|
||||
psycopg2-binary = "2.9.9"
|
||||
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
|
||||
# Needed for prowler compatibility
|
||||
python = ">=3.11,<3.13"
|
||||
sentry-sdk = {extras = ["django"], version = "^2.20.0"}
|
||||
uuid6 = "2024.7.10"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
bandit = "1.7.9"
|
||||
@@ -57,3 +54,6 @@ ruff = "0.5.0"
|
||||
safety = "3.2.9"
|
||||
tqdm = "4.67.1"
|
||||
vulture = "2.14"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
@@ -30,10 +30,6 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
with transaction.atomic(using=MainRouter.admin_db):
|
||||
user = super().save_user(request, sociallogin, form)
|
||||
user.save(using=MainRouter.admin_db)
|
||||
social_account_name = sociallogin.account.extra_data.get("name")
|
||||
if social_account_name:
|
||||
user.name = social_account_name
|
||||
user.save(using=MainRouter.admin_db)
|
||||
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(
|
||||
name=f"{user.email.split('@')[0]} default tenant"
|
||||
|
||||
@@ -6,6 +6,7 @@ from datetime import datetime, timedelta, timezone
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import BaseUserManager
|
||||
from django.db import connection, models, transaction
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from psycopg2 import connect as psycopg2_connect
|
||||
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
@@ -105,11 +106,12 @@ def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
|
||||
return "".join(secrets.choice(symbols or _symbols) for _ in range(length))
|
||||
|
||||
|
||||
def batch_delete(queryset, batch_size=5000):
|
||||
def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_SIZE):
|
||||
"""
|
||||
Deletes objects in batches and returns the total number of deletions and a summary.
|
||||
|
||||
Args:
|
||||
tenant_id (str): Tenant ID the queryset belongs to.
|
||||
queryset (QuerySet): The queryset of objects to delete.
|
||||
batch_size (int): The number of objects to delete in each batch.
|
||||
|
||||
@@ -120,15 +122,16 @@ def batch_delete(queryset, batch_size=5000):
|
||||
deletion_summary = {}
|
||||
|
||||
while True:
|
||||
# Get a batch of IDs to delete
|
||||
batch_ids = set(
|
||||
queryset.values_list("id", flat=True).order_by("id")[:batch_size]
|
||||
)
|
||||
if not batch_ids:
|
||||
# No more objects to delete
|
||||
break
|
||||
with rls_transaction(tenant_id, POSTGRES_TENANT_VAR):
|
||||
# Get a batch of IDs to delete
|
||||
batch_ids = set(
|
||||
queryset.values_list("id", flat=True).order_by("id")[:batch_size]
|
||||
)
|
||||
if not batch_ids:
|
||||
# No more objects to delete
|
||||
break
|
||||
|
||||
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
|
||||
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
|
||||
|
||||
total_deleted += deleted_count
|
||||
for model_label, count in deleted_info.items():
|
||||
@@ -137,6 +140,18 @@ def batch_delete(queryset, batch_size=5000):
|
||||
return total_deleted, deletion_summary
|
||||
|
||||
|
||||
def delete_related_daily_task(provider_id: str):
|
||||
"""
|
||||
Deletes the periodic task associated with a specific provider.
|
||||
|
||||
Args:
|
||||
provider_id (str): The unique identifier for the provider
|
||||
whose related periodic task should be deleted.
|
||||
"""
|
||||
task_name = f"scan-perform-scheduled-{provider_id}"
|
||||
PeriodicTask.objects.filter(name=task_name).delete()
|
||||
|
||||
|
||||
# Postgres Enums
|
||||
|
||||
|
||||
@@ -318,15 +333,3 @@ class InvitationStateEnum(EnumType):
|
||||
class InvitationStateEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("invitation_state", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Integration type
|
||||
|
||||
|
||||
class IntegrationTypeEnum(EnumType):
|
||||
enum_type_name = "integration_type"
|
||||
|
||||
|
||||
class IntegrationTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("integration_type", *args, **kwargs)
|
||||
|
||||
@@ -24,7 +24,6 @@ from api.db_utils import (
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
Invitation,
|
||||
Membership,
|
||||
PermissionChoices,
|
||||
@@ -649,19 +648,3 @@ class ServiceOverviewFilter(ScanSummaryFilter):
|
||||
}
|
||||
)
|
||||
return super().is_valid()
|
||||
|
||||
|
||||
class IntegrationFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
|
||||
integration_type__in = ChoiceInFilter(
|
||||
choices=Integration.IntegrationChoices.choices,
|
||||
field_name="integration_type",
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
}
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import IntegrationTypeEnum, PostgresEnumMigration, register_enum
|
||||
from api.models import Integration
|
||||
|
||||
IntegrationTypeEnumMigration = PostgresEnumMigration(
|
||||
enum_name="integration_type",
|
||||
enum_values=tuple(
|
||||
integration_type[0]
|
||||
for integration_type in Integration.IntegrationChoices.choices
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0012_scan_report_output"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
IntegrationTypeEnumMigration.create_enum_type,
|
||||
reverse_code=IntegrationTypeEnumMigration.drop_enum_type,
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(register_enum, enum_class=IntegrationTypeEnum),
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
]
|
||||
@@ -1,131 +0,0 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0013_integrations_enum"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Integration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("enabled", models.BooleanField(default=False)),
|
||||
("connected", models.BooleanField(blank=True, null=True)),
|
||||
(
|
||||
"connection_last_checked_at",
|
||||
models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
(
|
||||
"integration_type",
|
||||
api.db_utils.IntegrationTypeEnumField(
|
||||
choices=[
|
||||
("amazon_s3", "Amazon S3"),
|
||||
("saml", "SAML"),
|
||||
("aws_security_hub", "AWS Security Hub"),
|
||||
("jira", "JIRA"),
|
||||
("slack", "Slack"),
|
||||
]
|
||||
),
|
||||
),
|
||||
("configuration", models.JSONField(default=dict)),
|
||||
("_credentials", models.BinaryField(db_column="credentials")),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={"db_table": "integrations", "abstract": False},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="integration",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_integration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="IntegrationProviderRelationship",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"integration",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="api.integration",
|
||||
),
|
||||
),
|
||||
(
|
||||
"provider",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.provider"
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "integration_provider_mappings",
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("integration_id", "provider_id"),
|
||||
name="unique_integration_provider_rel",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="IntegrationProviderRelationship",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_integrationproviderrelationship",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="integration",
|
||||
name="providers",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="integrations",
|
||||
through="api.IntegrationProviderRelationship",
|
||||
to="api.provider",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -21,7 +21,6 @@ from uuid6 import uuid7
|
||||
from api.db_utils import (
|
||||
CustomUserManager,
|
||||
FindingDeltaEnumField,
|
||||
IntegrationTypeEnumField,
|
||||
InvitationStateEnumField,
|
||||
MemberRoleEnumField,
|
||||
ProviderEnumField,
|
||||
@@ -1139,80 +1138,3 @@ class ScanSummary(RowLevelSecurityProtectedModel):
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "scan-summaries"
|
||||
|
||||
|
||||
class Integration(RowLevelSecurityProtectedModel):
|
||||
class IntegrationChoices(models.TextChoices):
|
||||
S3 = "amazon_s3", _("Amazon S3")
|
||||
SAML = "saml", _("SAML")
|
||||
AWS_SECURITY_HUB = "aws_security_hub", _("AWS Security Hub")
|
||||
JIRA = "jira", _("JIRA")
|
||||
SLACK = "slack", _("Slack")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
enabled = models.BooleanField(default=False)
|
||||
connected = models.BooleanField(null=True, blank=True)
|
||||
connection_last_checked_at = models.DateTimeField(null=True, blank=True)
|
||||
integration_type = IntegrationTypeEnumField(choices=IntegrationChoices.choices)
|
||||
configuration = models.JSONField(default=dict)
|
||||
_credentials = models.BinaryField(db_column="credentials")
|
||||
|
||||
providers = models.ManyToManyField(
|
||||
Provider,
|
||||
related_name="integrations",
|
||||
through="IntegrationProviderRelationship",
|
||||
blank=True,
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "integrations"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "integrations"
|
||||
|
||||
@property
|
||||
def credentials(self):
|
||||
if isinstance(self._credentials, memoryview):
|
||||
encrypted_bytes = self._credentials.tobytes()
|
||||
elif isinstance(self._credentials, str):
|
||||
encrypted_bytes = self._credentials.encode()
|
||||
else:
|
||||
encrypted_bytes = self._credentials
|
||||
decrypted_data = fernet.decrypt(encrypted_bytes)
|
||||
return json.loads(decrypted_data.decode())
|
||||
|
||||
@credentials.setter
|
||||
def credentials(self, value):
|
||||
encrypted_data = fernet.encrypt(json.dumps(value).encode())
|
||||
self._credentials = encrypted_data
|
||||
|
||||
|
||||
class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
integration = models.ForeignKey(Integration, on_delete=models.CASCADE)
|
||||
provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
db_table = "integration_provider_mappings"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["integration_id", "provider_id"],
|
||||
name="unique_integration_provider_rel",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
@@ -2,7 +2,8 @@ from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import DEFAULT_DB_ALIAS, models
|
||||
from django.db import DEFAULT_DB_ALIAS
|
||||
from django.db import models
|
||||
from django.db.backends.ddl_references import Statement, Table
|
||||
|
||||
from api.db_utils import DB_USER, POSTGRES_TENANT_VAR
|
||||
@@ -58,11 +59,11 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
|
||||
drop_sql_query = """
|
||||
ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
|
||||
ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
|
||||
REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
|
||||
REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
|
||||
"""
|
||||
|
||||
drop_policy_sql_query = """
|
||||
DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} ON %(table_name)s;
|
||||
DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -87,7 +88,9 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
|
||||
f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
|
||||
)
|
||||
|
||||
full_create_sql_query = f"{self.rls_sql_query}{policy_queries}{grant_queries}"
|
||||
full_create_sql_query = (
|
||||
f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
|
||||
)
|
||||
|
||||
table_name = model._meta.db_table
|
||||
if self.partition_name:
|
||||
@@ -104,20 +107,16 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
|
||||
|
||||
def remove_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
field_column = schema_editor.quote_name(self.target_field)
|
||||
raw_table_name = model._meta.db_table
|
||||
table_name = raw_table_name
|
||||
if self.partition_name:
|
||||
raw_table_name = f"{raw_table_name}_{self.partition_name}"
|
||||
table_name = raw_table_name
|
||||
|
||||
full_drop_sql_query = (
|
||||
f"{self.drop_sql_query}"
|
||||
f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
|
||||
f"{''.join([self.drop_policy_sql_query.format(statement) for statement in self.statements])}"
|
||||
)
|
||||
table_name = model._meta.db_table
|
||||
if self.partition_name:
|
||||
table_name = f"{table_name}_{self.partition_name}"
|
||||
return Statement(
|
||||
full_drop_sql_query,
|
||||
table_name=Table(table_name, schema_editor.quote_name),
|
||||
raw_table_name=raw_table_name,
|
||||
field_column=field_column,
|
||||
db_user=DB_USER,
|
||||
partition_name=self.partition_name,
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
from celery import states
|
||||
from celery.signals import before_task_publish
|
||||
from config.celery import celery_app
|
||||
from django.db.models.signals import post_delete
|
||||
from django.dispatch import receiver
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from django_celery_results.backends.database import DatabaseBackend
|
||||
|
||||
from api.db_utils import delete_related_daily_task
|
||||
from api.models import Provider
|
||||
from config.celery import celery_app
|
||||
|
||||
|
||||
def create_task_result_on_publish(sender=None, headers=None, **kwargs): # noqa: F841
|
||||
@@ -31,5 +31,4 @@ before_task_publish.connect(
|
||||
@receiver(post_delete, sender=Provider)
|
||||
def delete_provider_scan_task(sender, instance, **kwargs): # noqa: F841
|
||||
# Delete the associated periodic task when the provider is deleted
|
||||
task_name = f"scan-perform-scheduled-{instance.id}"
|
||||
PeriodicTask.objects.filter(name=task_name).delete()
|
||||
delete_related_daily_task(instance.id)
|
||||
|
||||
@@ -131,9 +131,10 @@ class TestBatchDelete:
|
||||
return provider_count
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_batch_delete(self, create_test_providers):
|
||||
def test_batch_delete(self, tenants_fixture, create_test_providers):
|
||||
tenant_id = str(tenants_fixture[0].id)
|
||||
_, summary = batch_delete(
|
||||
Provider.objects.all(), batch_size=create_test_providers // 2
|
||||
tenant_id, Provider.objects.all(), batch_size=create_test_providers // 2
|
||||
)
|
||||
assert Provider.objects.all().count() == 0
|
||||
assert summary == {"api.Provider": create_test_providers}
|
||||
|
||||
@@ -1,19 +1,7 @@
|
||||
from unittest.mock import ANY, Mock, patch
|
||||
|
||||
import pytest
|
||||
from django.urls import reverse
|
||||
from rest_framework import status
|
||||
|
||||
from api.models import (
|
||||
Membership,
|
||||
ProviderGroup,
|
||||
ProviderGroupMembership,
|
||||
Role,
|
||||
RoleProviderGroupRelationship,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
from api.v1.serializers import TokenSerializer
|
||||
from unittest.mock import patch, ANY, Mock
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -316,96 +304,3 @@ class TestProviderViewSet:
|
||||
reverse("provider-connection", kwargs={"pk": provider.id})
|
||||
)
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestLimitedVisibility:
|
||||
TEST_EMAIL = "rbac@rbac.com"
|
||||
TEST_PASSWORD = "thisisapassword123"
|
||||
|
||||
@pytest.fixture
|
||||
def limited_admin_user(
|
||||
self, django_db_setup, django_db_blocker, tenants_fixture, providers_fixture
|
||||
):
|
||||
with django_db_blocker.unblock():
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
user = User.objects.create_user(
|
||||
name="testing",
|
||||
email=self.TEST_EMAIL,
|
||||
password=self.TEST_PASSWORD,
|
||||
)
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
|
||||
role = Role.objects.create(
|
||||
name="limited_visibility",
|
||||
tenant=tenant,
|
||||
manage_users=True,
|
||||
manage_account=True,
|
||||
manage_billing=True,
|
||||
manage_providers=True,
|
||||
manage_integrations=True,
|
||||
manage_scans=True,
|
||||
unlimited_visibility=False,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant=tenant,
|
||||
)
|
||||
|
||||
provider_group = ProviderGroup.objects.create(
|
||||
name="limited_visibility_group",
|
||||
tenant=tenant,
|
||||
)
|
||||
ProviderGroupMembership.objects.create(
|
||||
tenant=tenant,
|
||||
provider=provider,
|
||||
provider_group=provider_group,
|
||||
)
|
||||
|
||||
RoleProviderGroupRelationship.objects.create(
|
||||
tenant=tenant, role=role, provider_group=provider_group
|
||||
)
|
||||
|
||||
return user
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client_rbac_limited(
|
||||
self, limited_admin_user, tenants_fixture, client
|
||||
):
|
||||
client.user = limited_admin_user
|
||||
tenant_id = tenants_fixture[0].id
|
||||
serializer = TokenSerializer(
|
||||
data={
|
||||
"type": "tokens",
|
||||
"email": self.TEST_EMAIL,
|
||||
"password": self.TEST_PASSWORD,
|
||||
"tenant_id": tenant_id,
|
||||
}
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
def test_integrations(
|
||||
self, authenticated_client_rbac_limited, integrations_fixture, providers_fixture
|
||||
):
|
||||
# Integration 2 is related to provider1 and provider 2
|
||||
# This user cannot see provider 2
|
||||
integration = integrations_fixture[1]
|
||||
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse("integration-detail", kwargs={"pk": integration.id})
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert integration.providers.count() == 2
|
||||
assert (
|
||||
response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
|
||||
)
|
||||
|
||||
@@ -14,7 +14,6 @@ from django.urls import reverse
|
||||
from rest_framework import status
|
||||
|
||||
from api.models import (
|
||||
Integration,
|
||||
Invitation,
|
||||
Membership,
|
||||
Provider,
|
||||
@@ -40,14 +39,6 @@ def today_after_n_days(n_days: int) -> str:
|
||||
)
|
||||
|
||||
|
||||
class TestViewSet:
|
||||
def test_security_headers(self, client):
|
||||
response = client.get("/")
|
||||
assert response.headers["X-Content-Type-Options"] == "nosniff"
|
||||
assert response.headers["X-Frame-Options"] == "DENY"
|
||||
assert response.headers["Referrer-Policy"] == "strict-origin-when-cross-origin"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestUserViewSet:
|
||||
def test_users_list(self, authenticated_client, create_test_user):
|
||||
@@ -2209,12 +2200,9 @@ class TestScanViewSet:
|
||||
dummy_task.id = "dummy-task-id"
|
||||
dummy_task_data = {"id": dummy_task.id, "state": StateChoices.EXECUTING}
|
||||
|
||||
with (
|
||||
patch("api.v1.views.Task.objects.get", return_value=dummy_task),
|
||||
patch(
|
||||
"api.v1.views.TaskSerializer",
|
||||
return_value=type("DummySerializer", (), {"data": dummy_task_data}),
|
||||
),
|
||||
with patch("api.v1.views.Task.objects.get", return_value=dummy_task), patch(
|
||||
"api.v1.views.TaskSerializer",
|
||||
return_value=type("DummySerializer", (), {"data": dummy_task_data}),
|
||||
):
|
||||
url = reverse("scan-report", kwargs={"pk": scan.id})
|
||||
response = authenticated_client.get(url)
|
||||
@@ -4599,415 +4587,3 @@ class TestScheduleViewSet:
|
||||
reverse("schedule-daily"), data=json_payload, format="json"
|
||||
)
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestIntegrationViewSet:
|
||||
def test_integrations_list(self, authenticated_client, integrations_fixture):
|
||||
response = authenticated_client.get(reverse("integration-list"))
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) == len(integrations_fixture)
|
||||
|
||||
def test_integrations_retrieve(self, authenticated_client, integrations_fixture):
|
||||
integration1, *_ = integrations_fixture
|
||||
response = authenticated_client.get(
|
||||
reverse("integration-detail", kwargs={"pk": integration1.id}),
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert response.json()["data"]["id"] == str(integration1.id)
|
||||
assert (
|
||||
response.json()["data"]["attributes"]["configuration"]
|
||||
== integration1.configuration
|
||||
)
|
||||
|
||||
def test_integrations_invalid_retrieve(self, authenticated_client):
|
||||
response = authenticated_client.get(
|
||||
reverse(
|
||||
"integration-detail",
|
||||
kwargs={"pk": "f498b103-c760-4785-9a3e-e23fafbb7b02"},
|
||||
)
|
||||
)
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"include_values, expected_resources",
|
||||
[
|
||||
("providers", ["providers"]),
|
||||
],
|
||||
)
|
||||
def test_integrations_list_include(
|
||||
self,
|
||||
include_values,
|
||||
expected_resources,
|
||||
authenticated_client,
|
||||
integrations_fixture,
|
||||
):
|
||||
response = authenticated_client.get(
|
||||
reverse("integration-list"), {"include": include_values}
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) == len(integrations_fixture)
|
||||
assert "included" in response.json()
|
||||
|
||||
included_data = response.json()["included"]
|
||||
for expected_type in expected_resources:
|
||||
assert any(
|
||||
d.get("type") == expected_type for d in included_data
|
||||
), f"Expected type '{expected_type}' not found in included data"
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"integration_type, configuration, credentials",
|
||||
[
|
||||
# Amazon S3 - AWS credentials
|
||||
(
|
||||
Integration.IntegrationChoices.S3,
|
||||
{
|
||||
"bucket_name": "bucket-name",
|
||||
"output_directory": "output-directory",
|
||||
},
|
||||
{
|
||||
"role_arn": "arn:aws",
|
||||
"external_id": "external-id",
|
||||
},
|
||||
),
|
||||
# Amazon S3 - No credentials (AWS self-hosted)
|
||||
(
|
||||
Integration.IntegrationChoices.S3,
|
||||
{
|
||||
"bucket_name": "bucket-name",
|
||||
"output_directory": "output-directory",
|
||||
},
|
||||
{},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_integrations_create_valid(
|
||||
self,
|
||||
authenticated_client,
|
||||
providers_fixture,
|
||||
integration_type,
|
||||
configuration,
|
||||
credentials,
|
||||
):
|
||||
provider = Provider.objects.first()
|
||||
|
||||
data = {
|
||||
"data": {
|
||||
"type": "integrations",
|
||||
"attributes": {
|
||||
"integration_type": integration_type,
|
||||
"configuration": configuration,
|
||||
"credentials": credentials,
|
||||
},
|
||||
"relationships": {
|
||||
"providers": {
|
||||
"data": [{"type": "providers", "id": str(provider.id)}]
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
response = authenticated_client.post(
|
||||
reverse("integration-list"),
|
||||
data=json.dumps(data),
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
assert Integration.objects.count() == 1
|
||||
integration = Integration.objects.first()
|
||||
assert integration.configuration == data["data"]["attributes"]["configuration"]
|
||||
assert (
|
||||
integration.integration_type
|
||||
== data["data"]["attributes"]["integration_type"]
|
||||
)
|
||||
assert "credentials" not in response.json()["data"]["attributes"]
|
||||
assert (
|
||||
str(provider.id)
|
||||
== data["data"]["relationships"]["providers"]["data"][0]["id"]
|
||||
)
|
||||
|
||||
def test_integrations_create_valid_relationships(
|
||||
self,
|
||||
authenticated_client,
|
||||
providers_fixture,
|
||||
):
|
||||
provider1, provider2, *_ = providers_fixture
|
||||
|
||||
data = {
|
||||
"data": {
|
||||
"type": "integrations",
|
||||
"attributes": {
|
||||
"integration_type": Integration.IntegrationChoices.S3,
|
||||
"configuration": {
|
||||
"bucket_name": "bucket-name",
|
||||
"output_directory": "output-directory",
|
||||
},
|
||||
"credentials": {
|
||||
"role_arn": "arn:aws",
|
||||
"external_id": "external-id",
|
||||
},
|
||||
},
|
||||
"relationships": {
|
||||
"providers": {
|
||||
"data": [
|
||||
{"type": "providers", "id": str(provider1.id)},
|
||||
{"type": "providers", "id": str(provider2.id)},
|
||||
]
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
response = authenticated_client.post(
|
||||
reverse("integration-list"),
|
||||
data=json.dumps(data),
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
assert Integration.objects.first().providers.count() == 2
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"attributes, error_code, error_pointer",
|
||||
(
|
||||
[
|
||||
(
|
||||
{
|
||||
"integration_type": "whatever",
|
||||
"configuration": {
|
||||
"bucket_name": "bucket-name",
|
||||
"output_directory": "output-directory",
|
||||
},
|
||||
"credentials": {
|
||||
"role_arn": "arn:aws",
|
||||
"external_id": "external-id",
|
||||
},
|
||||
},
|
||||
"invalid_choice",
|
||||
"integration_type",
|
||||
),
|
||||
(
|
||||
{
|
||||
"integration_type": "amazon_s3",
|
||||
"configuration": {},
|
||||
"credentials": {
|
||||
"role_arn": "arn:aws",
|
||||
"external_id": "external-id",
|
||||
},
|
||||
},
|
||||
"required",
|
||||
"bucket_name",
|
||||
),
|
||||
(
|
||||
{
|
||||
"integration_type": "amazon_s3",
|
||||
"configuration": {
|
||||
"bucket_name": "bucket_name",
|
||||
"output_directory": "output_directory",
|
||||
"invalid_key": "invalid_value",
|
||||
},
|
||||
"credentials": {
|
||||
"role_arn": "arn:aws",
|
||||
"external_id": "external-id",
|
||||
},
|
||||
},
|
||||
"invalid",
|
||||
None,
|
||||
),
|
||||
(
|
||||
{
|
||||
"integration_type": "amazon_s3",
|
||||
"configuration": {
|
||||
"bucket_name": "bucket_name",
|
||||
"output_directory": "output_directory",
|
||||
},
|
||||
"credentials": {"invalid_key": "invalid_key"},
|
||||
},
|
||||
"invalid",
|
||||
None,
|
||||
),
|
||||
]
|
||||
),
|
||||
)
|
||||
def test_integrations_invalid_create(
|
||||
self,
|
||||
authenticated_client,
|
||||
attributes,
|
||||
error_code,
|
||||
error_pointer,
|
||||
):
|
||||
data = {
|
||||
"data": {
|
||||
"type": "integrations",
|
||||
"attributes": attributes,
|
||||
}
|
||||
}
|
||||
response = authenticated_client.post(
|
||||
reverse("integration-list"),
|
||||
data=json.dumps(data),
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert response.json()["errors"][0]["code"] == error_code
|
||||
assert (
|
||||
response.json()["errors"][0]["source"]["pointer"]
|
||||
== f"/data/attributes/{error_pointer}"
|
||||
if error_pointer
|
||||
else "/data"
|
||||
)
|
||||
|
||||
def test_integrations_partial_update(
|
||||
self, authenticated_client, integrations_fixture
|
||||
):
|
||||
integration, *_ = integrations_fixture
|
||||
data = {
|
||||
"data": {
|
||||
"type": "integrations",
|
||||
"id": str(integration.id),
|
||||
"attributes": {
|
||||
"credentials": {
|
||||
"aws_access_key_id": "new_value",
|
||||
},
|
||||
# integration_type is `amazon_s3`
|
||||
"configuration": {
|
||||
"bucket_name": "new_bucket_name",
|
||||
"output_directory": "new_output_directory",
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
response = authenticated_client.patch(
|
||||
reverse("integration-detail", kwargs={"pk": integration.id}),
|
||||
data=json.dumps(data),
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
integration.refresh_from_db()
|
||||
assert integration.credentials["aws_access_key_id"] == "new_value"
|
||||
assert integration.configuration["bucket_name"] == "new_bucket_name"
|
||||
assert integration.configuration["output_directory"] == "new_output_directory"
|
||||
|
||||
def test_integrations_partial_update_relationships(
|
||||
self, authenticated_client, integrations_fixture
|
||||
):
|
||||
integration, *_ = integrations_fixture
|
||||
data = {
|
||||
"data": {
|
||||
"type": "integrations",
|
||||
"id": str(integration.id),
|
||||
"attributes": {
|
||||
"credentials": {
|
||||
"aws_access_key_id": "new_value",
|
||||
},
|
||||
# integration_type is `amazon_s3`
|
||||
"configuration": {
|
||||
"bucket_name": "new_bucket_name",
|
||||
"output_directory": "new_output_directory",
|
||||
},
|
||||
},
|
||||
"relationships": {"providers": {"data": []}},
|
||||
}
|
||||
}
|
||||
|
||||
assert integration.providers.count() > 0
|
||||
response = authenticated_client.patch(
|
||||
reverse("integration-detail", kwargs={"pk": integration.id}),
|
||||
data=json.dumps(data),
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
integration.refresh_from_db()
|
||||
assert integration.providers.count() == 0
|
||||
|
||||
def test_integrations_partial_update_invalid_content_type(
|
||||
self, authenticated_client, integrations_fixture
|
||||
):
|
||||
integration, *_ = integrations_fixture
|
||||
response = authenticated_client.patch(
|
||||
reverse("integration-detail", kwargs={"pk": integration.id}),
|
||||
data={},
|
||||
)
|
||||
assert response.status_code == status.HTTP_415_UNSUPPORTED_MEDIA_TYPE
|
||||
|
||||
def test_integrations_partial_update_invalid_content(
|
||||
self, authenticated_client, integrations_fixture
|
||||
):
|
||||
integration, *_ = integrations_fixture
|
||||
data = {
|
||||
"data": {
|
||||
"type": "integrations",
|
||||
"id": str(integration.id),
|
||||
"attributes": {"invalid_config": "value"},
|
||||
}
|
||||
}
|
||||
response = authenticated_client.patch(
|
||||
reverse("integration-detail", kwargs={"pk": integration.id}),
|
||||
data=json.dumps(data),
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
|
||||
def test_integrations_delete(
|
||||
self,
|
||||
authenticated_client,
|
||||
integrations_fixture,
|
||||
):
|
||||
integration, *_ = integrations_fixture
|
||||
response = authenticated_client.delete(
|
||||
reverse("integration-detail", kwargs={"pk": integration.id})
|
||||
)
|
||||
assert response.status_code == status.HTTP_204_NO_CONTENT
|
||||
|
||||
def test_integrations_delete_invalid(self, authenticated_client):
|
||||
response = authenticated_client.delete(
|
||||
reverse(
|
||||
"integration-detail",
|
||||
kwargs={"pk": "e67d0283-440f-48d1-b5f8-38d0763474f4"},
|
||||
)
|
||||
)
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"filter_name, filter_value, expected_count",
|
||||
(
|
||||
[
|
||||
("inserted_at", TODAY, 2),
|
||||
("inserted_at.gte", "2024-01-01", 2),
|
||||
("inserted_at.lte", "2024-01-01", 0),
|
||||
("integration_type", Integration.IntegrationChoices.S3, 2),
|
||||
("integration_type", Integration.IntegrationChoices.SLACK, 0),
|
||||
(
|
||||
"integration_type__in",
|
||||
f"{Integration.IntegrationChoices.S3},{Integration.IntegrationChoices.SLACK}",
|
||||
2,
|
||||
),
|
||||
]
|
||||
),
|
||||
)
|
||||
def test_integrations_filters(
|
||||
self,
|
||||
authenticated_client,
|
||||
integrations_fixture,
|
||||
filter_name,
|
||||
filter_value,
|
||||
expected_count,
|
||||
):
|
||||
response = authenticated_client.get(
|
||||
reverse("integration-list"),
|
||||
{f"filter[{filter_name}]": filter_value},
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) == expected_count
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"filter_name",
|
||||
(
|
||||
[
|
||||
"invalid",
|
||||
]
|
||||
),
|
||||
)
|
||||
def test_integrations_filters_invalid(self, authenticated_client, filter_name):
|
||||
response = authenticated_client.get(
|
||||
reverse("integration-list"),
|
||||
{f"filter[{filter_name}]": "whatever"},
|
||||
)
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
|
||||
@@ -1,122 +0,0 @@
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework_json_api import serializers
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
|
||||
class BaseValidateSerializer(serializers.Serializer):
|
||||
def validate(self, data):
|
||||
if hasattr(self, "initial_data"):
|
||||
initial_data = set(self.initial_data.keys()) - {"id", "type"}
|
||||
unknown_keys = initial_data - set(self.fields.keys())
|
||||
if unknown_keys:
|
||||
raise ValidationError(f"Invalid fields: {unknown_keys}")
|
||||
return data
|
||||
|
||||
|
||||
# Integrations
|
||||
|
||||
|
||||
class S3ConfigSerializer(BaseValidateSerializer):
|
||||
bucket_name = serializers.CharField()
|
||||
output_directory = serializers.CharField()
|
||||
|
||||
class Meta:
|
||||
resource_name = "integrations"
|
||||
|
||||
|
||||
class AWSCredentialSerializer(BaseValidateSerializer):
|
||||
role_arn = serializers.CharField(required=False)
|
||||
external_id = serializers.CharField(required=False)
|
||||
role_session_name = serializers.CharField(required=False)
|
||||
session_duration = serializers.IntegerField(
|
||||
required=False, min_value=900, max_value=43200
|
||||
)
|
||||
aws_access_key_id = serializers.CharField(required=False)
|
||||
aws_secret_access_key = serializers.CharField(required=False)
|
||||
aws_session_token = serializers.CharField(required=False)
|
||||
|
||||
class Meta:
|
||||
resource_name = "integrations"
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"title": "AWS Credentials",
|
||||
"properties": {
|
||||
"role_arn": {
|
||||
"type": "string",
|
||||
"description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
|
||||
"assumption.",
|
||||
},
|
||||
"external_id": {
|
||||
"type": "string",
|
||||
"description": "An identifier to enhance security for role assumption.",
|
||||
},
|
||||
"aws_access_key_id": {
|
||||
"type": "string",
|
||||
"description": "The AWS access key ID. Only required if the environment lacks pre-configured "
|
||||
"AWS credentials.",
|
||||
},
|
||||
"aws_secret_access_key": {
|
||||
"type": "string",
|
||||
"description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
|
||||
"no AWS credentials are pre-configured.",
|
||||
},
|
||||
"aws_session_token": {
|
||||
"type": "string",
|
||||
"description": "The session token for temporary credentials, if applicable.",
|
||||
},
|
||||
"session_duration": {
|
||||
"type": "integer",
|
||||
"minimum": 900,
|
||||
"maximum": 43200,
|
||||
"default": 3600,
|
||||
"description": "The duration (in seconds) for the role session.",
|
||||
},
|
||||
"role_session_name": {
|
||||
"type": "string",
|
||||
"description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
|
||||
"The regex used to validate this parameter is a string of characters consisting of "
|
||||
"upper- and lower-case alphanumeric characters with no spaces. You can also include "
|
||||
"underscores or any of the following characters: =,.@-\n\n"
|
||||
"Examples:\n"
|
||||
"- MySession123\n"
|
||||
"- User_Session-1\n"
|
||||
"- Test.Session@2",
|
||||
"pattern": "^[a-zA-Z0-9=,.@_-]+$",
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
class IntegrationCredentialField(serializers.JSONField):
|
||||
pass
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"title": "Amazon S3",
|
||||
"properties": {
|
||||
"bucket_name": {
|
||||
"type": "string",
|
||||
"description": "The name of the S3 bucket where files will be stored.",
|
||||
},
|
||||
"output_directory": {
|
||||
"type": "string",
|
||||
"description": "The directory path within the bucket where files will be saved.",
|
||||
},
|
||||
},
|
||||
"required": ["bucket_name", "output_directory"],
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
class IntegrationConfigField(serializers.JSONField):
|
||||
pass
|
||||
@@ -16,8 +16,6 @@ from rest_framework_simplejwt.tokens import RefreshToken
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
IntegrationProviderRelationship,
|
||||
Invitation,
|
||||
InvitationRoleRelationship,
|
||||
Membership,
|
||||
@@ -36,12 +34,6 @@ from api.models import (
|
||||
UserRoleRelationship,
|
||||
)
|
||||
from api.rls import Tenant
|
||||
from api.v1.serializer_utils.integrations import (
|
||||
AWSCredentialSerializer,
|
||||
IntegrationConfigField,
|
||||
IntegrationCredentialField,
|
||||
S3ConfigSerializer,
|
||||
)
|
||||
|
||||
# Tokens
|
||||
|
||||
@@ -1614,8 +1606,8 @@ class RoleSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"manage_account",
|
||||
# Disable for the first release
|
||||
# "manage_billing",
|
||||
# "manage_integrations",
|
||||
# /Disable for the first release
|
||||
"manage_integrations",
|
||||
"manage_providers",
|
||||
"manage_scans",
|
||||
"permission_state",
|
||||
@@ -2021,201 +2013,3 @@ class ScheduleDailyCreateSerializer(serializers.Serializer):
|
||||
if unknown_keys:
|
||||
raise ValidationError(f"Invalid fields: {unknown_keys}")
|
||||
return data
|
||||
|
||||
|
||||
# Integrations
|
||||
|
||||
|
||||
class BaseWriteIntegrationSerializer(BaseWriteSerializer):
|
||||
@staticmethod
|
||||
def validate_integration_data(
|
||||
integration_type: str,
|
||||
providers: list[Provider], # noqa
|
||||
configuration: dict,
|
||||
credentials: dict,
|
||||
):
|
||||
if integration_type == Integration.IntegrationChoices.S3:
|
||||
config_serializer = S3ConfigSerializer
|
||||
credentials_serializers = [AWSCredentialSerializer]
|
||||
# TODO: This will be required for AWS Security Hub
|
||||
# if providers and not all(
|
||||
# provider.provider == Provider.ProviderChoices.AWS
|
||||
# for provider in providers
|
||||
# ):
|
||||
# raise serializers.ValidationError(
|
||||
# {"providers": "All providers must be AWS for the S3 integration."}
|
||||
# )
|
||||
else:
|
||||
raise serializers.ValidationError(
|
||||
{
|
||||
"integration_type": f"Integration type not supported yet: {integration_type}"
|
||||
}
|
||||
)
|
||||
|
||||
config_serializer(data=configuration).is_valid(raise_exception=True)
|
||||
|
||||
for cred_serializer in credentials_serializers:
|
||||
try:
|
||||
cred_serializer(data=credentials).is_valid(raise_exception=True)
|
||||
break
|
||||
except ValidationError:
|
||||
continue
|
||||
else:
|
||||
raise ValidationError(
|
||||
{"credentials": "Invalid credentials for the integration type."}
|
||||
)
|
||||
|
||||
|
||||
class IntegrationSerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the Integration model.
|
||||
"""
|
||||
|
||||
providers = serializers.ResourceRelatedField(
|
||||
queryset=Provider.objects.all(), many=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = [
|
||||
"id",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"enabled",
|
||||
"connected",
|
||||
"connection_last_checked_at",
|
||||
"integration_type",
|
||||
"configuration",
|
||||
"providers",
|
||||
"url",
|
||||
]
|
||||
|
||||
included_serializers = {
|
||||
"providers": "api.v1.serializers.ProviderIncludeSerializer",
|
||||
}
|
||||
|
||||
def to_representation(self, instance):
|
||||
representation = super().to_representation(instance)
|
||||
allowed_providers = self.context.get("allowed_providers")
|
||||
if allowed_providers:
|
||||
allowed_provider_ids = {str(provider.id) for provider in allowed_providers}
|
||||
representation["providers"] = [
|
||||
provider
|
||||
for provider in representation["providers"]
|
||||
if provider["id"] in allowed_provider_ids
|
||||
]
|
||||
return representation
|
||||
|
||||
|
||||
class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
|
||||
credentials = IntegrationCredentialField(write_only=True)
|
||||
configuration = IntegrationConfigField()
|
||||
providers = serializers.ResourceRelatedField(
|
||||
queryset=Provider.objects.all(), many=True, required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = [
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"enabled",
|
||||
"connected",
|
||||
"connection_last_checked_at",
|
||||
"integration_type",
|
||||
"configuration",
|
||||
"credentials",
|
||||
"providers",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
"connected": {"read_only": True},
|
||||
"enabled": {"read_only": True},
|
||||
"connection_last_checked_at": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
integration_type = attrs.get("integration_type")
|
||||
providers = attrs.get("providers")
|
||||
configuration = attrs.get("configuration")
|
||||
credentials = attrs.get("credentials")
|
||||
|
||||
validated_attrs = super().validate(attrs)
|
||||
self.validate_integration_data(
|
||||
integration_type, providers, configuration, credentials
|
||||
)
|
||||
return validated_attrs
|
||||
|
||||
def create(self, validated_data):
|
||||
tenant_id = self.context.get("tenant_id")
|
||||
|
||||
providers = validated_data.pop("providers", [])
|
||||
integration = Integration.objects.create(tenant_id=tenant_id, **validated_data)
|
||||
|
||||
through_model_instances = [
|
||||
IntegrationProviderRelationship(
|
||||
integration=integration,
|
||||
provider=provider,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
for provider in providers
|
||||
]
|
||||
IntegrationProviderRelationship.objects.bulk_create(through_model_instances)
|
||||
|
||||
return integration
|
||||
|
||||
|
||||
class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
|
||||
credentials = IntegrationCredentialField(write_only=True, required=False)
|
||||
configuration = IntegrationConfigField(required=False)
|
||||
providers = serializers.ResourceRelatedField(
|
||||
queryset=Provider.objects.all(), many=True, required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = [
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
"enabled",
|
||||
"connected",
|
||||
"connection_last_checked_at",
|
||||
"integration_type",
|
||||
"configuration",
|
||||
"credentials",
|
||||
"providers",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
"connected": {"read_only": True},
|
||||
"connection_last_checked_at": {"read_only": True},
|
||||
"integration_type": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate(self, attrs):
|
||||
integration_type = self.instance.integration_type
|
||||
providers = attrs.get("providers")
|
||||
configuration = attrs.get("configuration") or self.instance.configuration
|
||||
credentials = attrs.get("credentials") or self.instance.credentials
|
||||
|
||||
validated_attrs = super().validate(attrs)
|
||||
self.validate_integration_data(
|
||||
integration_type, providers, configuration, credentials
|
||||
)
|
||||
return validated_attrs
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
tenant_id = self.context.get("tenant_id")
|
||||
if validated_data.get("providers") is not None:
|
||||
instance.providers.clear()
|
||||
new_relationships = [
|
||||
IntegrationProviderRelationship(
|
||||
integration=instance, provider=provider, tenant_id=tenant_id
|
||||
)
|
||||
for provider in validated_data["providers"]
|
||||
]
|
||||
IntegrationProviderRelationship.objects.bulk_create(new_relationships)
|
||||
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
@@ -10,7 +10,6 @@ from api.v1.views import (
|
||||
FindingViewSet,
|
||||
GithubSocialLoginView,
|
||||
GoogleSocialLoginView,
|
||||
IntegrationViewSet,
|
||||
InvitationAcceptViewSet,
|
||||
InvitationViewSet,
|
||||
MembershipViewSet,
|
||||
@@ -48,7 +47,6 @@ router.register(
|
||||
)
|
||||
router.register(r"overviews", OverviewViewSet, basename="overview")
|
||||
router.register(r"schedules", ScheduleViewSet, basename="schedule")
|
||||
router.register(r"integrations", IntegrationViewSet, basename="integration")
|
||||
|
||||
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
|
||||
tenants_router.register(
|
||||
|
||||
@@ -55,10 +55,10 @@ from tasks.tasks import (
|
||||
|
||||
from api.base_views import BaseRLSViewSet, BaseTenantViewset, BaseUserViewset
|
||||
from api.db_router import MainRouter
|
||||
from api.db_utils import delete_related_daily_task
|
||||
from api.filters import (
|
||||
ComplianceOverviewFilter,
|
||||
FindingFilter,
|
||||
IntegrationFilter,
|
||||
InvitationFilter,
|
||||
MembershipFilter,
|
||||
ProviderFilter,
|
||||
@@ -76,7 +76,6 @@ from api.filters import (
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
Invitation,
|
||||
Membership,
|
||||
Provider,
|
||||
@@ -105,9 +104,6 @@ from api.v1.serializers import (
|
||||
FindingDynamicFilterSerializer,
|
||||
FindingMetadataSerializer,
|
||||
FindingSerializer,
|
||||
IntegrationCreateSerializer,
|
||||
IntegrationSerializer,
|
||||
IntegrationUpdateSerializer,
|
||||
InvitationAcceptSerializer,
|
||||
InvitationCreateSerializer,
|
||||
InvitationSerializer,
|
||||
@@ -245,7 +241,7 @@ class SchemaView(SpectacularAPIView):
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
spectacular_settings.TITLE = "Prowler API"
|
||||
spectacular_settings.VERSION = "1.6.0"
|
||||
spectacular_settings.VERSION = "1.5.3"
|
||||
spectacular_settings.DESCRIPTION = (
|
||||
"Prowler API specification.\n\nThis file is auto-generated."
|
||||
)
|
||||
@@ -301,11 +297,6 @@ class SchemaView(SpectacularAPIView):
|
||||
"description": "Endpoints for task management, allowing retrieval of task status and "
|
||||
"revoking tasks that have not started.",
|
||||
},
|
||||
{
|
||||
"name": "Integration",
|
||||
"description": "Endpoints for managing third-party integrations, including registration, configuration,"
|
||||
" retrieval, and deletion of integrations such as S3, JIRA, or other services.",
|
||||
},
|
||||
]
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
@@ -1087,6 +1078,7 @@ class ProviderViewSet(BaseRLSViewSet):
|
||||
provider = get_object_or_404(Provider, pk=pk)
|
||||
provider.is_deleted = True
|
||||
provider.save()
|
||||
delete_related_daily_task(str(provider.id))
|
||||
|
||||
with transaction.atomic():
|
||||
task = delete_provider_task.delay(
|
||||
@@ -2451,67 +2443,3 @@ class ScheduleViewSet(BaseRLSViewSet):
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
tags=["Integration"],
|
||||
summary="List all integrations",
|
||||
description="Retrieve a list of all configured integrations with options for filtering by various criteria.",
|
||||
),
|
||||
retrieve=extend_schema(
|
||||
tags=["Integration"],
|
||||
summary="Retrieve integration details",
|
||||
description="Fetch detailed information about a specific integration by its ID.",
|
||||
),
|
||||
create=extend_schema(
|
||||
tags=["Integration"],
|
||||
summary="Create a new integration",
|
||||
description="Register a new integration with the system, providing necessary configuration details.",
|
||||
),
|
||||
partial_update=extend_schema(
|
||||
tags=["Integration"],
|
||||
summary="Partially update an integration",
|
||||
description="Modify certain fields of an existing integration without affecting other settings.",
|
||||
),
|
||||
destroy=extend_schema(
|
||||
tags=["Integration"],
|
||||
summary="Delete an integration",
|
||||
description="Remove an integration from the system by its ID.",
|
||||
),
|
||||
)
|
||||
@method_decorator(CACHE_DECORATOR, name="list")
|
||||
@method_decorator(CACHE_DECORATOR, name="retrieve")
|
||||
class IntegrationViewSet(BaseRLSViewSet):
|
||||
queryset = Integration.objects.all()
|
||||
serializer_class = IntegrationSerializer
|
||||
http_method_names = ["get", "post", "patch", "delete"]
|
||||
filterset_class = IntegrationFilter
|
||||
ordering = ["integration_type", "-inserted_at"]
|
||||
# RBAC required permissions
|
||||
required_permissions = [Permissions.MANAGE_INTEGRATIONS]
|
||||
allowed_providers = None
|
||||
|
||||
def get_queryset(self):
|
||||
user_roles = get_role(self.request.user)
|
||||
if user_roles.unlimited_visibility:
|
||||
# User has unlimited visibility, return all integrations
|
||||
queryset = Integration.objects.filter(tenant_id=self.request.tenant_id)
|
||||
else:
|
||||
# User lacks permission, filter providers based on provider groups associated with the role
|
||||
allowed_providers = get_providers(user_roles)
|
||||
queryset = Integration.objects.filter(providers__in=allowed_providers)
|
||||
self.allowed_providers = allowed_providers
|
||||
return queryset
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.action == "create":
|
||||
return IntegrationCreateSerializer
|
||||
elif self.action == "partial_update":
|
||||
return IntegrationUpdateSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context["allowed_providers"] = self.allowed_providers
|
||||
return context
|
||||
|
||||
@@ -50,9 +50,9 @@ class RLSTask(Task):
|
||||
|
||||
tenant_id = kwargs.get("tenant_id")
|
||||
with rls_transaction(tenant_id):
|
||||
APITask.objects.create(
|
||||
APITask.objects.update_or_create(
|
||||
id=task_result_instance.task_id,
|
||||
tenant_id=tenant_id,
|
||||
task_runner_task=task_result_instance,
|
||||
defaults={"task_runner_task": task_result_instance},
|
||||
)
|
||||
return result
|
||||
|
||||
@@ -2,9 +2,10 @@ import json
|
||||
import logging
|
||||
from enum import StrEnum
|
||||
|
||||
from config.env import env
|
||||
from django_guid.log_filters import CorrelationId
|
||||
|
||||
from config.env import env
|
||||
|
||||
|
||||
class BackendLogger(StrEnum):
|
||||
GUNICORN = "gunicorn"
|
||||
@@ -38,9 +39,9 @@ class NDJSONFormatter(logging.Formatter):
|
||||
"funcName": record.funcName,
|
||||
"process": record.process,
|
||||
"thread": record.thread,
|
||||
"transaction_id": (
|
||||
record.transaction_id if hasattr(record, "transaction_id") else None
|
||||
),
|
||||
"transaction_id": record.transaction_id
|
||||
if hasattr(record, "transaction_id")
|
||||
else None,
|
||||
}
|
||||
|
||||
# Add REST API extra fields
|
||||
|
||||
@@ -237,7 +237,4 @@ DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY = env.str(
|
||||
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN = env.str("DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN", "")
|
||||
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION = env.str("DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION", "")
|
||||
|
||||
# HTTP Security Headers
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = True
|
||||
X_FRAME_OPTIONS = "DENY"
|
||||
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"
|
||||
DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)
|
||||
|
||||
@@ -54,6 +54,8 @@ IGNORED_EXCEPTIONS = [
|
||||
"AzureClientIdAndClientSecretNotBelongingToTenantIdError",
|
||||
"AzureHTTPResponseError",
|
||||
"Error with credentials provided",
|
||||
# AWS Service is not available in a region
|
||||
"EndpointConnectionError",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
from config.env import env
|
||||
|
||||
# Provider Oauth settings
|
||||
GOOGLE_OAUTH_CLIENT_ID = env("SOCIAL_GOOGLE_OAUTH_CLIENT_ID", default="")
|
||||
GOOGLE_OAUTH_CLIENT_SECRET = env("SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET", default="")
|
||||
GOOGLE_OAUTH_CALLBACK_URL = env("SOCIAL_GOOGLE_OAUTH_CALLBACK_URL", default="")
|
||||
# Google Oauth settings
|
||||
GOOGLE_OAUTH_CLIENT_ID = env("DJANGO_GOOGLE_OAUTH_CLIENT_ID", default="")
|
||||
GOOGLE_OAUTH_CLIENT_SECRET = env("DJANGO_GOOGLE_OAUTH_CLIENT_SECRET", default="")
|
||||
GOOGLE_OAUTH_CALLBACK_URL = env("DJANGO_GOOGLE_OAUTH_CALLBACK_URL", default="")
|
||||
|
||||
GITHUB_OAUTH_CLIENT_ID = env("SOCIAL_GITHUB_OAUTH_CLIENT_ID", default="")
|
||||
GITHUB_OAUTH_CLIENT_SECRET = env("SOCIAL_GITHUB_OAUTH_CLIENT_SECRET", default="")
|
||||
GITHUB_OAUTH_CALLBACK_URL = env("SOCIAL_GITHUB_OAUTH_CALLBACK_URL", default="")
|
||||
GITHUB_OAUTH_CLIENT_ID = env("DJANGO_GITHUB_OAUTH_CLIENT_ID", default="")
|
||||
GITHUB_OAUTH_CLIENT_SECRET = env("DJANGO_GITHUB_OAUTH_CLIENT_SECRET", default="")
|
||||
GITHUB_OAUTH_CALLBACK_URL = env("DJANGO_GITHUB_OAUTH_CALLBACK_URL", default="")
|
||||
|
||||
# Allauth settings
|
||||
ACCOUNT_LOGIN_METHODS = {"email"} # Use Email / Password authentication
|
||||
|
||||
@@ -15,8 +15,6 @@ from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
IntegrationProviderRelationship,
|
||||
Invitation,
|
||||
Membership,
|
||||
Provider,
|
||||
@@ -879,46 +877,6 @@ def scan_summaries_fixture(tenants_fixture, providers_fixture):
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def integrations_fixture(providers_fixture):
|
||||
provider1, provider2, *_ = providers_fixture
|
||||
tenant_id = provider1.tenant_id
|
||||
integration1 = Integration.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
enabled=True,
|
||||
connected=True,
|
||||
integration_type="amazon_s3",
|
||||
configuration={"key": "value"},
|
||||
credentials={"psswd": "1234"},
|
||||
)
|
||||
IntegrationProviderRelationship.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
integration=integration1,
|
||||
provider=provider1,
|
||||
)
|
||||
|
||||
integration2 = Integration.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
enabled=True,
|
||||
connected=True,
|
||||
integration_type="amazon_s3",
|
||||
configuration={"key": "value"},
|
||||
credentials={"psswd": "1234"},
|
||||
)
|
||||
IntegrationProviderRelationship.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
integration=integration2,
|
||||
provider=provider1,
|
||||
)
|
||||
IntegrationProviderRelationship.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
integration=integration2,
|
||||
provider=provider2,
|
||||
)
|
||||
|
||||
return integration1, integration2
|
||||
|
||||
|
||||
def get_authorization_header(access_token: str) -> dict:
|
||||
return {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from celery.utils.log import get_task_logger
|
||||
from django.db import DatabaseError, transaction
|
||||
from django.db import DatabaseError
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.db_utils import batch_delete, rls_transaction
|
||||
@@ -8,11 +8,12 @@ from api.models import Finding, Provider, Resource, Scan, ScanSummary, Tenant
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
def delete_provider(pk: str):
|
||||
def delete_provider(tenant_id: str, pk: str):
|
||||
"""
|
||||
Gracefully deletes an instance of a provider along with its related data.
|
||||
|
||||
Args:
|
||||
tenant_id (str): Tenant ID the resources belong to.
|
||||
pk (str): The primary key of the Provider instance to delete.
|
||||
|
||||
Returns:
|
||||
@@ -21,37 +22,32 @@ def delete_provider(pk: str):
|
||||
|
||||
Raises:
|
||||
Provider.DoesNotExist: If no instance with the provided primary key exists.
|
||||
DatabaseError: If any deletion step fails.
|
||||
"""
|
||||
instance = Provider.all_objects.get(pk=pk)
|
||||
|
||||
deletion_summary = {}
|
||||
|
||||
deletion_steps = [
|
||||
("Scan Summaries", ScanSummary.all_objects.filter(scan__provider=instance)),
|
||||
("Findings", Finding.all_objects.filter(scan__provider=instance)),
|
||||
("Resources", Resource.all_objects.filter(provider=instance)),
|
||||
("Scans", Scan.all_objects.filter(provider=instance)),
|
||||
]
|
||||
with rls_transaction(tenant_id):
|
||||
instance = Provider.all_objects.get(pk=pk)
|
||||
deletion_summary = {}
|
||||
deletion_steps = [
|
||||
("Scan Summaries", ScanSummary.all_objects.filter(scan__provider=instance)),
|
||||
("Findings", Finding.all_objects.filter(scan__provider=instance)),
|
||||
("Resources", Resource.all_objects.filter(provider=instance)),
|
||||
("Scans", Scan.all_objects.filter(provider=instance)),
|
||||
]
|
||||
|
||||
for step_name, queryset in deletion_steps:
|
||||
try:
|
||||
with transaction.atomic():
|
||||
_, step_summary = batch_delete(queryset)
|
||||
deletion_summary.update(step_summary)
|
||||
except DatabaseError as error:
|
||||
logger.error(f"Error deleting {step_name}: {error}")
|
||||
_, step_summary = batch_delete(tenant_id, queryset)
|
||||
deletion_summary.update(step_summary)
|
||||
except DatabaseError as db_error:
|
||||
logger.error(f"Error deleting {step_name}: {db_error}")
|
||||
raise
|
||||
|
||||
# Delete the provider itself
|
||||
try:
|
||||
with transaction.atomic():
|
||||
with rls_transaction(tenant_id):
|
||||
_, provider_summary = instance.delete()
|
||||
deletion_summary.update(provider_summary)
|
||||
except DatabaseError as error:
|
||||
logger.error(f"Error deleting Provider: {error}")
|
||||
deletion_summary.update(provider_summary)
|
||||
except DatabaseError as db_error:
|
||||
logger.error(f"Error deleting Provider: {db_error}")
|
||||
raise
|
||||
|
||||
return deletion_summary
|
||||
|
||||
|
||||
@@ -69,9 +65,8 @@ def delete_tenant(pk: str):
|
||||
deletion_summary = {}
|
||||
|
||||
for provider in Provider.objects.using(MainRouter.admin_db).filter(tenant_id=pk):
|
||||
with rls_transaction(pk):
|
||||
summary = delete_provider(provider.id)
|
||||
deletion_summary.update(summary)
|
||||
summary = delete_provider(pk, provider.id)
|
||||
deletion_summary.update(summary)
|
||||
|
||||
Tenant.objects.using(MainRouter.admin_db).filter(id=pk).delete()
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
|
||||
@@ -21,6 +22,7 @@ from api.db_utils import rls_transaction
|
||||
from api.decorators import set_tenant
|
||||
from api.models import Finding, Provider, Scan, ScanSummary, StateChoices
|
||||
from api.utils import initialize_prowler_provider
|
||||
from api.v1.serializers import ScanTaskSerializer
|
||||
from prowler.lib.outputs.finding import Finding as FindingOutput
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
@@ -43,9 +45,10 @@ def check_provider_connection_task(provider_id: str):
|
||||
return check_provider_connection(provider_id=provider_id)
|
||||
|
||||
|
||||
@shared_task(base=RLSTask, name="provider-deletion", queue="deletion")
|
||||
@set_tenant
|
||||
def delete_provider_task(provider_id: str):
|
||||
@shared_task(
|
||||
base=RLSTask, name="provider-deletion", queue="deletion", autoretry_for=(Exception,)
|
||||
)
|
||||
def delete_provider_task(provider_id: str, tenant_id: str):
|
||||
"""
|
||||
Task to delete a specific Provider instance.
|
||||
|
||||
@@ -53,6 +56,7 @@ def delete_provider_task(provider_id: str):
|
||||
|
||||
Args:
|
||||
provider_id (str): The primary key of the `Provider` instance to be deleted.
|
||||
tenant_id (str): Tenant ID the provider belongs to.
|
||||
|
||||
Returns:
|
||||
tuple: A tuple containing:
|
||||
@@ -60,7 +64,7 @@ def delete_provider_task(provider_id: str):
|
||||
- A dictionary with the count of deleted instances per model,
|
||||
including related models if cascading deletes were triggered.
|
||||
"""
|
||||
return delete_provider(pk=provider_id)
|
||||
return delete_provider(tenant_id=tenant_id, pk=provider_id)
|
||||
|
||||
|
||||
@shared_task(base=RLSTask, name="scan-perform", queue="scans")
|
||||
@@ -126,6 +130,43 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
|
||||
periodic_task_instance = PeriodicTask.objects.get(
|
||||
name=f"scan-perform-scheduled-{provider_id}"
|
||||
)
|
||||
|
||||
executed_scan = Scan.objects.filter(
|
||||
tenant_id=tenant_id,
|
||||
provider_id=provider_id,
|
||||
task__task_runner_task__task_id=task_id,
|
||||
).order_by("completed_at")
|
||||
|
||||
if (
|
||||
Scan.objects.filter(
|
||||
tenant_id=tenant_id,
|
||||
provider_id=provider_id,
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.EXECUTING,
|
||||
scheduler_task_id=periodic_task_instance.id,
|
||||
scheduled_at__date=datetime.now(timezone.utc).date(),
|
||||
).exists()
|
||||
or executed_scan.exists()
|
||||
):
|
||||
# Duplicated task execution due to visibility timeout or scan is already running
|
||||
logger.warning(f"Duplicated scheduled scan for provider {provider_id}.")
|
||||
try:
|
||||
affected_scan = executed_scan.first()
|
||||
if not affected_scan:
|
||||
raise ValueError(
|
||||
"Error retrieving affected scan details after detecting duplicated scheduled "
|
||||
"scan."
|
||||
)
|
||||
# Return the affected scan details to avoid losing data
|
||||
serializer = ScanTaskSerializer(instance=affected_scan)
|
||||
except Exception as duplicated_scan_exception:
|
||||
logger.error(
|
||||
f"Duplicated scheduled scan for provider {provider_id}. Error retrieving affected scan details: "
|
||||
f"{str(duplicated_scan_exception)}"
|
||||
)
|
||||
raise duplicated_scan_exception
|
||||
return serializer.data
|
||||
|
||||
next_scan_datetime = get_next_execution_datetime(task_id, provider_id)
|
||||
scan_instance, _ = Scan.objects.get_or_create(
|
||||
tenant_id=tenant_id,
|
||||
@@ -133,7 +174,11 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state__in=(StateChoices.SCHEDULED, StateChoices.AVAILABLE),
|
||||
scheduler_task_id=periodic_task_instance.id,
|
||||
defaults={"state": StateChoices.SCHEDULED},
|
||||
defaults={
|
||||
"state": StateChoices.SCHEDULED,
|
||||
"name": "Daily scheduled scan",
|
||||
"scheduled_at": next_scan_datetime - timedelta(days=1),
|
||||
},
|
||||
)
|
||||
|
||||
scan_instance.task_id = task_id
|
||||
@@ -174,7 +219,7 @@ def perform_scan_summary_task(tenant_id: str, scan_id: str):
|
||||
return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
|
||||
@shared_task(name="tenant-deletion", queue="deletion")
|
||||
@shared_task(name="tenant-deletion", queue="deletion", autoretry_for=(Exception,))
|
||||
def delete_tenant_task(tenant_id: str):
|
||||
return delete_tenant(pk=tenant_id)
|
||||
|
||||
|
||||
@@ -9,17 +9,19 @@ from api.models import Provider, Tenant
|
||||
class TestDeleteProvider:
|
||||
def test_delete_provider_success(self, providers_fixture):
|
||||
instance = providers_fixture[0]
|
||||
result = delete_provider(instance.id)
|
||||
tenant_id = str(instance.tenant_id)
|
||||
result = delete_provider(tenant_id, instance.id)
|
||||
|
||||
assert result
|
||||
with pytest.raises(ObjectDoesNotExist):
|
||||
Provider.objects.get(pk=instance.id)
|
||||
|
||||
def test_delete_provider_does_not_exist(self):
|
||||
def test_delete_provider_does_not_exist(self, tenants_fixture):
|
||||
tenant_id = str(tenants_fixture[0].id)
|
||||
non_existent_pk = "babf6796-cfcc-4fd3-9dcf-88d012247645"
|
||||
|
||||
with pytest.raises(ObjectDoesNotExist):
|
||||
delete_provider(non_existent_pk)
|
||||
delete_provider(tenant_id, non_existent_pk)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@@ -89,7 +89,7 @@ for accountId in $ACCOUNTS_IN_ORGS; do
|
||||
# Run Prowler
|
||||
echo -e "Assessing AWS Account: $accountId, using Role: $ROLE on $(date)"
|
||||
# remove -g cislevel for a full report and add other formats if needed
|
||||
./prowler/prowler-cli.py --role arn:"$PARTITION":iam::"$accountId":role/"$ROLE" --compliance cis_1.5_aws -M html
|
||||
./prowler/prowler.py --role arn:"$PARTITION":iam::"$accountId":role/"$ROLE" --compliance cis_1.5_aws -M html
|
||||
echo "Report stored locally at: prowler/output/ directory"
|
||||
TOTAL_SEC=$((SECONDS - START_TIME))
|
||||
echo -e "Completed AWS Account: $accountId, using Role: $ROLE on $(date)"
|
||||
|
||||
@@ -17,7 +17,7 @@ spec:
|
||||
image: toniblyx/prowler:latest
|
||||
imagePullPolicy: Always
|
||||
command:
|
||||
- "./prowler-cli.py"
|
||||
- "./prowler.py"
|
||||
args: [ "-B", "$(awsS3Bucket)" ]
|
||||
env:
|
||||
- name: AWS_ACCESS_KEY_ID
|
||||
|
||||
@@ -562,11 +562,8 @@ def get_section_containers_format1(data, section_1, section_2):
|
||||
|
||||
direct_internal_items.append(internal_section_container)
|
||||
|
||||
# Cut the title if it's too long
|
||||
tittle_external = section[:70] + " ..." if len(section) > 70 else section
|
||||
|
||||
accordion_item = dbc.AccordionItem(
|
||||
title=f"{tittle_external}", children=direct_internal_items
|
||||
title=f"{section}", children=direct_internal_items
|
||||
)
|
||||
section_container = html.Div(
|
||||
[
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_container_iso
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORY",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_NAME",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_container_iso(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_CATEGORY", "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID"
|
||||
)
|
||||
@@ -1,23 +0,0 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_container_iso
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORY",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_NAME",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_container_iso(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_CATEGORY", "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID"
|
||||
)
|
||||
@@ -1,23 +0,0 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_container_iso
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORY",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_NAME",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_container_iso(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_CATEGORY", "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID"
|
||||
)
|
||||
@@ -1,6 +1,6 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
from dashboard.common_methods import get_section_containers_format4
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
@@ -9,15 +9,12 @@ def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
]
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
return get_section_containers_format4(aux, "REQUIREMENTS_ID")
|
||||
|
||||
@@ -294,7 +294,7 @@ Each Prowler check has metadata associated which is stored at the same level of
|
||||
# Code holds different methods to remediate the FAIL finding
|
||||
"Code": {
|
||||
# CLI holds the command in the provider native CLI to remediate it
|
||||
"CLI": "aws ec2 modify-image-attribute --region <REGION> --image-id <EC2_AMI_ID> --launch-permission {\"Remove\":[{\"Group\":\"all\"}]}",
|
||||
"CLI": "https://docs.prowler.com/checks/public_8#cli-command",
|
||||
# NativeIaC holds the native IaC code to remediate it, use "https://docs.bridgecrew.io/docs"
|
||||
"NativeIaC": "",
|
||||
# Other holds the other commands, scripts or code to remediate it, use "https://www.trendmicro.com/cloudoneconformity"
|
||||
|
||||
@@ -18,7 +18,7 @@ This file should inside the *.vscode* folder and its name has to be *launch.json
|
||||
"name": "Debug AWS Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler-cli.py",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"aws",
|
||||
"--log-level",
|
||||
@@ -33,7 +33,7 @@ This file should inside the *.vscode* folder and its name has to be *launch.json
|
||||
"name": "Debug Azure Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler-cli.py",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"azure",
|
||||
"--sp-env-auth",
|
||||
@@ -49,7 +49,7 @@ This file should inside the *.vscode* folder and its name has to be *launch.json
|
||||
"name": "Debug GCP Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler-cli.py",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"gcp",
|
||||
"--log-level",
|
||||
@@ -64,7 +64,7 @@ This file should inside the *.vscode* folder and its name has to be *launch.json
|
||||
"name": "Debug K8s Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler-cli.py",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"kubernetes",
|
||||
"--log-level",
|
||||
|
||||
|
Before Width: | Height: | Size: 153 KiB |
|
Before Width: | Height: | Size: 27 KiB |
@@ -219,7 +219,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
poetry install
|
||||
poetry run python prowler-cli.py -v
|
||||
poetry run python prowler.py -v
|
||||
```
|
||||
???+ note
|
||||
If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
|
||||
|
||||
@@ -29,7 +29,7 @@ mkdir /tmp/poetry
|
||||
poetry config cache-dir /tmp/poetry
|
||||
eval $(poetry env activate)
|
||||
poetry install
|
||||
python prowler-cli.py -v
|
||||
python prowler.py -v
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
|
||||
|
||||
@@ -86,7 +86,7 @@ The following list includes all the Azure checks with configurable variables tha
|
||||
## Kubernetes
|
||||
|
||||
### Configurable Checks
|
||||
The following list includes all the Kubernetes checks with configurable variables that can be changed in the configuration yaml file:
|
||||
The following list includes all the Azure checks with configurable variables that can be changed in the configuration yaml file:
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
|
||||
@@ -96,17 +96,6 @@ The following list includes all the Kubernetes checks with configurable variable
|
||||
| `apiserver_strong_ciphers` | `apiserver_strong_ciphers` | String |
|
||||
| `kubelet_strong_ciphers_only` | `kubelet_strong_ciphers` | String |
|
||||
|
||||
|
||||
## Microsoft365
|
||||
|
||||
### Configurable Checks
|
||||
The following list includes all the Microsoft365 checks with configurable variables that can be changed in the configuration yaml file:
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
|
||||
| `entra_admin_users_sign_in_frequency_enabled` | `sign_in_frequency` | Integer |
|
||||
|
||||
|
||||
## Config YAML File Structure
|
||||
|
||||
???+ note
|
||||
@@ -504,10 +493,4 @@ kubernetes:
|
||||
"TLS_RSA_WITH_AES_128_GCM_SHA256",
|
||||
]
|
||||
|
||||
# Microsoft365 Configuration
|
||||
microsoft365:
|
||||
# Conditional Access Policy
|
||||
# policy.session_controls.sign_in_frequency.frequency in hours
|
||||
sign_in_frequency: 4
|
||||
|
||||
```
|
||||
|
||||
|
Before Width: | Height: | Size: 106 KiB |
|
Before Width: | Height: | Size: 145 KiB |
|
Before Width: | Height: | Size: 70 KiB |
|
Before Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 65 KiB |
|
Before Width: | Height: | Size: 69 KiB |
|
Before Width: | Height: | Size: 62 KiB |
|
Before Width: | Height: | Size: 56 KiB |
|
Before Width: | Height: | Size: 45 KiB |
|
Before Width: | Height: | Size: 372 KiB |
|
Before Width: | Height: | Size: 427 KiB |
|
Before Width: | Height: | Size: 108 KiB |
|
Before Width: | Height: | Size: 137 KiB |
|
Before Width: | Height: | Size: 121 KiB |
|
Before Width: | Height: | Size: 113 KiB |
|
Before Width: | Height: | Size: 72 KiB |
|
Before Width: | Height: | Size: 111 KiB |
|
Before Width: | Height: | Size: 71 KiB |
|
Before Width: | Height: | Size: 145 KiB |
|
Before Width: | Height: | Size: 37 KiB |
|
Before Width: | Height: | Size: 144 KiB |
|
Before Width: | Height: | Size: 234 KiB |
|
Before Width: | Height: | Size: 235 KiB |
@@ -1,191 +0,0 @@
|
||||
# Managing Users and Roles
|
||||
|
||||
The **Prowler App** supports multiple users within a single tenant, enabling seamless collaboration by allowing team members to easily share insights and manage security findings.
|
||||
|
||||
[Roles](#roles) help you control user permissions, determining what actions each user can perform and the data they can access within Prowler. By default, each account includes an immutable **admin** role, ensuring that your account always retains administrative access.
|
||||
|
||||
## Membership
|
||||
|
||||
To get to User-Invitation Management we will focus on the Membership section.
|
||||
|
||||
???+ note
|
||||
**Only users that have the _Invite and Manage Users_ or _admin_ permission can access this section.**
|
||||
|
||||
<img src="../img/rbac/membership.png" alt="Membership tab" width="700"/>
|
||||
|
||||
### Users
|
||||
|
||||
#### Editing a User
|
||||
|
||||
Follow these steps to edit a user of your account:
|
||||
|
||||
1. Navigate to **Users** from the side menu.
|
||||
2. Click on the edit button of the user you want to modify.
|
||||
|
||||
<img src="../img/rbac/user_edit.png" alt="Edit User" width="700"/>
|
||||
|
||||
3. Edit the user fields you need and save your changes.
|
||||
|
||||
<img src="../img/rbac/user_edit_details.png" alt="Edit User Details" width="700"/>
|
||||
|
||||
#### Removing a User
|
||||
|
||||
Follow these steps to remove a user of your account:
|
||||
|
||||
1. Navigate to **Users** from the side menu.
|
||||
2. Click on the delete button of your current user.
|
||||
> **Note: Each user will be able to delete himself and not others, regardless of his permissions.**
|
||||
|
||||
<img src="../img/rbac/user_remove.png" alt="Remove User" width="700"/>
|
||||
|
||||
### Invitations
|
||||
|
||||
#### Inviting Users
|
||||
|
||||
???+note
|
||||
_Please be aware that at this time, an email address can only be associated with a single Prowler account._
|
||||
|
||||
Follow these steps to invite a user to your account:
|
||||
|
||||
1. Navigate to **Users** from the side menu.
|
||||
2. Click on the **Invite User** button on the top right-hand corner of the screen.
|
||||
|
||||
<img src="../img/rbac/invite.png" alt="Invite User" width="700"/>
|
||||
|
||||
3. In the Invite User screen, enter the email address of the user you want to invite.
|
||||
4. Pick a Role for the user. You can also change the roles for users and pending invites later. To learn more about the roles and what they can do, see [Roles](#roles).
|
||||
|
||||
<img src="../img/rbac/invitation_info.png" alt="Invitation info" width="700"/>
|
||||
|
||||
5. Click on the **Send Invitation** button to send the invitation to the user.
|
||||
6. After clicking you will see a summary of the status of the invitation. You could access this view again from the invitation menu.
|
||||
|
||||
<img src="../img/rbac/invitation_details.png" alt="Invitation details" width="700"/>
|
||||
<img src="../img/rbac/invitation_details_1.png" alt="Invitation button" width="700"/>
|
||||
|
||||
7. To allow the user to join your Prowler account you will need to share the link with the user. They will only need to access this URL and follow the steps to create a user and complete their registration. **Note: Invitations will expire after 7 days.**
|
||||
|
||||
<img src="../img/rbac/invitation_sign-up.png" alt="Invitation sign-up" width="700"/>
|
||||
|
||||
???+note
|
||||
If you are a [Prowler Cloud](https://cloud.prowler.com/sign-in) user, the invited user will receive an email with the link to accept the invitation.
|
||||
|
||||
#### Editing Invitation
|
||||
|
||||
Follow these steps to edit an invitation:
|
||||
|
||||
1. Navigate to **Invitations** from the side menu.
|
||||
2. Click on the edit button of the invitation and modify the email, the role or both. **Note: Editing an invitation will not reset its expiration time.**
|
||||
|
||||
<img src="../img/rbac/invitation_edit.png" alt="Invitation edit" width="700"/>
|
||||
<img src="../img/rbac/invitation_edit_1.png" alt="Invitation edit details" width="700"/>
|
||||
|
||||
#### Cancelling Invitation
|
||||
|
||||
Follow these steps to cancel an invitation:
|
||||
|
||||
1. Navigate to **Invitations** from the side menu.
|
||||
2. Click on the revoke button of the invitation.
|
||||
|
||||
<img src="../img/rbac/invitation_revoke.png" alt="Invitation revoke" width="700"/>
|
||||
|
||||
#### Sending Invitation Again
|
||||
|
||||
To resend the invitation to the user it is necessary to explicitly **delete the previous invitation and create a new invitation**.
|
||||
|
||||
## Managing Groups and Roles
|
||||
|
||||
The Roles section in Prowler is designed to facilitate the assignment of custom user privileges. This section allows administrators to define roles with specific permissions for Prowler administrative tasks and Account visibility.
|
||||
|
||||
???+ note
|
||||
**Only users that have the _Manage Account_ or _admin_ permission can access this section.**
|
||||
|
||||
### Provider Groups
|
||||
|
||||
Provider Groups control visibility across specific providers. When creating a new role, you can assign specific groups to define their Cloud Provider visibility. This ensures that users with that role have access only to the Cloud Providers that are required.
|
||||
|
||||
By default, a new user role does not have visibility into any group.
|
||||
|
||||
Alternatively, to grant the role unlimited visibility across all providers, check the Grant Unlimited Visibility checkbox.
|
||||
|
||||
#### Creating a Provider Group
|
||||
|
||||
Follow these steps to create a provider group in your account:
|
||||
|
||||
1. 1. Navigate to **Provider Groups** from the side menu..
|
||||
2. In this view you can select the provider groups you want to assign to one or more roles.
|
||||
3. Click on the **Create Group** button on the center of the screen.
|
||||
|
||||
<img src="../img/rbac/provider_group.png" alt="Create Provider Group" width="700"/>
|
||||
|
||||
#### Editing a Provider Group
|
||||
|
||||
Follow these steps to edit a provider group on your account:
|
||||
|
||||
1. 1. Navigate to **Provider Groups** from the side menu..
|
||||
2. Click on the edit button of the provider group you want to modify.
|
||||
|
||||
<img src="../img/rbac/provider_group_edit.png" alt="Edit Provider Group" width="700"/>
|
||||
|
||||
3. Change the provider group parameters you need and save the changes.
|
||||
|
||||
<img src="../img/rbac/provider_group_edit_1.png" alt="Edit Provider Group Details" width="700"/>
|
||||
|
||||
#### Removing a Provider Group
|
||||
|
||||
Follow these steps to remove a provider group of your account:
|
||||
|
||||
1. 1. Navigate to **Provider Groups** from the side menu..
|
||||
2. Click on the delete button of the provider group you want to remove.
|
||||
|
||||
<img src="../img/rbac/provider_group_remove.png" alt="Remove Provider Group" width="700"/>
|
||||
|
||||
### Roles
|
||||
|
||||
#### Creating a Role
|
||||
|
||||
Follow these steps to create a role for your account:
|
||||
|
||||
1. Navigate to **Roles** from the side menu.
|
||||
2. Click on the **Add Role** button on the top right-hand corner of the screen.
|
||||
|
||||
<img src="../img/rbac/role_create.png" alt="Create Role" width="700"/>
|
||||
|
||||
3. In the Add Role screen, enter the role name, the administration permissions and the groups of providers to which the Role will have access to.
|
||||
4. In the Groups and Account Visibility section, you will see a list of available groups with checkboxes next to them. To assign a group to the user role, simply click the checkbox next to the group name. If you need to assign multiple groups, repeat the process for each group you wish to add.
|
||||
|
||||
<img src="../img/rbac/role_create_1.png" alt="Role parameters" width="700"/>
|
||||
|
||||
#### Editing a Role
|
||||
|
||||
Follow these steps to edit a role on your account:
|
||||
|
||||
1. Navigate to **Roles** from the side menu.
|
||||
2. Click on the edit button of the role you want to modify.
|
||||
|
||||
<img src="../img/rbac/role_edit.png" alt="Edit Role" width="700"/>
|
||||
|
||||
3. Adjust the settings as needed and save the changes.
|
||||
|
||||
<img src="../img/rbac/role_edit_details.png" alt="Edit Role Details" width="700"/>
|
||||
|
||||
#### Removing a Role
|
||||
|
||||
Follow these steps to remove a role of your account:
|
||||
|
||||
1. Navigate to **Roles** from the side menu.
|
||||
2. Click on the delete button of the role you want to remove.
|
||||
|
||||
<img src="../img/rbac/role_remove.png" alt="Remove Role" width="700"/>
|
||||
|
||||
## RBAC Administrative Permissions
|
||||
|
||||
Assign administrative permissions by selecting from the following options:
|
||||
|
||||
**Invite and Manage Users:** Invite new users and manage existing ones.<br>
|
||||
**Manage Account:** Adjust account settings and delete users.<br>
|
||||
**Manage Scans:** Run and review scans.<br>
|
||||
**Manage Cloud Providers:** Add or modify connected cloud providers.<br>
|
||||
**Manage Integrations:** Add or modify the Prowler Integrations.
|
||||
|
||||
To grant all administrative permissions, select the **Grant all admin permissions** option.
|
||||
@@ -1,52 +0,0 @@
|
||||
# Social Login Configuration
|
||||
|
||||
The **Prowler App** supports social login using Google and GitHub OAuth providers. This document guides you through configuring the required environment variables to enable social authentication.
|
||||
|
||||
<img src="../img/social-login/social_login_buttons.png" alt="Social login buttons" width="700" />
|
||||
|
||||
## Configuring Social Login Credentials
|
||||
|
||||
To enable social login with Google and GitHub, you must define the following environment variables:
|
||||
|
||||
### Google OAuth Configuration
|
||||
|
||||
Set the following environment variables for Google OAuth:
|
||||
|
||||
```env
|
||||
SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
|
||||
SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""
|
||||
```
|
||||
|
||||
### GitHub OAuth Configuration
|
||||
|
||||
Set the following environment variables for GitHub OAuth:
|
||||
|
||||
```env
|
||||
SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
|
||||
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""
|
||||
```
|
||||
|
||||
### Important Notes
|
||||
|
||||
- If either `SOCIAL_GOOGLE_OAUTH_CLIENT_ID` or `SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET` is empty or not defined, the Google login button will be disabled.
|
||||
- If either `SOCIAL_GITHUB_OAUTH_CLIENT_ID` or `SOCIAL_GITHUB_OAUTH_CLIENT_SECRET` is empty or not defined, the GitHub login button will be disabled.
|
||||
|
||||
|
||||
<img src="../img/social-login/social_login_buttons_disabled.png" alt="Social login buttons disabled" width="700" />
|
||||
|
||||
## Obtaining OAuth Credentials
|
||||
|
||||
To obtain `CLIENT_ID` and `CLIENT_SECRET` for each provider, follow their official documentation:
|
||||
|
||||
- **Google OAuth**: [Google OAuth Credentials Setup](https://developers.google.com/identity/protocols/oauth2)
|
||||
- **GitHub OAuth**: [GitHub OAuth App Setup](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app)
|
||||
|
||||
### Steps Overview
|
||||
|
||||
For both providers, the process generally involves:
|
||||
|
||||
1. Registering your application in the provider's developer portal.
|
||||
2. Defining the authorized redirect URL (`SOCIAL_<PROVIDER>_OAUTH_CALLBACK_URL`).
|
||||
3. Copying the generated `CLIENT_ID` and `CLIENT_SECRET` into the corresponding environment variables.
|
||||
|
||||
Once completed, ensure your environment variables are correctly loaded in your Prowler deployment to activate social login.
|
||||
@@ -9,23 +9,11 @@ You can also access to the auto-generated **Prowler API** documentation at [http
|
||||
If you are a [Prowler Cloud](https://cloud.prowler.com/sign-in) user you can see API docs at [https://api.prowler.com/api/v1/docs](https://api.prowler.com/api/v1/docs)
|
||||
|
||||
## **Step 1: Sign Up**
|
||||
### **Sign up with Email**
|
||||
To get started, sign up using your email and password:
|
||||
|
||||
<img src="../../img/sign-up-button.png" alt="Sign Up Button" width="320"/>
|
||||
<img src="../../img/sign-up.png" alt="Sign Up" width="285"/>
|
||||
|
||||
### **Sign up with Social Login**
|
||||
|
||||
If Social Login is enabled, you can sign up using your preferred provider (e.g., Google, GitHub).
|
||||
|
||||
???+ note "How Social Login Works"
|
||||
- If your email is already registered, you will be logged in, and your social account will be linked.
|
||||
- If your email is not registered, a new account will be created using your social account email.
|
||||
|
||||
???+ note "Enable Social Login"
|
||||
See [how to configure Social Login for Prowler](prowler-app-social-login.md) to enable this feature in your own deployments.
|
||||
|
||||
---
|
||||
|
||||
## **Step 2: Log In**
|
||||
@@ -184,18 +172,3 @@ While the scan is running, start exploring the findings in these sections:
|
||||
- **Browse All Findings**: Detailed list of findings detected, where you can filter by severity, service, and more. <img src="../../img/findings.png" alt="Findings" width="700"/>
|
||||
|
||||
To view all `new` findings that have not been seen prior to this scan, click the `Delta` filter and select `new`. To view all `changed` findings that have had a status change (from `PASS` to `FAIL` for example), click the `Delta` filter and select `changed`.
|
||||
|
||||
## **Step 9: Download the Outputs**
|
||||
|
||||
Once the scan is complete, you can download the output files generated by Prowler as a single `zip` file. This archive contains the CSV, JSON-OSCF, and HTML reports detailing the findings.
|
||||
|
||||
To download these files, click the **Download** button. This button becomes available only after the scan has finished.
|
||||
|
||||
<img src="../../img/download_output.png" alt="Download output" width="700"/>
|
||||
|
||||
This action downloads a `zip` file containing an `output` folder, which includes the files mentioned above: CSV, JSON-OSCF, and HTML reports.
|
||||
|
||||
<img src="../../img/output_folder.png" alt="Output folder" width="700"/>
|
||||
|
||||
???+ note "API Note"
|
||||
To learn more about the API endpoint the UI uses to download ZIP exports, see: [Prowler API Reference - Download Scan Output](https://api.prowler.com/api/v1/docs#tag/Scan/operation/scans_report_retrieve)
|
||||
|
||||
@@ -120,147 +120,117 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc
|
||||
|
||||
```json
|
||||
[{
|
||||
"message": "Potential secrets found in ECS task definition manufacturer-api with revision 7: Secrets in container manufacturer-api -> Secret Keyword on the environment variable DB_PASSWORD.",
|
||||
"metadata": {
|
||||
"event_code": "ecs_task_definitions_no_environment_secrets",
|
||||
"product": {
|
||||
"name": "Prowler",
|
||||
"uid": "prowler",
|
||||
"vendor_name": "Prowler",
|
||||
"version": "5.3.0"
|
||||
},
|
||||
"profiles": [
|
||||
"cloud",
|
||||
"datetime"
|
||||
],
|
||||
"tenant_uid": "",
|
||||
"version": "1.3.0"
|
||||
},
|
||||
"severity_id": 5,
|
||||
"severity": "Critical",
|
||||
"status": "New",
|
||||
"status_code": "FAIL",
|
||||
"status_detail": "Potential secrets found in ECS task definition manufacturer-api with revision 7: Secrets in container manufacturer-api -> Secret Keyword on the environment variable DB_PASSWORD.",
|
||||
"status_id": 1,
|
||||
"unmapped": {
|
||||
"related_url": "",
|
||||
"categories": [
|
||||
"secrets"
|
||||
],
|
||||
"depends_on": [],
|
||||
"related_to": [],
|
||||
"notes": "",
|
||||
"compliance": {
|
||||
"MITRE-ATTACK": [
|
||||
"T1552"
|
||||
],
|
||||
"AWS-Foundational-Security-Best-Practices": [
|
||||
"ecs"
|
||||
],
|
||||
"KISA-ISMS-P-2023": [
|
||||
"2.7.1",
|
||||
"2.11.2"
|
||||
],
|
||||
"KISA-ISMS-P-2023-korean": [
|
||||
"2.7.1",
|
||||
"2.11.2"
|
||||
],
|
||||
"AWS-Well-Architected-Framework-Security-Pillar": [
|
||||
"SEC02-BP03"
|
||||
]
|
||||
}
|
||||
},
|
||||
"activity_name": "Create",
|
||||
"activity_id": 1,
|
||||
"finding_info": {
|
||||
"created_time": 1737995806,
|
||||
"created_time_dt": "2025-01-27T17:36:46.855898",
|
||||
"desc": "Check if secrets exists in ECS task definitions environment variables.",
|
||||
"product_uid": "prowler",
|
||||
"title": "Check if secrets exists in ECS task definitions environment variables",
|
||||
"types": [
|
||||
"Protect",
|
||||
"Secure development",
|
||||
"Credentials not hard-coded"
|
||||
],
|
||||
"uid": "prowler-aws-ecs_task_definitions_no_environment_secrets-123456789012-eu-central-1-manufacturer-api:7"
|
||||
},
|
||||
"resources": [
|
||||
{
|
||||
"cloud_partition": "aws",
|
||||
"region": "eu-central-1",
|
||||
"data": {
|
||||
"details": "",
|
||||
"metadata": {
|
||||
"name": "manufacturer-api",
|
||||
"arn": "arn:aws:ecs:eu-central-1:123456789012:task-definition/manufacturer-api:7",
|
||||
"revision": "7",
|
||||
"region": "eu-central-1",
|
||||
"container_definitions": [
|
||||
{
|
||||
"name": "manufacturer-api",
|
||||
"privileged": false,
|
||||
"readonly_rootfilesystem": false,
|
||||
"user": "",
|
||||
"environment": [
|
||||
{
|
||||
"name": "DB_HOST",
|
||||
"value": "some.cluster.eu-central-1.rds.amazonaws.com"
|
||||
},
|
||||
{
|
||||
"name": "DB_PASSWORD",
|
||||
"value": "somePassword"
|
||||
}
|
||||
],
|
||||
"log_driver": "",
|
||||
"log_option": ""
|
||||
}
|
||||
],
|
||||
"pid_mode": "",
|
||||
"tags": [],
|
||||
"network_mode": "awsvpc"
|
||||
}
|
||||
},
|
||||
"group": {
|
||||
"name": "ecs"
|
||||
},
|
||||
"labels": [],
|
||||
"name": "manufacturer-api:7",
|
||||
"type": "AwsEcsTaskDefinition",
|
||||
"uid": "arn:aws:ecs:eu-central-1:123456789012:task-definition/manufacturer-api:7"
|
||||
}
|
||||
],
|
||||
"category_name": "Findings",
|
||||
"category_uid": 2,
|
||||
"class_name": "Detection Finding",
|
||||
"class_uid": 2004,
|
||||
"cloud": {
|
||||
"account": {
|
||||
"name": "",
|
||||
"type": "AWS Account",
|
||||
"type_id": 10,
|
||||
"uid": "123456789012",
|
||||
"labels": []
|
||||
},
|
||||
"org": {
|
||||
"name": "",
|
||||
"uid": ""
|
||||
},
|
||||
"provider": "aws",
|
||||
"region": "eu-central-1"
|
||||
},
|
||||
"remediation": {
|
||||
"desc": "Use Secrets Manager or Parameter Store to securely provide credentials to containers without hardcoding the secrets in code or passing them through environment variables. It is currently not possible to delete task definition revisions which contain plaintext secrets. AWS is looking into implementing this feature in 2023, and it is therefore recommended that all plaintext secrets are rotated at the same time as moving the secrets to Secrets Manager or Parameter Store.",
|
||||
"references": [
|
||||
"https://docs.aws.amazon.com/AmazonECS/latest/developerguide/specifying-sensitive-data.html"
|
||||
]
|
||||
},
|
||||
"risk_details": "The use of a hard-coded password increases the possibility of password guessing. If hard-coded passwords are used, it is possible that malicious users gain access through the account in question.",
|
||||
"time": 1737995806,
|
||||
"time_dt": "2025-01-27T17:36:46.855898",
|
||||
"type_uid": 200401,
|
||||
"type_name": "Detection Finding: Create"
|
||||
}]
|
||||
"metadata": {
|
||||
"event_code": "cloudtrail_multi_region_enabled",
|
||||
"product": {
|
||||
"name": "Prowler",
|
||||
"vendor_name": "Prowler",
|
||||
"version": "4.2.4"
|
||||
},
|
||||
"version": "1.1.0"
|
||||
},
|
||||
"severity_id": 4,
|
||||
"severity": "High",
|
||||
"status": "New",
|
||||
"status_code": "FAIL",
|
||||
"status_detail": "No CloudTrail trails enabled and logging were found.",
|
||||
"status_id": 1,
|
||||
"activity_name": "Create",
|
||||
"activity_id": 1,
|
||||
"finding_info": {
|
||||
"created_time": "2024-04-08T11:33:51.870861",
|
||||
"desc": "Ensure CloudTrail is enabled in all regions",
|
||||
"product_uid": "prowler",
|
||||
"title": "Ensure CloudTrail is enabled in all regions",
|
||||
"uid": "prowler-aws-cloudtrail_multi_region_enabled-123456789012-ap-northeast-1-123456789012",
|
||||
"types": ["Software and Configuration Checks","Industry and Regulatory Standards","CIS AWS Foundations Benchmark"]
|
||||
},
|
||||
"resources": [
|
||||
{
|
||||
"cloud_partition": "aws",
|
||||
"region": "ap-northeast-1",
|
||||
"group": {
|
||||
"name": "cloudtrail"
|
||||
},
|
||||
"labels": [],
|
||||
"name": "123456789012",
|
||||
"type": "AwsCloudTrailTrail",
|
||||
"uid": "arn:aws:cloudtrail:ap-northeast-1:123456789012:trail",
|
||||
"data": {
|
||||
"details": ""
|
||||
}
|
||||
}
|
||||
],
|
||||
"category_name": "Findings",
|
||||
"category_uid": 2,
|
||||
"class_name": "DetectionFinding",
|
||||
"class_uid": 2004,
|
||||
"cloud": {
|
||||
"account": {
|
||||
"name": "test-account",
|
||||
"type": "AWS_Account",
|
||||
"type_id": 10,
|
||||
"uid": "123456789012"
|
||||
},
|
||||
"org": {
|
||||
"name": "",
|
||||
"uid": ""
|
||||
},
|
||||
"provider": "aws",
|
||||
"region": "ap-northeast-1"
|
||||
},
|
||||
"event_time": "2024-04-08T11:33:51.870861",
|
||||
"remediation": {
|
||||
"desc": "Ensure Logging is set to ON on all regions (even if they are not being used at the moment).",
|
||||
"references": [
|
||||
"aws cloudtrail create-trail --name <trail_name> --bucket-name <s3_bucket_for_cloudtrail> --is-multi-region-trail aws cloudtrail update-trail --name <trail_name> --is-multi-region-trail ",
|
||||
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrailconcepts.html#cloudtrail-concepts-management-events"
|
||||
]
|
||||
},
|
||||
"type_uid": 200401,
|
||||
"type_name": "Create",
|
||||
"unmapped": {
|
||||
"related_url": "",
|
||||
"categories": ["forensics-ready"],
|
||||
"depends_on": [],
|
||||
"related_to": [],
|
||||
"notes": "",
|
||||
"compliance": {
|
||||
"CISA": [
|
||||
"your-systems-3",
|
||||
"your-data-2"
|
||||
],
|
||||
"SOC2": [
|
||||
"cc_2_1",
|
||||
"cc_7_2",
|
||||
"cc_a_1_2"
|
||||
],
|
||||
"CIS-1.4": [
|
||||
"3.1"
|
||||
],
|
||||
"CIS-1.5": [
|
||||
"3.1"
|
||||
],
|
||||
"GDPR": [
|
||||
"article_25",
|
||||
"article_30"
|
||||
],
|
||||
"AWS-Foundational-Security-Best-Practices": [
|
||||
"cloudtrail"
|
||||
],
|
||||
"ISO27001-2013": [
|
||||
"A.12.4"
|
||||
],
|
||||
"HIPAA": [
|
||||
"164_308_a_1_ii_d",
|
||||
"164_308_a_3_ii_a",
|
||||
"164_308_a_6_ii",
|
||||
"164_312_b",
|
||||
"164_312_e_2_i"
|
||||
]
|
||||
}
|
||||
}
|
||||
}]
|
||||
```
|
||||
|
||||
???+ note
|
||||
@@ -361,7 +331,7 @@ The following is the mapping between the native JSON and the Detection Finding f
|
||||
|
||||
| Native JSON Prowler v3 | JSON-OCSF v.1.1.0 |
|
||||
| --- |---|
|
||||
| AssessmentStartTime | time_dt |
|
||||
| AssessmentStartTime | event_time |
|
||||
| FindingUniqueId | finding_info.uid |
|
||||
| Provider | cloud.provider |
|
||||
| CheckID | metadata.event_code |
|
||||
|
||||
40
mkdocs.yml
@@ -49,28 +49,24 @@ nav:
|
||||
- Overview: index.md
|
||||
- Requirements: getting-started/requirements.md
|
||||
- Tutorials:
|
||||
- Prowler App:
|
||||
- Getting Started: tutorials/prowler-app.md
|
||||
- Role-Based Access Control: tutorials/prowler-app-rbac.md
|
||||
- Social Login: tutorials/prowler-app-social-login.md
|
||||
- CLI:
|
||||
- Miscellaneous: tutorials/misc.md
|
||||
- Reporting: tutorials/reporting.md
|
||||
- Compliance: tutorials/compliance.md
|
||||
- Dashboard: tutorials/dashboard.md
|
||||
- Fixer (remediations): tutorials/fixer.md
|
||||
- Quick Inventory: tutorials/quick-inventory.md
|
||||
- Slack Integration: tutorials/integrations.md
|
||||
- Configuration File: tutorials/configuration_file.md
|
||||
- Logging: tutorials/logging.md
|
||||
- Mutelist: tutorials/mutelist.md
|
||||
- Check Aliases: tutorials/check-aliases.md
|
||||
- Custom Metadata: tutorials/custom-checks-metadata.md
|
||||
- Scan Unused Services: tutorials/scan-unused-services.md
|
||||
- Pentesting: tutorials/pentesting.md
|
||||
- Parallel Execution: tutorials/parallel-execution.md
|
||||
- Developer Guide: developer-guide/introduction.md
|
||||
- Prowler Check Kreator: tutorials/prowler-check-kreator.md
|
||||
- Prowler App: tutorials/prowler-app.md
|
||||
- Miscellaneous: tutorials/misc.md
|
||||
- Reporting: tutorials/reporting.md
|
||||
- Compliance: tutorials/compliance.md
|
||||
- Dashboard: tutorials/dashboard.md
|
||||
- Fixer (remediations): tutorials/fixer.md
|
||||
- Quick Inventory: tutorials/quick-inventory.md
|
||||
- Slack Integration: tutorials/integrations.md
|
||||
- Configuration File: tutorials/configuration_file.md
|
||||
- Logging: tutorials/logging.md
|
||||
- Mutelist: tutorials/mutelist.md
|
||||
- Check Aliases: tutorials/check-aliases.md
|
||||
- Custom Metadata: tutorials/custom-checks-metadata.md
|
||||
- Scan Unused Services: tutorials/scan-unused-services.md
|
||||
- Pentesting: tutorials/pentesting.md
|
||||
- Parallel Execution: tutorials/parallel-execution.md
|
||||
- Developer Guide: developer-guide/introduction.md
|
||||
- Prowler Check Kreator: tutorials/prowler-check-kreator.md
|
||||
- AWS:
|
||||
- Authentication: tutorials/aws/authentication.md
|
||||
- Assume Role: tutorials/aws/role-assumption.md
|
||||
|
||||
371
poetry.lock
generated
@@ -58,11 +58,6 @@ from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS
|
||||
from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS
|
||||
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
|
||||
from prowler.lib.outputs.compliance.iso27001.iso27001_kubernetes import (
|
||||
KubernetesISO27001,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
|
||||
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
|
||||
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
|
||||
@@ -526,19 +521,6 @@ def prowler():
|
||||
)
|
||||
generated_outputs["compliance"].append(ens)
|
||||
ens.batch_write_data_to_file()
|
||||
elif compliance_name.startswith("iso27001_"):
|
||||
# Generate ISO27001 Finding Object
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
f"{output_options.output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
iso27001 = AzureISO27001(
|
||||
findings=finding_outputs,
|
||||
compliance=bulk_compliance_frameworks[compliance_name],
|
||||
file_path=filename,
|
||||
)
|
||||
generated_outputs["compliance"].append(iso27001)
|
||||
iso27001.batch_write_data_to_file()
|
||||
else:
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
@@ -593,19 +575,6 @@ def prowler():
|
||||
)
|
||||
generated_outputs["compliance"].append(ens)
|
||||
ens.batch_write_data_to_file()
|
||||
elif compliance_name.startswith("iso27001_"):
|
||||
# Generate ISO27001 Finding Object
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
f"{output_options.output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
iso27001 = GCPISO27001(
|
||||
findings=finding_outputs,
|
||||
compliance=bulk_compliance_frameworks[compliance_name],
|
||||
file_path=filename,
|
||||
)
|
||||
generated_outputs["compliance"].append(iso27001)
|
||||
iso27001.batch_write_data_to_file()
|
||||
else:
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
@@ -634,19 +603,6 @@ def prowler():
|
||||
)
|
||||
generated_outputs["compliance"].append(cis)
|
||||
cis.batch_write_data_to_file()
|
||||
elif compliance_name.startswith("iso27001_"):
|
||||
# Generate ISO27001 Finding Object
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
f"{output_options.output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
iso27001 = KubernetesISO27001(
|
||||
findings=finding_outputs,
|
||||
compliance=bulk_compliance_frameworks[compliance_name],
|
||||
file_path=filename,
|
||||
)
|
||||
generated_outputs["compliance"].append(iso27001)
|
||||
iso27001.batch_write_data_to_file()
|
||||
else:
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
|
||||