Compare commits

`api-add-mi` ... `PRWLR-7635` (144 commits)
| SHA1 |
|---|
| d9865d2816 |
| cb66b3289b |
| 5a51a6f76f |
| 6ce9e144d0 |
| e5b8215819 |
| 946f49df44 |
| e4d3a51b9e |
| 3ff810405a |
| 0f30b4fe79 |
| ad3f0d7d92 |
| 834a7d3b69 |
| 24a50c6ac2 |
| ec8afd773f |
| a09be4c0ba |
| 4b62fdcf53 |
| bf0013dae3 |
| c82cd5288c |
| ad31a6b3f5 |
| 20c7c9f8de |
| 0cfe41e452 |
| 1b254feadc |
| 15954d8a01 |
| ff122c9779 |
| a012397e55 |
| 7da6d7b5dd |
| db6a27d1f5 |
| e07c833cab |
| 728fc9d6ff |
| cf9ff78605 |
| a2faf548af |
| 8bcec4926b |
| a4e96f809b |
| fa27255dd7 |
| 05360e469f |
| 9d405ddcbd |
| 430f831543 |
| da9d7199b7 |
| d63a383ec6 |
| 55c226029e |
| 8d2f6aa30c |
| a319f80701 |
| 15a8671f0d |
| d34e709d91 |
| ddc53c3c6d |
| a3aef18cfe |
| 49ca3ca325 |
| 89c67079a3 |
| 2de8075d87 |
| e124275dbf |
| 760d28e752 |
| 3fb0733887 |
| 7de9a37edb |
| fe00b788cc |
| 4c50f4d811 |
| c0c736bffe |
| a3aa7d0a63 |
| 3ceb86c4d9 |
| 3628e7b3e8 |
| f29c2ac9f0 |
| b4927c3ad1 |
| 19f3c1d310 |
| cd97e57521 |
| b38207507a |
| ab96e0aac0 |
| 4477cecc59 |
| 641d671312 |
| e7c2fa0699 |
| 7eb08b0f14 |
| 6f3112f754 |
| f5ecae6da1 |
| 1c75f6b804 |
| 91b64d8572 |
| 233ae74560 |
| fac97f9785 |
| e81c7a3893 |
| d6f26df2e8 |
| ece74e15fd |
| eea6d07259 |
| 4a6d7a5be2 |
| 883c5d4e56 |
| f1f998c2fa |
| 5276e38f1d |
| ad98a4747f |
| 5798321dc6 |
| bf58728d29 |
| fcea3b6570 |
| 965111245a |
| f78a29206c |
| c719d705e0 |
| 8948ee6868 |
| 24fb31e98f |
| c8b193e658 |
| 6d27738c4d |
| 17b7becfdf |
| cfa7f271d2 |
| e61a97cb65 |
| cd4a1ad8a7 |
| e650d19a30 |
| f930739a3d |
| 89fc698a0e |
| 6acb6bbf8e |
| 971424f822 |
| 9ba1ae1ced |
| 062db4cc70 |
| dc4db10c41 |
| 68a542ef64 |
| 32f3787e18 |
| 6792bea319 |
| ae4b43c137 |
| d576c4f1c4 |
| ddc0596aa2 |
| 636bdb6d0a |
| 4a839b0146 |
| 73e244dce5 |
| d8ed70236b |
| bcc96ab4f2 |
| fd53a8c9d0 |
| 7b58d1dd56 |
| 7858c147f7 |
| 8e635b3bd4 |
| 2e97e37316 |
| cd804836a1 |
| d102ee2fd5 |
| 325e5739a2 |
| 98da3059b4 |
| 80fd5d1ba6 |
| 85242c7909 |
| ea6ab406c8 |
| cbf2a28bac |
| 5b1e7bb7f9 |
| e108b2caed |
| df1abb2152 |
| e0465f2aa2 |
| 51467767cd |
| bc71e7fb3b |
| 6a331c05e8 |
| 7ab503a096 |
| b368190c9f |
| 8915fdff18 |
| 9bf108e9cc |
| 87708e39cf |
| 44927c44e9 |
| 71aa29cf24 |
| aa14daf0db |
**`.env`** (6 changes)

```diff
@@ -6,6 +6,7 @@
 PROWLER_UI_VERSION="stable"
 AUTH_URL=http://localhost:3000
 API_BASE_URL=http://prowler-api:8080/api/v1
+NEXT_PUBLIC_API_BASE_URL=${API_BASE_URL}
 NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
 AUTH_TRUST_HOST=true
 UI_PORT=3000
@@ -130,7 +131,7 @@ SENTRY_ENVIRONMENT=local
 SENTRY_RELEASE=local
 
 #### Prowler release version ####
-NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.6.0
+NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.7.5
 
 # Social login credentials
 SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
@@ -142,8 +143,7 @@ SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
 SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""
 
 # Single Sign-On (SSO)
 SAML_PUBLIC_CERT=""
 SAML_PRIVATE_KEY=""
 SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"
 
 # Lighthouse tracing
 LANGSMITH_TRACING=false
```
*(file name not captured)*

```diff
@@ -76,7 +76,7 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
 
       - name: Build and push container image (latest)
         # Comment the following line for testing
```
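Many hunks in this compare follow the same pattern: third-party actions are pinned to a full commit SHA, with the human-readable tag kept only as a trailing comment (`# v3.11.1`, `# v3.29.2`, and so on), so a retagged or compromised release cannot silently change what the workflow runs. As a minimal sketch of where such a pin comes from, assuming only the public GitHub REST API (the helper name is hypothetical):

```python
import json
import urllib.request

def resolve_action_sha(repo: str, ref: str) -> str:
    """Return the commit SHA that a tag such as 'v3.11.1' points to.

    Hypothetical helper: queries the public GitHub REST API endpoint
    GET /repos/{owner}/{repo}/commits/{ref}.
    """
    url = f"https://api.github.com/repos/{repo}/commits/{ref}"
    with urllib.request.urlopen(url) as resp:
        return json.load(resp)["sha"]

# The SHA printed here is what goes after the '@' in the workflow step;
# the tag survives only as a comment for readability.
print(resolve_action_sha("docker/setup-buildx-action", "v3.11.1"))
```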
**`.github/workflows/api-codeql.yml`** (4 changes, vendored)

```diff
@@ -48,12 +48,12 @@ jobs:
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
         with:
           languages: ${{ matrix.language }}
           config-file: ./.github/codeql/api-codeql-config.yml
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
         with:
           category: "/language:${{matrix.language}}"
```
**`.github/workflows/api-pull-request.yml`** (12 changes, vendored)

```diff
@@ -136,12 +136,6 @@ jobs:
         run: |
           poetry check --lock
 
-      - name: Prevents known compatibility error between lxml and libxml2/libxmlsec versions - https://github.com/xmlsec/python-xmlsec/issues/320
-        working-directory: ./api
-        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        run: |
-          poetry run pip install --force-reinstall --no-binary lxml lxml
-
       - name: Lint with ruff
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
@@ -169,9 +163,9 @@ jobs:
       - name: Safety
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        # 76352 and 76353 come from SDK, but they cannot upgrade it yet. It does not affect API
+        # 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
         run: |
-          poetry run safety check --ignore 70612,66963,74429,76352,76353
+          poetry run safety check --ignore 70612,66963,74429,76352,76353,77323
 
       - name: Vulture
         working-directory: ./api
@@ -211,7 +205,7 @@ jobs:
           files_ignore: ${{ env.IGNORE_FILES }}
       - name: Set up Docker Buildx
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
       - name: Build Container
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
```
**`.github/workflows/find-secrets.yml`** (2 changes, vendored)

```diff
@@ -11,7 +11,7 @@ jobs:
         with:
           fetch-depth: 0
       - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@90694bf9af66e7536abc5824e7a87246dbf933cb # v3.88.35
+        uses: trufflesecurity/trufflehog@6641d4ba5b684fffe195b9820345de1bf19f3181 # v3.89.2
         with:
           path: ./
           base: ${{ github.event.repository.default_branch }}
```
*(file name not captured)*

```diff
@@ -9,6 +9,8 @@ jobs:
     if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
     runs-on: ubuntu-latest
     permissions:
       id-token: write
       contents: read
       pull-requests: write
     env:
       MONITORED_FOLDERS: "api ui prowler"
@@ -45,6 +47,7 @@
           echo "EOF" >> $GITHUB_OUTPUT
 
       - name: Find existing changelog comment
+        if: github.event.pull_request.head.repo.full_name == github.repository
         id: find_comment
         uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
         with:
@@ -53,7 +56,7 @@
           body-includes: '<!-- changelog-check -->'
 
       - name: Comment on PR if changelog is missing
-        if: steps.check_folders.outputs.missing_changelogs != ''
+        if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs != ''
         uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
         with:
           issue-number: ${{ github.event.pull_request.number }}
@@ -67,7 +70,7 @@
           Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.
 
       - name: Comment on PR if all changelogs are present
-        if: steps.check_folders.outputs.missing_changelogs == ''
+        if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs == ''
         uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
         with:
           issue-number: ${{ github.event.pull_request.number }}
```
*(file name not captured)*

```diff
@@ -123,7 +123,7 @@ jobs:
           AWS_REGION: ${{ env.AWS_REGION }}
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
 
       - name: Build and push container image (latest)
         if: github.event_name == 'push'
```
**`.github/workflows/sdk-bump-version.yml`** (2 changes, vendored)

```diff
@@ -97,6 +97,7 @@ jobs:
           commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
           branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
           title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
+          labels: no-changelog
           body: |
             ### Description
@@ -135,6 +136,7 @@
           commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
           branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
           title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
+          labels: no-changelog
           body: |
             ### Description
```
**`.github/workflows/sdk-codeql.yml`** (4 changes, vendored)

```diff
@@ -56,12 +56,12 @@ jobs:
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
         with:
           languages: ${{ matrix.language }}
           config-file: ./.github/codeql/sdk-codeql-config.yml
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
         with:
           category: "/language:${{matrix.language}}"
```
**`.github/workflows/sdk-pull-request.yml`** (9 changes, vendored)

```diff
@@ -102,8 +102,15 @@ jobs:
         run: |
           poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
 
+      - name: Dockerfile - Check if Dockerfile has changed
+        id: dockerfile-changed-files
+        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        with:
+          files: |
+            Dockerfile
+
       - name: Hadolint
-        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
+        if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
         run: |
           /tmp/hadolint Dockerfile --ignore=DL3013
```
*(file name not captured)*

```diff
@@ -30,6 +30,7 @@ env:
   # Container Registries
   PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
   PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
+  NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1
 
 jobs:
   repository-check:
@@ -76,7 +77,7 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
 
       - name: Build and push container image (latest)
         # Comment the following line for testing
@@ -86,6 +87,7 @@
           context: ${{ env.WORKING_DIRECTORY }}
           build-args: |
             NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
+            NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
           # Set push: false for testing
           push: true
           tags: |
@@ -101,6 +103,7 @@
           context: ${{ env.WORKING_DIRECTORY }}
           build-args: |
             NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
+            NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
           push: true
           tags: |
             ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
```
**`.github/workflows/ui-codeql.yml`** (4 changes, vendored)

```diff
@@ -48,12 +48,12 @@ jobs:
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
         with:
           languages: ${{ matrix.language }}
           config-file: ./.github/codeql/ui-codeql-config.yml
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
         with:
           category: "/language:${{matrix.language}}"
```
**`.github/workflows/ui-pull-request.yml`** (53 changes, vendored)

```diff
@@ -34,21 +34,70 @@ jobs:
         uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: ${{ matrix.node-version }}
           cache: 'npm'
           cache-dependency-path: './ui/package-lock.json'
       - name: Install dependencies
         working-directory: ./ui
-        run: npm install
+        run: npm ci
       - name: Run Healthcheck
         working-directory: ./ui
         run: npm run healthcheck
       - name: Build the application
         working-directory: ./ui
         run: npm run build
 
+  e2e-tests:
+    runs-on: ubuntu-latest
+    env:
+      AUTH_SECRET: 'fallback-ci-secret-for-testing'
+      AUTH_TRUST_HOST: true
+      NEXTAUTH_URL: http://localhost:3000
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+      - name: Setup Node.js
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        with:
+          node-version: '20.x'
+          cache: 'npm'
+          cache-dependency-path: './ui/package-lock.json'
+      - name: Install dependencies
+        working-directory: ./ui
+        run: npm ci
+      - name: Cache Playwright browsers
+        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
+        id: playwright-cache
+        with:
+          path: ~/.cache/ms-playwright
+          key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
+          restore-keys: |
+            ${{ runner.os }}-playwright-
+      - name: Install Playwright browsers
+        working-directory: ./ui
+        if: steps.playwright-cache.outputs.cache-hit != 'true'
+        run: npm run test:e2e:install
+      - name: Build the application
+        working-directory: ./ui
+        run: npm run build
+      - name: Run Playwright tests
+        working-directory: ./ui
+        run: npm run test:e2e
+      - name: Upload Playwright report
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
+        if: failure()
+        with:
+          name: playwright-report
+          path: ui/playwright-report/
+          retention-days: 30
+
   test-container-build:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
       - name: Build Container
         uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
```
**`.gitignore`** (10 changes, vendored)

```diff
@@ -44,6 +44,16 @@ junit-reports/
 
 # Cursor files
 .cursorignore
 .cursor/
 
+# RooCode files
+.roo/
+.rooignore
+.roomodes
+
+# Cline files
+.cline/
+.clineignore
+
 # Terraform
 .terraform*
```
*(file name not captured)*

```diff
@@ -1,4 +1,4 @@
-FROM python:3.12.10-slim-bookworm AS build
+FROM python:3.12.11-slim-bookworm AS build
 
 LABEL maintainer="https://github.com/prowler-cloud/prowler"
 LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
@@ -6,7 +6,8 @@ LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
 ARG POWERSHELL_VERSION=7.5.0
 
 # hadolint ignore=DL3008
-RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
     && rm -rf /var/lib/apt/lists/*
 
 # Install PowerShell
@@ -46,10 +47,6 @@ ENV PATH="${HOME}/.local/bin:${PATH}"
 RUN pip install --no-cache-dir --upgrade pip && \
     pip install --no-cache-dir poetry
 
-# By default poetry does not compile Python source files to bytecode during installation.
-# This speeds up the installation process, but the first execution may take a little more
-# time because Python then compiles source files to bytecode automatically. If you want to
-# compile source files to bytecode during installation, you can use the --compile option
 RUN poetry install --compile && \
     rm -rf ~/.cache/pip
```
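The comment block removed above documented why `poetry install --compile` is used: without it, Python byte-compiles sources lazily on first run inside the container. For reference, the standard library exposes the same precompilation step directly; a minimal sketch, with an illustrative path:

```python
import compileall

# Byte-compile every .py file under a site-packages-like directory ahead of
# time, which is what `poetry install --compile` does at install time.
# The directory below is illustrative, not the image's actual layout.
compileall.compile_dir("/home/prowler/.local/lib", quiet=1)
```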
*(file name not captured)*

```diff
@@ -136,6 +136,14 @@ If your workstation's architecture is incompatible, you can resolve this by:
 
 > Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
 
+### Common Issues with Docker Pull Installation
+
+> [!Note]
+> If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.md) section for more details and examples.
+
+You can find more information in the [Troubleshooting](./docs/troubleshooting.md) section.
+
 ### From GitHub
 
 **Requirements**
```
*(file name not captured)*

```diff
@@ -2,16 +2,59 @@
 
 All notable changes to the **Prowler API** are documented in this file.
 
-## [v1.9.0] (Prowler UNRELEASED)
+## [v1.10.0] (Prowler UNRELEASED)
 
 ### Added
 - SSO with SAML support [(#8175)](https://github.com/prowler-cloud/prowler/pull/8175)
 - `GET /resources/metadata`, `GET /resources/metadata/latest` and `GET /resources/latest` to expose resource metadata and latest scan results [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
 
 ### Changed
 - `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported to allow to mute findings.
 - Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
 - Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
 
 ### Fixed
 - Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
 - RBAC is now applied to `GET /overviews/providers` [(#8277)](https://github.com/prowler-cloud/prowler/pull/8277)
 
 ### Changed
 - `POST /schedules/daily` returns a `409 CONFLICT` if already created [(#8258)](https://github.com/prowler-cloud/prowler/pull/8258)
 
 ### Security
 - Enhanced password validation to enforce 12+ character passwords with special characters, uppercase, lowercase, and numbers [(#8225)](https://github.com/prowler-cloud/prowler/pull/8225)
 
 ---
 
 ## [v1.9.1] (Prowler v5.8.1)
 
 ### Added
 - Custom exception for provider connection errors during scans [(#8234)](https://github.com/prowler-cloud/prowler/pull/8234)
 
 ### Changed
 - Summary and overview tasks now use a dedicated queue and no longer propagate errors to compliance tasks [(#8214)](https://github.com/prowler-cloud/prowler/pull/8214)
 
 ### Fixed
 - Scan with no resources will not trigger legacy code for findings metadata [(#8183)](https://github.com/prowler-cloud/prowler/pull/8183)
 - Invitation email comparison case-insensitive [(#8206)](https://github.com/prowler-cloud/prowler/pull/8206)
 
 ### Removed
 - Validation of the provider's secret type during updates [(#8197)](https://github.com/prowler-cloud/prowler/pull/8197)
 
 ---
 
 ## [v1.9.0] (Prowler v5.8.0)
 
 ### Added
 - SSO with SAML support [(#7822)](https://github.com/prowler-cloud/prowler/pull/7822)
 - Support GCP Service Account key [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
 - `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
 - Lighthouse configuration support [(#7848)](https://github.com/prowler-cloud/prowler/pull/7848)
 
 ### Changed
 - Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
 - Optional `user` and `password` for M365 provider [(#7992)](https://github.com/prowler-cloud/prowler/pull/7992)
 
 ### Fixed
 - Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)
 
 ---
```
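Two of the changelog entries above describe behavior that is directly observable from a client: the new `GET /resources/metadata*` endpoints (#8112) and the `409 CONFLICT` now returned by `POST /schedules/daily` when the schedule already exists (#8258). A minimal sketch against a local API; the base URL and auth header are assumptions, and the request body is omitted:

```python
import requests

BASE = "http://localhost:8080/api/v1"          # local API base, illustrative
HEADERS = {"Authorization": "Bearer <token>"}  # token acquisition omitted

# Added in v1.10.0 (#8112): resource metadata from the latest scans.
resp = requests.get(f"{BASE}/resources/metadata/latest", headers=HEADERS)
resp.raise_for_status()
print(resp.json())

# Changed in v1.10.0 (#8258): re-creating the daily schedule no longer
# duplicates it; the API now answers 409 CONFLICT.
resp = requests.post(f"{BASE}/schedules/daily", headers=HEADERS,
                     json={})  # request body omitted; see the API docs
if resp.status_code == 409:
    print("daily schedule already exists")
```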
*(file name not captured)*

```diff
@@ -57,10 +57,6 @@ RUN poetry install --no-root && \
 
 RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
 
-# Prevents known compatibility error between lxml and libxml2/libxmlsec versions.
-# See: https://github.com/xmlsec/python-xmlsec/issues/320
-RUN poetry run pip install --force-reinstall --no-binary lxml lxml
-
 COPY src/backend/ ./backend/
 COPY docker-entrypoint.sh ./docker-entrypoint.sh
```
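This removes the lxml source-build workaround from the image, mirroring its removal from `api-pull-request.yml` above; the `api/poetry.lock` diff below pins lxml back to 5.3.2, which sidesteps the libxml2/xmlsec mismatch. A quick runtime check for that mismatch, as a sketch:

```python
import lxml.etree as etree

# lxml's binary wheels bundle their own libxml2, while python-xmlsec links
# against the system one; when the two diverge, imports fail as described in
# https://github.com/xmlsec/python-xmlsec/issues/320.
print("lxml:              ", etree.LXML_VERSION)
print("libxml2 (compiled):", etree.LIBXML_COMPILED_VERSION)
print("libxml2 (runtime): ", etree.LIBXML_VERSION)
```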
*(file name not captured)*

````diff
@@ -257,7 +257,7 @@ cd src/backend
 python manage.py loaddata api/fixtures/0_dev_users.json --database admin
 ```
 
-> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`
+> The default credentials are `dev@prowler.com:Thisisapassword123@` or `dev2@prowler.com:Thisisapassword123@`
 
 ## Run tests
````
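The stronger fixture passwords line up with the password policy called out in the API changelog (#8225): at least 12 characters, mixing uppercase, lowercase, digits, and special characters. A sketch of such a check, which assumes nothing about the API's actual validator:

```python
import re

def is_valid_password(password: str) -> bool:
    """Illustrative check mirroring the documented policy (#8225):
    12+ characters with upper, lower, digit, and special characters."""
    return (
        len(password) >= 12
        and re.search(r"[a-z]", password) is not None
        and re.search(r"[A-Z]", password) is not None
        and re.search(r"\d", password) is not None
        and re.search(r"[^a-zA-Z0-9]", password) is not None
    )

assert not is_valid_password("thisisapassword123")  # old fixture value
assert is_valid_password("Thisisapassword123@")     # new fixture value
```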
*(file name not captured)*

```diff
@@ -32,7 +32,7 @@ start_prod_server() {
 
 start_worker() {
   echo "Starting the worker..."
-  poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill -E --max-tasks-per-child 1
+  poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview -E --max-tasks-per-child 1
 }
 
 start_worker_beat() {
```
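The worker now also consumes a dedicated `overview` queue, which matches the v1.9.1 changelog note that summary and overview tasks were isolated so their failures cannot propagate to compliance tasks (#8214). A sketch of how such routing is declared in Celery; the task name below is illustrative, not Prowler's actual task:

```python
from celery import Celery

app = Celery("config")

# Send overview/summary work to its own queue; the worker picks it up via
# `-Q celery,scans,scan-reports,deletion,backfill,overview` as above, so a
# failing overview task never blocks the other queues.
app.conf.task_routes = {
    "tasks.aggregate_scan_overview": {"queue": "overview"},  # hypothetical name
}
```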
**`api/poetry.lock`** (440 changes, generated)

```diff
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
 
 [[package]]
 name = "about-time"
@@ -880,7 +880,6 @@ description = "Foreign Function Interface for Python calling C code."
 optional = false
 python-versions = ">=3.8"
 groups = ["main", "dev"]
-markers = "platform_python_implementation != \"PyPy\""
 files = [
     {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
     {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
@@ -950,6 +949,7 @@ files = [
     {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
     {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
 ]
+markers = {dev = "platform_python_implementation != \"PyPy\""}
 
 [package.dependencies]
 pycparser = "*"
@@ -2684,144 +2684,150 @@ adal = ["adal (>=1.0.2)"]
 
 [[package]]
 name = "lxml"
-version = "5.4.0"
+version = "5.3.2"
 description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
 optional = false
 python-versions = ">=3.6"
 groups = ["main"]
 files = [
-    {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"},
-    {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c"},
-    {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b"},
-    {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b"},
-    {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563"},
-    {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5"},
-    {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776"},
-    {file = "lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7"},
-    {file = "lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250"},
-    {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9"},
-    {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8"},
-    {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86"},
-    {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056"},
-    {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7"},
-    {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd"},
-    {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751"},
-    {file = "lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4"},
-    {file = "lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539"},
-    {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4"},
-    {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7"},
-    {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079"},
-    {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20"},
-    {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8"},
-    {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f"},
-    {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc"},
-    {file = "lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f"},
-    {file = "lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2"},
-    {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0"},
-    {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8"},
-    {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982"},
-    {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61"},
-    {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54"},
-    {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b"},
-    {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a"},
-    {file = "lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82"},
-    {file = "lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f"},
-    {file = "lxml-5.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7be701c24e7f843e6788353c055d806e8bd8466b52907bafe5d13ec6a6dbaecd"},
-    {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb54f7c6bafaa808f27166569b1511fc42701a7713858dddc08afdde9746849e"},
-    {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97dac543661e84a284502e0cf8a67b5c711b0ad5fb661d1bd505c02f8cf716d7"},
-    {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:c70e93fba207106cb16bf852e421c37bbded92acd5964390aad07cb50d60f5cf"},
-    {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9c886b481aefdf818ad44846145f6eaf373a20d200b5ce1a5c8e1bc2d8745410"},
-    {file = "lxml-5.4.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:fa0e294046de09acd6146be0ed6727d1f42ded4ce3ea1e9a19c11b6774eea27c"},
-    {file = "lxml-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:61c7bbf432f09ee44b1ccaa24896d21075e533cd01477966a5ff5a71d88b2f56"},
-    {file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"},
-    {file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"},
-    {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"},
-    {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f7f991a68d20c75cb13c5c9142b2a3f9eb161f1f12a9489c82172d1f133c0"},
-    {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"},
-    {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ac7ba71f9561cd7d7b55e1ea5511543c0282e2b6450f122672a2694621d63b7e"},
-    {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"},
-    {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:ce31158630a6ac85bddd6b830cffd46085ff90498b397bd0a259f59d27a12188"},
-    {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"},
-    {file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"},
-    {file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"},
-    {file = "lxml-5.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eaf24066ad0b30917186420d51e2e3edf4b0e2ea68d8cd885b14dc8afdcf6556"},
-    {file = "lxml-5.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b31a3a77501d86d8ade128abb01082724c0dfd9524f542f2f07d693c9f1175f"},
-    {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e108352e203c7afd0eb91d782582f00a0b16a948d204d4dec8565024fafeea5"},
-    {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11a96c3b3f7551c8a8109aa65e8594e551d5a84c76bf950da33d0fb6dfafab7"},
-    {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ca755eebf0d9e62d6cb013f1261e510317a41bf4650f22963474a663fdfe02aa"},
-    {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4cd915c0fb1bed47b5e6d6edd424ac25856252f09120e3e8ba5154b6b921860e"},
-    {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:226046e386556a45ebc787871d6d2467b32c37ce76c2680f5c608e25823ffc84"},
-    {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b108134b9667bcd71236c5a02aad5ddd073e372fb5d48ea74853e009fe38acb6"},
-    {file = "lxml-5.4.0-cp38-cp38-win32.whl", hash = "sha256:1320091caa89805df7dcb9e908add28166113dcd062590668514dbd510798c88"},
-    {file = "lxml-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:073eb6dcdf1f587d9b88c8c93528b57eccda40209cf9be549d469b942b41d70b"},
-    {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bda3ea44c39eb74e2488297bb39d47186ed01342f0022c8ff407c250ac3f498e"},
-    {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ceaf423b50ecfc23ca00b7f50b64baba85fb3fb91c53e2c9d00bc86150c7e40"},
-    {file = "lxml-5.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:664cdc733bc87449fe781dbb1f309090966c11cc0c0cd7b84af956a02a8a4729"},
-    {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67ed8a40665b84d161bae3181aa2763beea3747f748bca5874b4af4d75998f87"},
-    {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4a3bd174cc9cdaa1afbc4620c049038b441d6ba07629d89a83b408e54c35cd"},
-    {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b0989737a3ba6cf2a16efb857fb0dfa20bc5c542737fddb6d893fde48be45433"},
-    {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:dc0af80267edc68adf85f2a5d9be1cdf062f973db6790c1d065e45025fa26140"},
-    {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:639978bccb04c42677db43c79bdaa23785dc7f9b83bfd87570da8207872f1ce5"},
-    {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a99d86351f9c15e4a901fc56404b485b1462039db59288b203f8c629260a142"},
-    {file = "lxml-5.4.0-cp39-cp39-win32.whl", hash = "sha256:3e6d5557989cdc3ebb5302bbdc42b439733a841891762ded9514e74f60319ad6"},
-    {file = "lxml-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8c9b7f16b63e65bbba889acb436a1034a82d34fa09752d754f88d708eca80e1"},
-    {file = "lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55"},
-    {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740"},
-    {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5"},
-    {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37"},
-    {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571"},
-    {file = "lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4"},
-    {file = "lxml-5.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f11a1526ebd0dee85e7b1e39e39a0cc0d9d03fb527f56d8457f6df48a10dc0c"},
-    {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b4afaf38bf79109bb060d9016fad014a9a48fb244e11b94f74ae366a64d252"},
-    {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de6f6bb8a7840c7bf216fb83eec4e2f79f7325eca8858167b68708b929ab2172"},
-    {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5cca36a194a4eb4e2ed6be36923d3cffd03dcdf477515dea687185506583d4c9"},
-    {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b7c86884ad23d61b025989d99bfdd92a7351de956e01c61307cb87035960bcb1"},
-    {file = "lxml-5.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:53d9469ab5460402c19553b56c3648746774ecd0681b1b27ea74d5d8a3ef5590"},
-    {file = "lxml-5.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56dbdbab0551532bb26c19c914848d7251d73edb507c3079d6805fa8bba5b706"},
-    {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14479c2ad1cb08b62bb941ba8e0e05938524ee3c3114644df905d2331c76cd57"},
-    {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32697d2ea994e0db19c1df9e40275ffe84973e4232b5c274f47e7c1ec9763cdd"},
-    {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24f6df5f24fc3385f622c0c9d63fe34604893bc1a5bdbb2dbf5870f85f9a404a"},
-    {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:151d6c40bc9db11e960619d2bf2ec5829f0aaffb10b41dcf6ad2ce0f3c0b2325"},
-    {file = "lxml-5.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4025bf2884ac4370a3243c5aa8d66d3cb9e15d3ddd0af2d796eccc5f0244390e"},
-    {file = "lxml-5.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9459e6892f59ecea2e2584ee1058f5d8f629446eab52ba2305ae13a32a059530"},
-    {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47fb24cc0f052f0576ea382872b3fc7e1f7e3028e53299ea751839418ade92a6"},
-    {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50441c9de951a153c698b9b99992e806b71c1f36d14b154592580ff4a9d0d877"},
-    {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ab339536aa798b1e17750733663d272038bf28069761d5be57cb4a9b0137b4f8"},
-    {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9776af1aad5a4b4a1317242ee2bea51da54b2a7b7b48674be736d463c999f37d"},
-    {file = "lxml-5.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:63e7968ff83da2eb6fdda967483a7a023aa497d85ad8f05c3ad9b1f2e8c84987"},
-    {file = "lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd"},
+    {file = "lxml-5.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c4b84d6b580a9625dfa47269bf1fd7fbba7ad69e08b16366a46acb005959c395"},
+    {file = "lxml-5.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4c08ecb26e4270a62f81f81899dfff91623d349e433b126931c9c4577169666"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef926e9f11e307b5a7c97b17c5c609a93fb59ffa8337afac8f89e6fe54eb0b37"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:017ceeabe739100379fe6ed38b033cd244ce2da4e7f6f07903421f57da3a19a2"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dae97d9435dc90590f119d056d233c33006b2fd235dd990d5564992261ee7ae8"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:910f39425c6798ce63c93976ae5af5fff6949e2cb446acbd44d6d892103eaea8"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9780de781a0d62a7c3680d07963db3048b919fc9e3726d9cfd97296a65ffce1"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:1a06b0c6ba2e3ca45a009a78a4eb4d6b63831830c0a83dcdc495c13b9ca97d3e"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:4c62d0a34d1110769a1bbaf77871a4b711a6f59c4846064ccb78bc9735978644"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:8f961a4e82f411b14538fe5efc3e6b953e17f5e809c463f0756a0d0e8039b700"},
+    {file = "lxml-5.3.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3dfc78f5f9251b6b8ad37c47d4d0bfe63ceb073a916e5b50a3bf5fd67a703335"},
+    {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10e690bc03214d3537270c88e492b8612d5e41b884f232df2b069b25b09e6711"},
+    {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa837e6ee9534de8d63bc4c1249e83882a7ac22bd24523f83fad68e6ffdf41ae"},
+    {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:da4c9223319400b97a2acdfb10926b807e51b69eb7eb80aad4942c0516934858"},
+    {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dc0e9bdb3aa4d1de703a437576007d366b54f52c9897cae1a3716bb44fc1fc85"},
+    {file = "lxml-5.3.2-cp310-cp310-win32.whl", hash = "sha256:dd755a0a78dd0b2c43f972e7b51a43be518ebc130c9f1a7c4480cf08b4385486"},
+    {file = "lxml-5.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:d64ea1686474074b38da13ae218d9fde0d1dc6525266976808f41ac98d9d7980"},
+    {file = "lxml-5.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9d61a7d0d208ace43986a92b111e035881c4ed45b1f5b7a270070acae8b0bfb4"},
+    {file = "lxml-5.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856dfd7eda0b75c29ac80a31a6411ca12209183e866c33faf46e77ace3ce8a79"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a01679e4aad0727bedd4c9407d4d65978e920f0200107ceeffd4b019bd48529"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6b37b4c3acb8472d191816d4582379f64d81cecbdce1a668601745c963ca5cc"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3df5a54e7b7c31755383f126d3a84e12a4e0333db4679462ef1165d702517477"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c09a40f28dcded933dc16217d6a092be0cc49ae25811d3b8e937c8060647c353"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1ef20f1851ccfbe6c5a04c67ec1ce49da16ba993fdbabdce87a92926e505412"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f79a63289dbaba964eb29ed3c103b7911f2dce28c36fe87c36a114e6bd21d7ad"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:75a72697d95f27ae00e75086aed629f117e816387b74a2f2da6ef382b460b710"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:b9b00c9ee1cc3a76f1f16e94a23c344e0b6e5c10bec7f94cf2d820ce303b8c01"},
+    {file = "lxml-5.3.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:77cbcab50cbe8c857c6ba5f37f9a3976499c60eada1bf6d38f88311373d7b4bc"},
+    {file = "lxml-5.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:29424058f072a24622a0a15357bca63d796954758248a72da6d512f9bd9a4493"},
+    {file = "lxml-5.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7d82737a8afe69a7c80ef31d7626075cc7d6e2267f16bf68af2c764b45ed68ab"},
+    {file = "lxml-5.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:95473d1d50a5d9fcdb9321fdc0ca6e1edc164dce4c7da13616247d27f3d21e31"},
+    {file = "lxml-5.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2162068f6da83613f8b2a32ca105e37a564afd0d7009b0b25834d47693ce3538"},
+    {file = "lxml-5.3.2-cp311-cp311-win32.whl", hash = "sha256:f8695752cf5d639b4e981afe6c99e060621362c416058effd5c704bede9cb5d1"},
+    {file = "lxml-5.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:d1a94cbb4ee64af3ab386c2d63d6d9e9cf2e256ac0fd30f33ef0a3c88f575174"},
+    {file = "lxml-5.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:16b3897691ec0316a1aa3c6585f61c8b7978475587c5b16fc1d2c28d283dc1b0"},
+    {file = "lxml-5.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8d4b34a0eeaf6e73169dcfd653c8d47f25f09d806c010daf074fba2db5e2d3f"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cd7a959396da425022e1e4214895b5cfe7de7035a043bcc2d11303792b67554"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cac5eaeec3549c5df7f8f97a5a6db6963b91639389cdd735d5a806370847732b"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29b5f7d77334877c2146e7bb8b94e4df980325fab0a8af4d524e5d43cd6f789d"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f3495cfec24e3d63fffd342cc8141355d1d26ee766ad388775f5c8c5ec3932"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e70ad4c9658beeff99856926fd3ee5fde8b519b92c693f856007177c36eb2e30"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:507085365783abd7879fa0a6fa55eddf4bdd06591b17a2418403bb3aff8a267d"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:5bb304f67cbf5dfa07edad904732782cbf693286b9cd85af27059c5779131050"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:3d84f5c093645c21c29a4e972b84cb7cf682f707f8706484a5a0c7ff13d7a988"},
+    {file = "lxml-5.3.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdc13911db524bd63f37b0103af014b7161427ada41f1b0b3c9b5b5a9c1ca927"},
+    {file = "lxml-5.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ec944539543f66ebc060ae180d47e86aca0188bda9cbfadff47d86b0dc057dc"},
+    {file = "lxml-5.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:59d437cc8a7f838282df5a199cf26f97ef08f1c0fbec6e84bd6f5cc2b7913f6e"},
+    {file = "lxml-5.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e275961adbd32e15672e14e0cc976a982075208224ce06d149c92cb43db5b93"},
+    {file = "lxml-5.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:038aeb6937aa404480c2966b7f26f1440a14005cb0702078c173c028eca72c31"},
+    {file = "lxml-5.3.2-cp312-cp312-win32.whl", hash = "sha256:3c2c8d0fa3277147bff180e3590be67597e17d365ce94beb2efa3138a2131f71"},
+    {file = "lxml-5.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:77809fcd97dfda3f399102db1794f7280737b69830cd5c961ac87b3c5c05662d"},
+    {file = "lxml-5.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:77626571fb5270ceb36134765f25b665b896243529eefe840974269b083e090d"},
+    {file = "lxml-5.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:78a533375dc7aa16d0da44af3cf6e96035e484c8c6b2b2445541a5d4d3d289ee"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6f62b2404b3f3f0744bbcabb0381c5fe186fa2a9a67ecca3603480f4846c585"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea918da00091194526d40c30c4996971f09dacab032607581f8d8872db34fbf"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c35326f94702a7264aa0eea826a79547d3396a41ae87a70511b9f6e9667ad31c"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3bef90af21d31c4544bc917f51e04f94ae11b43156356aff243cdd84802cbf2"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52fa7ba11a495b7cbce51573c73f638f1dcff7b3ee23697467dc063f75352a69"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad131e2c4d2c3803e736bb69063382334e03648de2a6b8f56a878d700d4b557d"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:00a4463ca409ceacd20490a893a7e08deec7870840eff33dc3093067b559ce3e"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:87e8d78205331cace2b73ac8249294c24ae3cba98220687b5b8ec5971a2267f1"},
+    {file = "lxml-5.3.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bf6389133bb255e530a4f2f553f41c4dd795b1fbb6f797aea1eff308f1e11606"},
+    {file = "lxml-5.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b3709fc752b42fb6b6ffa2ba0a5b9871646d97d011d8f08f4d5b3ee61c7f3b2b"},
+    {file = "lxml-5.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:abc795703d0de5d83943a4badd770fbe3d1ca16ee4ff3783d7caffc252f309ae"},
+    {file = "lxml-5.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98050830bb6510159f65d9ad1b8aca27f07c01bb3884ba95f17319ccedc4bcf9"},
+    {file = "lxml-5.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ba465a91acc419c5682f8b06bcc84a424a7aa5c91c220241c6fd31de2a72bc6"},
+    {file = "lxml-5.3.2-cp313-cp313-win32.whl", hash = "sha256:56a1d56d60ea1ec940f949d7a309e0bff05243f9bd337f585721605670abb1c1"},
+    {file = "lxml-5.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:1a580dc232c33d2ad87d02c8a3069d47abbcdce974b9c9cc82a79ff603065dbe"},
+    {file = "lxml-5.3.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1a59f7fe888d0ec1916d0ad69364c5400cfa2f885ae0576d909f342e94d26bc9"},
+    {file = "lxml-5.3.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d67b50abc2df68502a26ed2ccea60c1a7054c289fb7fc31c12e5e55e4eec66bd"},
+    {file = "lxml-5.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cb08d2cb047c98d6fbbb2e77d6edd132ad6e3fa5aa826ffa9ea0c9b1bc74a84"},
+    {file = "lxml-5.3.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:495ddb7e10911fb4d673d8aa8edd98d1eadafb3b56e8c1b5f427fd33cadc455b"},
+    {file = "lxml-5.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:884d9308ac7d581b705a3371185282e1b8eebefd68ccf288e00a2d47f077cc51"},
+    {file = "lxml-5.3.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:37f3d7cf7f2dd2520df6cc8a13df4c3e3f913c8e0a1f9a875e44f9e5f98d7fee"},
+    {file = "lxml-5.3.2-cp36-cp36m-win32.whl", hash = "sha256:e885a1bf98a76dff0a0648850c3083b99d9358ef91ba8fa307c681e8e0732503"},
+    {file = "lxml-5.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:b45f505d0d85f4cdd440cd7500689b8e95110371eaa09da0c0b1103e9a05030f"},
+    {file = "lxml-5.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b53cd668facd60b4f0dfcf092e01bbfefd88271b5b4e7b08eca3184dd006cb30"},
+    {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5dea998c891f082fe204dec6565dbc2f9304478f2fc97bd4d7a940fec16c873"},
+    {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46bc3e58b01e4f38d75e0d7f745a46875b7a282df145aca9d1479c65ff11561"},
+    {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:661feadde89159fd5f7d7639a81ccae36eec46974c4a4d5ccce533e2488949c8"},
+    {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:43af2a69af2cacc2039024da08a90174e85f3af53483e6b2e3485ced1bf37151"},
+    {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:1539f962d82436f3d386eb9f29b2a29bb42b80199c74a695dff51b367a61ec0a"},
+    {file = "lxml-5.3.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:6673920bf976421b5fac4f29b937702eef4555ee42329546a5fc68bae6178a48"},
+    {file = "lxml-5.3.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9fa722a9cd8845594593cce399a49aa6bfc13b6c83a7ee05e2ab346d9253d52f"},
+    {file = "lxml-5.3.2-cp37-cp37m-win32.whl", hash = "sha256:2eadd4efa487f4710755415aed3d6ae9ac8b4327ea45226ffccb239766c8c610"},
+    {file = "lxml-5.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:83d8707b1b08cd02c04d3056230ec3b771b18c566ec35e723e60cdf037064e08"},
+    {file = "lxml-5.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc6e8678bfa5ccba370103976ccfcf776c85c83da9220ead41ea6fd15d2277b4"},
+    {file = "lxml-5.3.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bed509662f67f719119ad56006cd4a38efa68cfa74383060612044915e5f7ad"},
+    {file = "lxml-5.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3925975fadd6fd72a6d80541a6ec75dfbad54044a03aa37282dafcb80fbdfa"},
+    {file = "lxml-5.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83c0462dedc5213ac586164c6d7227da9d4d578cf45dd7fbab2ac49b63a008eb"},
```
|
||||
{file = "lxml-5.3.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:53e3f9ca72858834688afa17278649d62aa768a4b2018344be00c399c4d29e95"},
|
||||
{file = "lxml-5.3.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:32ba634ef3f1b20f781019a91d78599224dc45745dd572f951adbf1c0c9b0d75"},
|
||||
{file = "lxml-5.3.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1b16504c53f41da5fcf04868a80ac40a39d3eec5329caf761114caec6e844ad1"},
|
||||
{file = "lxml-5.3.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:1f9682786138549da44ca4c49b20e7144d063b75f2b2ba611f4cff9b83db1062"},
|
||||
{file = "lxml-5.3.2-cp38-cp38-win32.whl", hash = "sha256:d8f74ef8aacdf6ee5c07566a597634bb8535f6b53dc89790db43412498cf6026"},
|
||||
{file = "lxml-5.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:49f1cee0fa27e1ee02589c696a9bdf4027e7427f184fa98e6bef0c6613f6f0fa"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:741c126bcf9aa939e950e64e5e0a89c8e01eda7a5f5ffdfc67073f2ed849caea"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ab6e9e6aca1fd7d725ffa132286e70dee5b9a4561c5ed291e836440b82888f89"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58e8c9b9ed3c15c2d96943c14efc324b69be6352fe5585733a7db2bf94d97841"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7811828ddfb8c23f4f1fbf35e7a7b2edec2f2e4c793dee7c52014f28c4b35238"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72968623efb1e12e950cbdcd1d0f28eb14c8535bf4be153f1bfffa818b1cf189"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebfceaa2ea588b54efb6160e3520983663d45aed8a3895bb2031ada080fb5f04"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d685d458505b2bfd2e28c812749fe9194a2b0ce285a83537e4309a187ffa270b"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:334e0e414dab1f5366ead8ca34ec3148415f236d5660e175f1d640b11d645847"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02e56f7de72fa82561eae69628a7d6febd7891d72248c7ff7d3e7814d4031017"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:638d06b4e1d34d1a074fa87deed5fb55c18485fa0dab97abc5604aad84c12031"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:354dab7206d22d7a796fa27c4c5bffddd2393da2ad61835355a4759d435beb47"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d9d9f82ff2c3bf9bb777cb355149f7f3a98ec58f16b7428369dc27ea89556a4c"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:95ad58340e3b7d2b828efc370d1791856613c5cb62ae267158d96e47b3c978c9"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:30fe05f4b7f6e9eb32862745512e7cbd021070ad0f289a7f48d14a0d3fc1d8a9"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:34c688fef86f73dbca0798e0a61bada114677006afa524a8ce97d9e5fabf42e6"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-win32.whl", hash = "sha256:4d6d3d1436d57f41984920667ec5ef04bcb158f80df89ac4d0d3f775a2ac0c87"},
|
||||
{file = "lxml-5.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:2996e1116bbb3ae2a1fbb2ba4da8f92742290b4011e7e5bce2bd33bbc9d9485a"},
|
||||
{file = "lxml-5.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:521ab9c80b98c30b2d987001c3ede2e647e92eeb2ca02e8cb66ef5122d792b24"},
|
||||
{file = "lxml-5.3.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1231b0f9810289d41df1eacc4ebb859c63e4ceee29908a0217403cddce38d0"},
|
||||
{file = "lxml-5.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271f1a4d5d2b383c36ad8b9b489da5ea9c04eca795a215bae61ed6a57cf083cd"},
|
||||
{file = "lxml-5.3.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:6fca8a5a13906ba2677a5252752832beb0f483a22f6c86c71a2bb320fba04f61"},
|
||||
{file = "lxml-5.3.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ea0c3b7922209160faef194a5b6995bfe7fa05ff7dda6c423ba17646b7b9de10"},
|
||||
{file = "lxml-5.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0a006390834603e5952a2ff74b9a31a6007c7cc74282a087aa6467afb4eea987"},
|
||||
{file = "lxml-5.3.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eae4136a3b8c4cf76f69461fc8f9410d55d34ea48e1185338848a888d71b9675"},
|
||||
{file = "lxml-5.3.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d48e06be8d8c58e7feaedd8a37897a6122637efb1637d7ce00ddf5f11f9a92ad"},
|
||||
{file = "lxml-5.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4b83aed409134093d90e114007034d2c1ebcd92e501b71fd9ec70e612c8b2eb"},
|
||||
{file = "lxml-5.3.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7a0e77edfe26d3703f954d46bed52c3ec55f58586f18f4b7f581fc56954f1d84"},
|
||||
{file = "lxml-5.3.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:19f6fcfd15b82036b4d235749d78785eb9c991c7812012dc084e0d8853b4c1c0"},
|
||||
{file = "lxml-5.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d49919c95d31ee06eefd43d8c6f69a3cc9bdf0a9b979cc234c4071f0eb5cb173"},
|
||||
{file = "lxml-5.3.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2d0a60841410123c533990f392819804a8448853f06daf412c0f383443925e89"},
|
||||
{file = "lxml-5.3.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7f729e03090eb4e3981f10efaee35e6004b548636b1a062b8b9a525e752abc"},
|
||||
{file = "lxml-5.3.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579df6e20d8acce3bcbc9fb8389e6ae00c19562e929753f534ba4c29cfe0be4b"},
|
||||
{file = "lxml-5.3.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2abcf3f3b8367d6400b908d00d4cd279fc0b8efa287e9043820525762d383699"},
|
||||
{file = "lxml-5.3.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:348c06cb2e3176ce98bee8c397ecc89181681afd13d85870df46167f140a305f"},
|
||||
{file = "lxml-5.3.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:617ecaccd565cbf1ac82ffcaa410e7da5bd3a4b892bb3543fb2fe19bd1c4467d"},
|
||||
{file = "lxml-5.3.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c3eb4278dcdb9d86265ed2c20b9ecac45f2d6072e3904542e591e382c87a9c00"},
|
||||
{file = "lxml-5.3.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258b6b53458c5cbd2a88795557ff7e0db99f73a96601b70bc039114cd4ee9e02"},
|
||||
{file = "lxml-5.3.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a9d8d25ed2f2183e8471c97d512a31153e123ac5807f61396158ef2793cb6e"},
|
||||
{file = "lxml-5.3.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73bcb635a848c18a3e422ea0ab0092f2e4ef3b02d8ebe87ab49748ebc8ec03d8"},
|
||||
{file = "lxml-5.3.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1545de0a69a16ced5767bae8cca1801b842e6e49e96f5e4a8a5acbef023d970b"},
|
||||
{file = "lxml-5.3.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:165fcdc2f40fc0fe88a3c3c06c9c2a097388a90bda6a16e6f7c9199c903c9b8e"},
|
||||
{file = "lxml-5.3.2.tar.gz", hash = "sha256:773947d0ed809ddad824b7b14467e1a481b8976e87278ac4a730c2f7c7fcddc1"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -3854,26 +3860,26 @@ testing = ["google-api-core (>=1.31.5)"]

[[package]]
name = "protobuf"
version = "6.30.2"
version = "6.31.1"
description = ""
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "protobuf-6.30.2-cp310-abi3-win32.whl", hash = "sha256:b12ef7df7b9329886e66404bef5e9ce6a26b54069d7f7436a0853ccdeb91c103"},
    {file = "protobuf-6.30.2-cp310-abi3-win_amd64.whl", hash = "sha256:7653c99774f73fe6b9301b87da52af0e69783a2e371e8b599b3e9cb4da4b12b9"},
    {file = "protobuf-6.30.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:0eb523c550a66a09a0c20f86dd554afbf4d32b02af34ae53d93268c1f73bc65b"},
    {file = "protobuf-6.30.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:50f32cc9fd9cb09c783ebc275611b4f19dfdfb68d1ee55d2f0c7fa040df96815"},
    {file = "protobuf-6.30.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4f6c687ae8efae6cf6093389a596548214467778146b7245e886f35e1485315d"},
    {file = "protobuf-6.30.2-cp39-cp39-win32.whl", hash = "sha256:524afedc03b31b15586ca7f64d877a98b184f007180ce25183d1a5cb230ee72b"},
    {file = "protobuf-6.30.2-cp39-cp39-win_amd64.whl", hash = "sha256:acec579c39c88bd8fbbacab1b8052c793efe83a0a5bd99db4a31423a25c0a0e2"},
    {file = "protobuf-6.30.2-py3-none-any.whl", hash = "sha256:ae86b030e69a98e08c77beab574cbcb9fff6d031d57209f574a5aea1445f4b51"},
    {file = "protobuf-6.30.2.tar.gz", hash = "sha256:35c859ae076d8c56054c25b59e5e59638d86545ed6e2b6efac6be0b6ea3ba048"},
    {file = "protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9"},
    {file = "protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447"},
    {file = "protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402"},
    {file = "protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39"},
    {file = "protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6"},
    {file = "protobuf-6.31.1-cp39-cp39-win32.whl", hash = "sha256:0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16"},
    {file = "protobuf-6.31.1-cp39-cp39-win_amd64.whl", hash = "sha256:8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9"},
    {file = "protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e"},
    {file = "protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a"},
]
[[package]]
name = "prowler"
version = "5.6.0"
version = "5.8.0"
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
optional = false
python-versions = ">3.9.1,<3.13"
@@ -3921,6 +3927,7 @@ numpy = "2.0.2"
pandas = "2.2.3"
py-ocsf-models = "0.3.1"
pydantic = "1.10.21"
pygithub = "2.5.0"
python-dateutil = ">=2.9.0.post0,<3.0.0"
pytz = "2025.1"
schema = "0.7.7"
@@ -3933,7 +3940,7 @@ tzlocal = "5.3.1"
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
reference = "master"
resolved_reference = "9828824b737b8deda61f4a6646b54e0ad45033b9"
resolved_reference = "ea97de7f43a2063476b49f7697bb6c7b51137c11"
[[package]]
name = "psutil"
@@ -4110,11 +4117,11 @@ description = "C parser in Python"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
markers = "platform_python_implementation != \"PyPy\""
files = [
    {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
    {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
]
markers = {dev = "platform_python_implementation != \"PyPy\""}
[[package]]
name = "pycurl"
@@ -4225,6 +4232,26 @@ typing-extensions = ">=4.2.0"
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]

[[package]]
name = "pygithub"
version = "2.5.0"
description = "Use the full Github API v3"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "PyGithub-2.5.0-py3-none-any.whl", hash = "sha256:b0b635999a658ab8e08720bdd3318893ff20e2275f6446fcf35bf3f44f2c0fd2"},
    {file = "pygithub-2.5.0.tar.gz", hash = "sha256:e1613ac508a9be710920d26eb18b1905ebd9926aa49398e88151c1b526aad3cf"},
]

[package.dependencies]
Deprecated = "*"
pyjwt = {version = ">=2.4.0", extras = ["crypto"]}
pynacl = ">=1.4.0"
requests = ">=2.14.0"
typing-extensions = ">=4.0.0"
urllib3 = ">=1.26.0"
[[package]]
name = "pygments"
version = "2.19.1"
@@ -4289,6 +4316,33 @@ tomlkit = ">=0.10.1"
spelling = ["pyenchant (>=3.2,<4.0)"]
testutils = ["gitpython (>3)"]
[[package]]
name = "pynacl"
version = "1.5.0"
description = "Python binding to the Networking and Cryptography (NaCl) library"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"},
    {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"},
    {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"},
    {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"},
    {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"},
    {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"},
    {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"},
    {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"},
    {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"},
    {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"},
]

[package.dependencies]
cffi = ">=1.4.1"

[package.extras]
docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
[[package]]
name = "pyparsing"
version = "3.2.3"
@@ -4934,7 +4988,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -4943,7 +4996,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -4952,7 +5004,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -4961,7 +5012,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -4970,7 +5020,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
    {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -5645,35 +5694,70 @@ files = [

[[package]]
name = "xmlsec"
version = "1.3.15"
version = "1.3.14"
description = "Python bindings for the XML Security Library"
optional = false
python-versions = ">=3.5"
groups = ["main"]
files = [
    {file = "xmlsec-1.3.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60209f82a254a1d6083397c4eeae131e7ac2f64bfddb97f2b0b240369f03c4df"},
    {file = "xmlsec-1.3.15-cp310-cp310-win32.whl", hash = "sha256:a62be0f8964bbec1efd2ca39b025c40da620a2ef9cb5440ff4ffa7e0c6906f70"},
    {file = "xmlsec-1.3.15-cp310-cp310-win_amd64.whl", hash = "sha256:685b92860bbf048e3b725bd5e9310bd4d3515f7eafcb2c284dda62078a1ce90c"},
    {file = "xmlsec-1.3.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c760230d4f77b7828857d076434e0810850eb2603775dc92fa9f760a98c2f694"},
    {file = "xmlsec-1.3.15-cp311-cp311-win32.whl", hash = "sha256:901458034b7476e1fd0881a85814e184d00eec2b5df33b1ceeb312681e8cb9e8"},
    {file = "xmlsec-1.3.15-cp311-cp311-win_amd64.whl", hash = "sha256:2ecbb65eea79a25769fbaa56c9e8bc4553aea63a9704795e962dfe06679b0191"},
    {file = "xmlsec-1.3.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0edff08e0442cdcc82bebf353ba4bcfd5a022f4b2751052ee1564afc5c78bef4"},
    {file = "xmlsec-1.3.15-cp312-cp312-win32.whl", hash = "sha256:e5c402e5633fd39f75fe124219d66d383a040ba04d0de54e024afeb7fe7d3e3a"},
    {file = "xmlsec-1.3.15-cp312-cp312-win_amd64.whl", hash = "sha256:0c47f2347e8dcc0a48648b9702af53179618c204414a8e36926a9f61214ebf0b"},
    {file = "xmlsec-1.3.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6ac2154311d32a6571e22f224ed16356029e59bd5ca76edeb3922a809adfe89c"},
    {file = "xmlsec-1.3.15-cp313-cp313-win32.whl", hash = "sha256:5ed218129f89b0592926ad2be42c017bece469db9b7380dc41bc09b01ca26d5d"},
    {file = "xmlsec-1.3.15-cp313-cp313-win_amd64.whl", hash = "sha256:5fc29e69b064323317b3862751a3a8107670e0a17510ca4517bbdc1939a90b1a"},
    {file = "xmlsec-1.3.15-cp36-cp36m-win32.whl", hash = "sha256:d0404dd76097b1f6dcbeff404c46cf045442a8cf9500f60c46a26ae03130ab9c"},
    {file = "xmlsec-1.3.15-cp36-cp36m-win_amd64.whl", hash = "sha256:672bb43a12d6b8e2e4a392ef495ea731ded5acc1585f9358174295a6fb5df262"},
    {file = "xmlsec-1.3.15-cp37-cp37m-win32.whl", hash = "sha256:96e24b22e862f0c50840a5af23cb7df186e7a1547b311a67ebca5b1e43ea0d86"},
    {file = "xmlsec-1.3.15-cp37-cp37m-win_amd64.whl", hash = "sha256:bec066ce81a82a5a2b994b1e7be2af11715fd716a55754c645668acf9c5a64c0"},
    {file = "xmlsec-1.3.15-cp38-cp38-win32.whl", hash = "sha256:95e80981b2e0ea74a7040cbf66b40072f4424298d7b50c3e587a026a7dab34ad"},
    {file = "xmlsec-1.3.15-cp38-cp38-win_amd64.whl", hash = "sha256:c2a40f8549769ba5fdc223f0ae564d3b4d4ca52b6461d46bc508d3321267b2ad"},
    {file = "xmlsec-1.3.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2d5692a683054dec769f4a1d6e8fade88ddcfc2cef89b20d0ecc1c75deb0dd6"},
    {file = "xmlsec-1.3.15-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:f0115d3b4f156df2cfee8424d75dcb7f5ca2cb4870af18b713098830493d3cb0"},
    {file = "xmlsec-1.3.15-cp39-cp39-win32.whl", hash = "sha256:ffb32d3c5af289c8598d4f9215c9f8f6c208f1551e78f0180f525bc08c8a67d2"},
    {file = "xmlsec-1.3.15-cp39-cp39-win_amd64.whl", hash = "sha256:3211da05c11c7a0d2b913a7834bff59e649150f41127949b3322442bc3986b56"},
    {file = "xmlsec-1.3.15.tar.gz", hash = "sha256:baa856b83d0012e278e6f6cbec96ac8128de667ca9fa9a2eeb02c752e816f6d8"},
    {file = "xmlsec-1.3.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4dea6df3ffcb65d0b215678c3a0fe7bbc66785d6eae81291296e372498bad43a"},
    {file = "xmlsec-1.3.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fa1311f7489d050dde9028f5a2b5849c2927bb09c9a93491cb2f28fdc563912"},
    {file = "xmlsec-1.3.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cd9f513cf01dc0c5b9d9f0728714ecde2e7f46b3b6f63de91f4ae32f3008b3"},
    {file = "xmlsec-1.3.14-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77749b338503fb6e151052c664064b34264f4168e2cb0cca1de78b7e5312a783"},
    {file = "xmlsec-1.3.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4af81ce8044862ec865782efd353d22abdcd95b92364eef3c934de57ae6d5852"},
    {file = "xmlsec-1.3.14-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cf35a25be3eb6263b2e0544ba26294651113fab79064f994d347a2ca5973e8e2"},
    {file = "xmlsec-1.3.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:004e8a82e26728bf8a60f8ece1ef3ffafdac30ef538139dfe28870e8503ca64a"},
    {file = "xmlsec-1.3.14-cp310-cp310-win32.whl", hash = "sha256:e6cbc914d77678db0c8bc39e723d994174633d18f9d6be4665ec29cce978a96d"},
    {file = "xmlsec-1.3.14-cp310-cp310-win_amd64.whl", hash = "sha256:4922afa9234d1c5763950b26c328a5320019e55eb6000272a79dfe54fee8e704"},
    {file = "xmlsec-1.3.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7799a9ff3593f9dd43464e18b1a621640bffc40456c47c23383727f937dca7fc"},
    {file = "xmlsec-1.3.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1fe23c2dd5f5dbcb24f40e2c1061e2672a32aabee7cf8ac5337036a485607d72"},
    {file = "xmlsec-1.3.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be3b7a28e54a03b87faf07fb3c6dc3e50a2c79b686718c3ad08300b8bf6bb67"},
    {file = "xmlsec-1.3.14-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e894ad3e7de373f56efc09d6a56f7eae73a8dd4cec8943313134849e9c6607"},
    {file = "xmlsec-1.3.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:204d3c586b8bd6f02a5d4c59850a8157205569d40c32567f49576fa5795d897d"},
    {file = "xmlsec-1.3.14-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6679cec780386d848e7351d4b0de92c4483289ea4f0a2187e216159f939a4c6b"},
    {file = "xmlsec-1.3.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c4d41c83c8a2b8d8030204391ebeb6174fbdb044f0331653c4b5a4ce4150bcc0"},
    {file = "xmlsec-1.3.14-cp311-cp311-win32.whl", hash = "sha256:df4aa0782a53032fd35e18dcd6d328d6126324bfcfdef0cb5c2856f25b4b6f94"},
    {file = "xmlsec-1.3.14-cp311-cp311-win_amd64.whl", hash = "sha256:1072878301cb9243a54679e0520e6a5be2266c07a28b0ecef9e029d05a90ffcd"},
    {file = "xmlsec-1.3.14-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1eb3dcf244a52f796377112d8f238dbb522eb87facffb498425dc8582a84a6bf"},
    {file = "xmlsec-1.3.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:330147ce59fbe56a9be5b2085d739c55a569f112576b3f1b33681f87416eaf33"},
    {file = "xmlsec-1.3.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed4034939d8566ccdcd3b4e4f23c63fd807fb8763ae5668d59a19e11640a8242"},
    {file = "xmlsec-1.3.14-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a98eadfcb0c3b23ccceb7a2f245811f8d784bd287640dcfe696a26b9db1e2fc0"},
    {file = "xmlsec-1.3.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ff7b2711557c1087b72b0a1a88d82eafbf2a6d38b97309a6f7101d4a7041c3"},
    {file = "xmlsec-1.3.14-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:774d5d1e45f07f953c1cc14fd055c1063f0725f7248b6b0e681f59fd8638934d"},
    {file = "xmlsec-1.3.14-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bd10ca3201f164482775a7ce61bf7ee9aade2e7d032046044dd0f6f52c91d79d"},
    {file = "xmlsec-1.3.14-cp312-cp312-win32.whl", hash = "sha256:19c86bab1498e4c2e56d8e2c878f461ccb6e56b67fd7522b0c8fda46d8910781"},
    {file = "xmlsec-1.3.14-cp312-cp312-win_amd64.whl", hash = "sha256:d0762f4232bce2c7f6c0af329db8b821b4460bbe123a2528fb5677d03db7a4b5"},
    {file = "xmlsec-1.3.14-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:03ccba7dacf197850de954666af0221c740a5de631a80136362a1559223fab75"},
    {file = "xmlsec-1.3.14-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c12900e1903e289deb84eb893dca88591d6884d3e3cda4fb711b8812118416e8"},
    {file = "xmlsec-1.3.14-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6566434e2e5c58e472362a6187f208601f1627a148683a6f92bd16479f1d9e20"},
    {file = "xmlsec-1.3.14-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2401e162aaab7d9416c3405bac7a270e5f370988a0f1f46f0f29b735edba87e1"},
    {file = "xmlsec-1.3.14-cp36-cp36m-win32.whl", hash = "sha256:ba3b39c493e3b04354615068a3218f30897fcc2f42c6d8986d0c1d63aca87782"},
    {file = "xmlsec-1.3.14-cp36-cp36m-win_amd64.whl", hash = "sha256:4edd8db4df04bbac9c4a5ab4af855b74fe2bf2c248d07cac2e6d92a485f1a685"},
    {file = "xmlsec-1.3.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b6dd86f440fec9242515c64f0be93fec8b4289287db1f6de2651eee9995aaecb"},
    {file = "xmlsec-1.3.14-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad1634cabe0915fe2a12e142db0ed2daf5be80cbe3891a2cecbba0750195cc6b"},
    {file = "xmlsec-1.3.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba457ff87c39cbae3c5020475a728d24bbd9d00376df9af9724cd3bb59ff07a"},
    {file = "xmlsec-1.3.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12d90059308bb0c1b94bde065784e6852999d08b91bcb2048c17e62b954acb07"},
    {file = "xmlsec-1.3.14-cp37-cp37m-win32.whl", hash = "sha256:ce4e165a1436697e5e39587c4fba24db4545a5c9801e0d749f1afd09ad3ab901"},
    {file = "xmlsec-1.3.14-cp37-cp37m-win_amd64.whl", hash = "sha256:7e8e0171916026cbe8e2022c959558d02086655fd3c3466f2bc0451b09cf9ee8"},
    {file = "xmlsec-1.3.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c42735cc68fdb4c6065cf0a0701dfff3a12a1734c63a36376349af9a5481f27b"},
    {file = "xmlsec-1.3.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:38e035bf48300b7dbde2dd01d3b8569f8584fc9c73809be13886e6b6c77b74fb"},
    {file = "xmlsec-1.3.14-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73eabf5ef58189d81655058cf328c1dfa9893d89f1bff5fc941481f08533f338"},
    {file = "xmlsec-1.3.14-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bddd2a2328b4e08c8a112e06cf2cd2b4d281f4ad94df15b4cef18f06cdc49d78"},
    {file = "xmlsec-1.3.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fed3bc7943681c9ed4d2221600ab440f060d8d1a8f92f346f2b41effe175b8"},
    {file = "xmlsec-1.3.14-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:147934bd39dfd840663fb6b920ea9201455fa886427975713f1b42d9f20b9b29"},
    {file = "xmlsec-1.3.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e732a75fcb6b84872b168f972fbbf3749baf76308635f14015d1d35ed0c5719c"},
    {file = "xmlsec-1.3.14-cp38-cp38-win32.whl", hash = "sha256:b109cdf717257fd4daa77c1d3ec8a3fb2a81318a6d06a36c55a8a53ae381ae5e"},
    {file = "xmlsec-1.3.14-cp38-cp38-win_amd64.whl", hash = "sha256:b7ba2ea38e3d9efa520b14f3c0b7d99a7c055244ae5ba8bc9f4ca73b18f3a215"},
    {file = "xmlsec-1.3.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1b9b5de6bc69fdec23147e5f712cb05dc86df105462f254f140d743cc680cc7b"},
    {file = "xmlsec-1.3.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:82ac81deb7d7bf5cc8a748148948e5df5386597ff43fb92ec651cc5c7addb0e7"},
    {file = "xmlsec-1.3.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bae37b2920115cf00759ee9fb7841cbdebcef3a8a92734ab93ae8fa41ac581d"},
    {file = "xmlsec-1.3.14-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fac2a787ae3b9fb761f9aec6b9f10f2d1c1b87abb574ebd8ff68435bdc97e3d"},
    {file = "xmlsec-1.3.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34c61ec0c0e70fda710290ae74b9efe1928d9242ed82c4eecf97aa696cff68e6"},
    {file = "xmlsec-1.3.14-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:995e87acecc263a2f6f2aa3cc204268f651cac8f4d7a2047f11b2cd49979cc38"},
    {file = "xmlsec-1.3.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2f84a1c509c52773365645a87949081ee9ea9c535cd452048cc8ca4ad3b45666"},
    {file = "xmlsec-1.3.14-cp39-cp39-win32.whl", hash = "sha256:7882963e9cb9c0bd0e8c2715a29159a366417ff4a30d8baf42b05bc5cf249446"},
    {file = "xmlsec-1.3.14-cp39-cp39-win_amd64.whl", hash = "sha256:a487c3d144f791c32f5e560aa27a705fba23171728b8a8511f36de053ff6bc93"},
    {file = "xmlsec-1.3.14.tar.gz", hash = "sha256:934f804f2f895bcdb86f1eaee236b661013560ee69ec108d29cdd6e5f292a2d9"},
]

[package.dependencies]
@@ -5821,4 +5905,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "0750d4d8d4c0b020c87a5c6e3c459f1f5f445e6f1395f7e492adea9a901e2056"
content-hash = "6802b33984c2f8438c9dc02dac0a0c14d5a78af60251bd0c80ca59bc2182c48e"
@@ -23,12 +23,14 @@ dependencies = [
    "drf-spectacular==0.27.2",
    "drf-spectacular-jsonapi==0.5.1",
    "gunicorn==23.0.0",
    "lxml==5.3.2",
    "prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
    "psycopg2-binary==2.9.9",
    "pytest-celery[redis] (>=1.0.1,<2.0.0)",
    "sentry-sdk[django] (>=2.20.0,<3.0.0)",
    "uuid6==2024.7.10",
    "openai (>=1.82.0,<2.0.0)"
    "openai (>=1.82.0,<2.0.0)",
    "xmlsec==1.3.14"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
@@ -36,7 +38,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.9.0"
version = "1.10.0"

[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -3,14 +3,7 @@ from django.db import transaction

from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.models import (
    Membership,
    Role,
    SAMLConfiguration,
    Tenant,
    User,
    UserRoleRelationship,
)
from api.models import Membership, Role, Tenant, User, UserRoleRelationship


class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
@@ -24,7 +17,7 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
    def pre_social_login(self, request, sociallogin):
        # Link existing accounts with the same email address
        email = sociallogin.account.extra_data.get("email")
        if sociallogin.account.provider == "saml":
        if sociallogin.provider.id == "saml":
            email = sociallogin.user.email
        if email:
            existing_user = self.get_user_by_email(email)
@@ -38,57 +31,10 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
        """
        with transaction.atomic(using=MainRouter.admin_db):
            user = super().save_user(request, sociallogin, form)
            provider = sociallogin.account.provider
            provider = sociallogin.provider.id
            extra = sociallogin.account.extra_data

            if provider == "saml":
                # Handle SAML-specific logic
                user.first_name = extra.get("firstName", [""])[0]
                user.last_name = extra.get("lastName", [""])[0]
                user.company_name = extra.get("organization", [""])[0]
                user.name = f"{user.first_name} {user.last_name}".strip()
                user.save(using=MainRouter.admin_db)

                email_domain = user.email.split("@")[-1]
                tenant = (
                    SAMLConfiguration.objects.using(MainRouter.admin_db)
                    .get(email_domain=email_domain)
                    .tenant
                )

                with rls_transaction(str(tenant.id)):
                    role_name = extra.get("userType", ["saml_default_role"])[0].strip()

                    try:
                        role = Role.objects.using(MainRouter.admin_db).get(
                            name=role_name, tenant_id=tenant.id
                        )
                    except Role.DoesNotExist:
                        role = Role.objects.using(MainRouter.admin_db).create(
                            name=role_name,
                            tenant_id=tenant.id,
                            manage_users=False,
                            manage_account=False,
                            manage_billing=False,
                            manage_providers=False,
                            manage_integrations=False,
                            manage_scans=False,
                            unlimited_visibility=False,
                        )

                    Membership.objects.using(MainRouter.admin_db).create(
                        user=user,
                        tenant=tenant,
                        role=Membership.RoleChoices.MEMBER,
                    )

                    UserRoleRelationship.objects.using(MainRouter.admin_db).create(
                        user=user,
                        role=role,
                        tenant_id=tenant.id,
                    )

            else:
            if provider != "saml":
                # Handle other providers (e.g., GitHub, Google)
                user.save(using=MainRouter.admin_db)
                social_account_name = extra.get("name")
@@ -119,5 +65,7 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
                    role=role,
                    tenant_id=tenant.id,
                )
            else:
                request.session["saml_user_created"] = str(user.id)

        return user
@@ -529,3 +529,15 @@ class IntegrationTypeEnum(EnumType):
class IntegrationTypeEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("integration_type", *args, **kwargs)


# Postgres enum definition for Processor type


class ProcessorTypeEnum(EnumType):
    enum_type_name = "processor_type"


class ProcessorTypeEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("processor_type", *args, **kwargs)
@@ -57,6 +57,11 @@ class TaskInProgressException(TaskManagementError):
        super().__init__()


# Provider connection errors
class ProviderConnectionError(Exception):
    """Base exception for provider connection errors."""


def custom_exception_handler(exc, context):
    if isinstance(exc, django_validation_error):
        if hasattr(exc, "error_dict"):
@@ -73,3 +78,21 @@ def custom_exception_handler(exc, context):
                message_item["message"] for message_item in exc.detail["messages"]
            ]
    return exception_handler(exc, context)


class ConflictException(APIException):
    status_code = status.HTTP_409_CONFLICT
    default_detail = "A conflict occurred. The resource already exists."
    default_code = "conflict"

    def __init__(self, detail=None, code=None, pointer=None):
        error_detail = {
            "detail": detail or self.default_detail,
            "status": self.status_code,
            "code": self.default_code,
        }

        if pointer:
            error_detail["source"] = {"pointer": pointer}

        super().__init__(detail=[error_detail])
@@ -1,5 +1,6 @@
from datetime import date, datetime, timedelta, timezone

from dateutil.parser import parse
from django.conf import settings
from django.db.models import Q
from django_filters.rest_framework import (
@@ -28,6 +29,7 @@ from api.models import (
    Invitation,
    Membership,
    PermissionChoices,
    Processor,
    Provider,
    ProviderGroup,
    ProviderSecret,
@@ -338,6 +340,8 @@ class ResourceFilter(ProviderRelationshipFilterSet):
    tags = CharFilter(method="filter_tag")
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    scan = UUIDFilter(field_name="provider__scan", lookup_expr="exact")
    scan__in = UUIDInFilter(field_name="provider__scan", lookup_expr="in")

    class Meta:
        model = Resource
@@ -352,6 +356,82 @@ class ResourceFilter(ProviderRelationshipFilterSet):
            "updated_at": ["gte", "lte"],
        }

    def filter_queryset(self, queryset):
        if not (self.data.get("scan") or self.data.get("scan__in")) and not (
            self.data.get("updated_at")
            or self.data.get("updated_at__date")
            or self.data.get("updated_at__gte")
            or self.data.get("updated_at__lte")
        ):
            raise ValidationError(
                [
                    {
                        "detail": "At least one date filter is required: filter[updated_at], filter[updated_at.gte], "
                        "or filter[updated_at.lte].",
                        "status": 400,
                        "source": {"pointer": "/data/attributes/updated_at"},
                        "code": "required",
                    }
                ]
            )

        gte_date = (
            parse(self.data.get("updated_at__gte")).date()
            if self.data.get("updated_at__gte")
            else datetime.now(timezone.utc).date()
        )
        lte_date = (
            parse(self.data.get("updated_at__lte")).date()
            if self.data.get("updated_at__lte")
            else datetime.now(timezone.utc).date()
        )

        if abs(lte_date - gte_date) > timedelta(
            days=settings.FINDINGS_MAX_DAYS_IN_RANGE
        ):
            raise ValidationError(
                [
                    {
                        "detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
                        "status": 400,
                        "source": {"pointer": "/data/attributes/updated_at"},
                        "code": "invalid",
                    }
                ]
            )

        return super().filter_queryset(queryset)

    def filter_tag_key(self, queryset, name, value):
        return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))

    def filter_tag_value(self, queryset, name, value):
        return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))

    def filter_tag(self, queryset, name, value):
        # We won't know what the user wants to filter on just based on the value,
        # and we don't want to build special filtering logic for every possible
        # provider tag spec, so we'll just do a full text search
        return queryset.filter(tags__text_search=value)


class LatestResourceFilter(ProviderRelationshipFilterSet):
    tag_key = CharFilter(method="filter_tag_key")
    tag_value = CharFilter(method="filter_tag_value")
    tag = CharFilter(method="filter_tag")
    tags = CharFilter(method="filter_tag")

    class Meta:
        model = Resource
        fields = {
            "provider": ["exact", "in"],
            "uid": ["exact", "icontains"],
            "name": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
            "service": ["exact", "icontains", "in"],
            "type": ["exact", "icontains", "in"],
        }

    def filter_tag_key(self, queryset, name, value):
        return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))

@@ -704,3 +784,12 @@ class IntegrationFilter(FilterSet):
        fields = {
            "inserted_at": ["date", "gte", "lte"],
        }


class ProcessorFilter(FilterSet):
    processor_type = ChoiceFilter(choices=Processor.ProcessorChoices.choices)
    processor_type__in = ChoiceInFilter(
        choices=Processor.ProcessorChoices.choices,
        field_name="processor_type",
        lookup_expr="in",
    )
@@ -3,7 +3,7 @@
    "model": "api.user",
    "pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
    "fields": {
        "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
        "password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
        "last_login": null,
        "name": "Devie Prowlerson",
        "email": "dev@prowler.com",
@@ -16,7 +16,7 @@
    "model": "api.user",
    "pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
    "fields": {
        "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
        "password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
        "last_login": null,
        "name": "Devietoo Prowlerson",
        "email": "dev2@prowler.com",
@@ -11,7 +11,7 @@ import api.rls

class Migration(migrations.Migration):
    dependencies = [
        ("api", "0030_samlconfigurations"),
        ("api", "0029_findings_check_index_parent"),
    ]

    operations = [
@@ -54,6 +54,7 @@ class Migration(migrations.Migration):
                    ("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
                    ("gpt-4o-mini", "GPT-4o Mini Default"),
                ],
                default="gpt-4o-2024-08-06",
                help_text="Must be one of the supported model names",
                max_length=50,
            ),
@@ -0,0 +1,24 @@
# Generated by Django 5.1.10 on 2025-06-23 10:04

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0030_lighthouseconfiguration"),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        migrations.AlterField(
            model_name="scan",
            name="scheduler_task",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="django_celery_beat.periodictask",
            ),
        ),
    ]
@@ -1,57 +1,61 @@
# Generated by Django 5.1.8 on 2025-05-15 09:54
# Generated by Django 5.1.10 on 2025-07-02 15:47

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models

import api.db_utils
import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0029_findings_check_index_parent"),
        ("api", "0031_scan_disable_on_cascade_periodic_tasks"),
    ]

    operations = [
        migrations.AlterField(
            model_name="integration",
            name="integration_type",
            field=api.db_utils.IntegrationTypeEnumField(
                choices=[
                    ("amazon_s3", "Amazon S3"),
                    ("aws_security_hub", "AWS Security Hub"),
                    ("jira", "JIRA"),
                    ("slack", "Slack"),
                ]
            ),
        ),
        migrations.CreateModel(
            name="SAMLDomainIndex",
            name="SAMLToken",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("email_domain", models.CharField(max_length=254, unique=True)),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("expires_at", models.DateTimeField(editable=False)),
                ("token", models.JSONField(unique=True)),
                (
                    "tenant",
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "db_table": "saml_domain_index",
                "db_table": "saml_tokens",
            },
        ),
        migrations.AddConstraint(
            model_name="samldomainindex",
            constraint=models.UniqueConstraint(
                fields=("email_domain", "tenant"),
                name="unique_resources_by_email_domain",
            ),
        ),
        migrations.AddConstraint(
            model_name="samldomainindex",
            constraint=api.rls.BaseSecurityConstraint(
                name="statements_on_samldomainindex",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.CreateModel(
            name="SAMLConfiguration",
            fields=[
@@ -105,16 +109,42 @@ class Migration(migrations.Migration):
                fields=("tenant",), name="unique_samlconfig_per_tenant"
            ),
        ),
        migrations.AlterField(
            model_name="integration",
            name="integration_type",
            field=api.db_utils.IntegrationTypeEnumField(
                choices=[
                    ("amazon_s3", "Amazon S3"),
                    ("aws_security_hub", "AWS Security Hub"),
                    ("jira", "JIRA"),
                    ("slack", "Slack"),
                ]
        migrations.CreateModel(
            name="SAMLDomainIndex",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("email_domain", models.CharField(max_length=254, unique=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "saml_domain_index",
            },
        ),
        migrations.AddConstraint(
            model_name="samldomainindex",
            constraint=models.UniqueConstraint(
                fields=("email_domain", "tenant"),
                name="unique_resources_by_email_domain",
            ),
        ),
        migrations.AddConstraint(
            model_name="samldomainindex",
            constraint=api.rls.BaseSecurityConstraint(
                name="statements_on_samldomainindex",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
    ]
34
api/src/backend/api/migrations/0033_processors_enum.py
@@ -0,0 +1,34 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46

from functools import partial

from django.db import migrations

from api.db_utils import PostgresEnumMigration, ProcessorTypeEnum, register_enum
from api.models import Processor

ProcessorTypeEnumMigration = PostgresEnumMigration(
    enum_name="processor_type",
    enum_values=tuple(
        processor_type[0] for processor_type in Processor.ProcessorChoices.choices
    ),
)


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0032_saml"),
    ]

    operations = [
        migrations.RunPython(
            ProcessorTypeEnumMigration.create_enum_type,
            reverse_code=ProcessorTypeEnumMigration.drop_enum_type,
        ),
        migrations.RunPython(
            partial(register_enum, enum_class=ProcessorTypeEnum),
            reverse_code=migrations.RunPython.noop,
        ),
    ]
88
api/src/backend/api/migrations/0034_processors.py
Normal file
@@ -0,0 +1,88 @@
# Generated by Django 5.1.5 on 2025-03-26 13:04

import uuid

import django.db.models.deletion
from django.db import migrations, models

import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0033_processors_enum"),
    ]

    operations = [
        migrations.CreateModel(
            name="Processor",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "processor_type",
                    api.db_utils.ProcessorTypeEnumField(
                        choices=[("mutelist", "Mutelist")]
                    ),
                ),
                ("configuration", models.JSONField(default=dict)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "processors",
                "abstract": False,
                "indexes": [
                    models.Index(
                        fields=["tenant_id", "id"], name="processor_tenant_id_idx"
                    ),
                    models.Index(
                        fields=["tenant_id", "processor_type"],
                        name="processor_tenant_type_idx",
                    ),
                ],
            },
        ),
        migrations.AddConstraint(
            model_name="processor",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "processor_type"),
                name="unique_processor_types_tenant",
            ),
        ),
        migrations.AddConstraint(
            model_name="processor",
            constraint=RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_processor",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="scan",
            name="processor",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="scans",
                related_query_name="scan",
                to="api.processor",
            ),
        ),
    ]
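`RowLevelSecurityConstraint` is defined in `api.rls`, outside this diff. As a hedged sketch, the kind of policy it presumably installs for the statements listed above (the session-variable name is an assumption):

```python
# Illustrative only -- the real SQL is generated by api.rls.
RLS_ON_PROCESSOR_SQL = """
ALTER TABLE processors ENABLE ROW LEVEL SECURITY;
CREATE POLICY rls_on_processor ON processors
    FOR ALL  -- covering the SELECT/INSERT/UPDATE/DELETE statements argument
    USING (tenant_id = current_setting('api.tenant_id')::uuid);
"""
```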
22
api/src/backend/api/migrations/0035_finding_muted_reason.py
Normal file
@@ -0,0 +1,22 @@
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0034_processors"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="muted_reason",
            field=models.TextField(
                blank=True,
                max_length=500,
                null=True,
                validators=[django.core.validators.MinLengthValidator(3)],
            ),
        ),
    ]
@@ -0,0 +1,30 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0035_finding_muted_reason"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_finding_idx",
                columns="tenant_id, finding_id",
                method="BTREE",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_finding_idx",
            ),
        ),
    ]
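`create_index_on_partitions` and `drop_index_on_partitions` are `api.db_utils` helpers not shown in this diff; `functools.partial` pre-binds their keyword arguments because `RunPython` itself only supplies `(apps, schema_editor)`. A minimal sketch of the assumed contract:

```python
# Hedged sketch, not the project implementation: enumerate the partitions of
# parent_table in PostgreSQL and create the index on each one.
def create_index_on_partitions(
    apps, schema_editor, parent_table, index_name, columns, method
):
    with schema_editor.connection.cursor() as cursor:
        cursor.execute(
            "SELECT inhrelid::regclass::text FROM pg_inherits "
            "WHERE inhparent = %s::regclass",
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]
    for partition in partitions:
        schema_editor.execute(
            f"CREATE INDEX IF NOT EXISTS {index_name}_{partition} "
            f"ON {partition} USING {method} ({columns})"
        )
```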
@@ -0,0 +1,17 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0036_rfm_tenant_finding_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="resourcefindingmapping",
            index=models.Index(
                fields=["tenant_id", "finding_id"],
                name="rfm_tenant_finding_idx",
            ),
        ),
    ]
@@ -0,0 +1,15 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0037_rfm_tenant_finding_index_parent"),
    ]

    operations = [
        migrations.AddField(
            model_name="resource",
            name="failed_findings_count",
            field=models.IntegerField(default=0),
        )
    ]
@@ -0,0 +1,20 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0038_resource_failed_findings_count"),
    ]

    operations = [
        AddIndexConcurrently(
            model_name="resource",
            index=models.Index(
                fields=["tenant_id", "-failed_findings_count", "id"],
                name="resources_failed_findings_idx",
            ),
        ),
    ]
@@ -2,6 +2,7 @@ import json
import logging
import re
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta, timezone
from uuid import UUID, uuid4

from allauth.socialaccount.models import SocialApp
@@ -33,6 +34,7 @@ from api.db_utils import (
    IntegrationTypeEnumField,
    InvitationStateEnumField,
    MemberRoleEnumField,
    ProcessorTypeEnumField,
    ProviderEnumField,
    ProviderSecretTypeEnumField,
    ScanTriggerEnumField,
@@ -408,20 +410,6 @@ class Scan(RowLevelSecurityProtectedModel):
    name = models.CharField(
        blank=True, null=True, max_length=100, validators=[MinLengthValidator(3)]
    )
    provider = models.ForeignKey(
        Provider,
        on_delete=models.CASCADE,
        related_name="scans",
        related_query_name="scan",
    )
    task = models.ForeignKey(
        Task,
        on_delete=models.CASCADE,
        related_name="scans",
        related_query_name="scan",
        null=True,
        blank=True,
    )
    trigger = ScanTriggerEnumField(
        choices=TriggerChoices.choices,
    )
@@ -437,11 +425,31 @@ class Scan(RowLevelSecurityProtectedModel):
    completed_at = models.DateTimeField(null=True, blank=True)
    next_scan_at = models.DateTimeField(null=True, blank=True)
    scheduler_task = models.ForeignKey(
        PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
        PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
    )
    output_location = models.CharField(blank=True, null=True, max_length=200)

    # TODO: mutelist foreign key
    provider = models.ForeignKey(
        Provider,
        on_delete=models.CASCADE,
        related_name="scans",
        related_query_name="scan",
    )
    task = models.ForeignKey(
        Task,
        on_delete=models.CASCADE,
        related_name="scans",
        related_query_name="scan",
        null=True,
        blank=True,
    )
    processor = models.ForeignKey(
        "Processor",
        on_delete=models.SET_NULL,
        related_name="scans",
        related_query_name="scan",
        null=True,
        blank=True,
    )

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "scans"
@@ -553,6 +561,8 @@ class Resource(RowLevelSecurityProtectedModel):
    details = models.TextField(blank=True, null=True)
    partition = models.TextField(blank=True, null=True)

    failed_findings_count = models.IntegerField(default=0)

    # Relationships
    tags = models.ManyToManyField(
        ResourceTag,
@@ -599,6 +609,10 @@ class Resource(RowLevelSecurityProtectedModel):
                fields=["tenant_id", "provider_id"],
                name="resources_tenant_provider_idx",
            ),
            models.Index(
                fields=["tenant_id", "-failed_findings_count", "id"],
                name="resources_failed_findings_idx",
            ),
        ]

        constraints = [
@@ -697,6 +711,9 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
    check_id = models.CharField(max_length=100, blank=False, null=False)
    check_metadata = models.JSONField(default=dict, null=False)
    muted = models.BooleanField(default=False, null=False)
    muted_reason = models.TextField(
        blank=True, null=True, validators=[MinLengthValidator(3)], max_length=500
    )
    compliance = models.JSONField(default=dict, null=True, blank=True)

    # Denormalize resource data for performance
@@ -838,6 +855,12 @@ class ResourceFindingMapping(PostgresPartitionedModel, RowLevelSecurityProtected
        # - tenant_id
        # - id

        indexes = [
            models.Index(
                fields=["tenant_id", "finding_id"],
                name="rfm_tenant_finding_idx",
            ),
        ]
        constraints = [
            models.UniqueConstraint(
                fields=("tenant_id", "resource_id", "finding_id"),
@@ -942,6 +965,11 @@ class Invitation(RowLevelSecurityProtectedModel):
        null=True,
    )

    def save(self, *args, **kwargs):
        if self.email:
            self.email = self.email.strip().lower()
        super().save(*args, **kwargs)

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "invitations"
@@ -1370,6 +1398,26 @@ class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
        ]


class SAMLToken(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    expires_at = models.DateTimeField(editable=False)
    token = models.JSONField(unique=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE)

    class Meta:
        db_table = "saml_tokens"

    def save(self, *args, **kwargs):
        if not self.expires_at:
            self.expires_at = datetime.now(timezone.utc) + timedelta(seconds=15)
        super().save(*args, **kwargs)

    def is_expired(self) -> bool:
        return datetime.now(timezone.utc) >= self.expires_at


class SAMLDomainIndex(models.Model):
    """
    Public index of SAML domains. No RLS. Used for fast lookup in SAML login flow.
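Taken together, `save()` and `is_expired()` make `SAMLToken` a short-lived handoff between the SAML ACS redirect and the token exchange endpoint: it must be redeemed within roughly 15 seconds of creation. An illustrative usage sketch (the payload shape is an assumption, not confirmed by this diff):

```python
saml_token = SAMLToken.objects.create(
    user=some_user,  # hypothetical User instance
    token={"access": "<jwt>", "refresh": "<jwt>"},  # assumed payload shape
)
assert not saml_token.is_expired()  # valid for ~15 s after the first save()
```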
@@ -1447,7 +1495,7 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
            ),
        ]

    def clean(self, old_email_domain=None):
    def clean(self, old_email_domain=None, is_create=False):
        # Domain must not contain @
        if "@" in self.email_domain:
            raise ValidationError({"email_domain": "Domain must not contain @"})
@@ -1471,6 +1519,25 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
                {"tenant": "There is a problem with your email domain."}
            )

        # The entityID must be unique in the system
        idp_settings = self._parsed_metadata
        entity_id = idp_settings.get("entity_id")

        if entity_id:
            # Find any SocialApp with this entityID
            q = SocialApp.objects.filter(provider="saml", provider_id=entity_id)

            # If updating, exclude our own SocialApp from the check
            if not is_create:
                q = q.exclude(client_id=old_email_domain)
            else:
                q = q.exclude(client_id=self.email_domain)

            if q.exists():
                raise ValidationError(
                    {"metadata_xml": "There is a problem with your metadata."}
                )

    def save(self, *args, **kwargs):
        self.email_domain = self.email_domain.strip().lower()
        is_create = not SAMLConfiguration.objects.filter(pk=self.pk).exists()
@@ -1483,7 +1550,8 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
            old_email_domain = None
            old_metadata_xml = None

        self.clean(old_email_domain)
        self._parsed_metadata = self._parse_metadata()
        self.clean(old_email_domain, is_create)
        super().save(*args, **kwargs)

        if is_create or (
@@ -1501,6 +1569,12 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
                email_domain=self.email_domain, defaults={"tenant": self.tenant}
            )

    def delete(self, *args, **kwargs):
        super().delete(*args, **kwargs)

        SocialApp.objects.filter(provider="saml", client_id=self.email_domain).delete()
        SAMLDomainIndex.objects.filter(email_domain=self.email_domain).delete()

    def _parse_metadata(self):
        """
        Parse the raw IdP metadata XML and extract:
@@ -1520,6 +1594,8 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):

        # Entity ID
        entity_id = root.attrib.get("entityID")
        if not entity_id:
            raise ValidationError({"metadata_xml": "Missing entityID in metadata."})

        # SSO endpoint (must exist)
        sso = root.find(".//md:IDPSSODescriptor/md:SingleSignOnService", ns)
@@ -1558,9 +1634,8 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
        Create or update the corresponding SocialApp based on email_domain.
        If the domain changed, update the matching SocialApp.
        """
        idp_settings = self._parse_metadata()
        settings_dict = SOCIALACCOUNT_PROVIDERS["saml"].copy()
        settings_dict["idp"] = idp_settings
        settings_dict["idp"] = self._parsed_metadata

        current_site = Site.objects.get(id=settings.SITE_ID)

@@ -1568,19 +1643,24 @@ class SAMLConfiguration(RowLevelSecurityProtectedModel):
            provider="saml", client_id=previous_email_domain or self.email_domain
        )

        client_id = self.email_domain[:191]
        name = f"SAML-{self.email_domain}"[:40]

        if social_app_qs.exists():
            social_app = social_app_qs.first()
            social_app.client_id = self.email_domain
            social_app.name = f"{self.tenant.name} SAML ({self.email_domain})"
            social_app.client_id = client_id
            social_app.name = name
            social_app.settings = settings_dict
            social_app.provider_id = self._parsed_metadata["entity_id"]
            social_app.save()
            social_app.sites.set([current_site])
        else:
            social_app = SocialApp.objects.create(
                provider="saml",
                client_id=self.email_domain,
                name=f"{self.tenant.name} SAML ({self.email_domain})",
                client_id=client_id,
                name=name,
                settings=settings_dict,
                provider_id=self._parsed_metadata["entity_id"],
            )
            social_app.sites.set([current_site])
@@ -1759,3 +1839,42 @@ class LighthouseConfiguration(RowLevelSecurityProtectedModel):

    class JSONAPIMeta:
        resource_name = "lighthouse-configurations"


class Processor(RowLevelSecurityProtectedModel):
    class ProcessorChoices(models.TextChoices):
        MUTELIST = "mutelist", _("Mutelist")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    processor_type = ProcessorTypeEnumField(choices=ProcessorChoices.choices)
    configuration = models.JSONField(default=dict)

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "processors"

        constraints = [
            models.UniqueConstraint(
                fields=("tenant_id", "processor_type"),
                name="unique_processor_types_tenant",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]
        indexes = [
            models.Index(
                fields=["tenant_id", "id"],
                name="processor_tenant_id_idx",
            ),
            models.Index(
                fields=["tenant_id", "processor_type"],
                name="processor_tenant_type_idx",
            ),
        ]

    class JSONAPIMeta:
        resource_name = "processors"
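A hedged usage sketch tying the model to the new `Scan.processor` foreign key above (the mutelist payload is illustrative and must satisfy the Prowler mutelist schema):

```python
processor = Processor.objects.create(
    tenant_id=tenant_id,  # hypothetical tenant UUID
    processor_type=Processor.ProcessorChoices.MUTELIST,
    configuration={"Mutelist": {"Accounts": {"*": {"Checks": {}}}}},
)
# Scans reference the processor with on_delete=SET_NULL, so deleting the
# processor later leaves historical scans intact.
scan.processor = processor
scan.save()
```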
@@ -11,7 +11,7 @@ def test_basic_authentication():
    client = APIClient()

    test_user = "test_email@prowler.com"
    test_password = "test_password"
    test_password = "Test_password@1"

    # Check that a 401 is returned when no basic authentication is provided
    no_auth_response = client.get(reverse("provider-list"))
@@ -108,7 +108,7 @@ def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fi
    user1_email = "user1@testing.com"
    user2_email = "user2@testing.com"

    password = "thisisapassword123"
    password = "Thisisapassword123@"

    user1_response = client.post(
        reverse("user-list"),
@@ -187,7 +187,7 @@ class TestTokenSwitchTenant:
        client = APIClient()

        test_user = "test_email@prowler.com"
        test_password = "test_password"
        test_password = "Test_password1@"

        # Check that we can create a new user without any kind of authentication
        user_creation_response = client.post(
@@ -17,7 +17,7 @@ def test_delete_provider_without_executing_task(
    client = APIClient()

    test_user = "test_email@prowler.com"
    test_password = "test_password"
    test_password = "Test_password1@"

    prowler_task = tasks_fixture[0]
    task_mock = Mock()
@@ -1,12 +1,10 @@
from unittest.mock import MagicMock
from unittest.mock import MagicMock, patch

import pytest
from allauth.socialaccount.models import SocialLogin
from django.contrib.auth import get_user_model

from api.adapters import ProwlerSocialAccountAdapter
from api.db_router import MainRouter
from api.models import Membership, SAMLConfiguration, Tenant

User = get_user_model()
@@ -27,7 +25,8 @@ class TestProwlerSocialAccountAdapter:

        sociallogin = MagicMock(spec=SocialLogin)
        sociallogin.account = MagicMock()
        sociallogin.account.provider = "saml"
        sociallogin.provider = MagicMock()
        sociallogin.provider.id = "saml"
        sociallogin.account.extra_data = {}
        sociallogin.user = create_test_user
        sociallogin.connect = MagicMock()
@@ -46,7 +45,9 @@ class TestProwlerSocialAccountAdapter:

        sociallogin = MagicMock(spec=SocialLogin)
        sociallogin.account = MagicMock()
        sociallogin.account.provider = "github"
        sociallogin.provider = MagicMock()
        sociallogin.user = MagicMock()
        sociallogin.provider.id = "saml"
        sociallogin.account.extra_data = {}
        sociallogin.connect = MagicMock()

@@ -54,29 +55,23 @@ class TestProwlerSocialAccountAdapter:

        sociallogin.connect.assert_not_called()

    def test_save_user_saml_flow(
        self,
        rf,
        saml_setup,
        saml_sociallogin,
    ):
    def test_save_user_saml_sets_session_flag(self, rf):
        adapter = ProwlerSocialAccountAdapter()
        request = rf.get("/")
        saml_sociallogin.user.email = saml_setup["email"]
        request.session = {}

        tenant = Tenant.objects.using(MainRouter.admin_db).get(
            id=saml_setup["tenant_id"]
        )
        saml_config = SAMLConfiguration.objects.using(MainRouter.admin_db).get(
            tenant=tenant
        )
        assert saml_config.email_domain == saml_setup["domain"]
        sociallogin = MagicMock(spec=SocialLogin)
        sociallogin.provider = MagicMock()
        sociallogin.provider.id = "saml"
        sociallogin.account = MagicMock()
        sociallogin.account.extra_data = {}

        user = adapter.save_user(request, saml_sociallogin)
        mock_user = MagicMock()
        mock_user.id = 123

        assert user.email == saml_setup["email"]
        assert (
            Membership.objects.using(MainRouter.admin_db)
            .filter(user=user, tenant=tenant)
            .exists()
        )
        with patch("api.adapters.super") as mock_super:
            with patch("api.adapters.transaction"):
                with patch("api.adapters.MainRouter"):
                    mock_super.return_value.save_user.return_value = mock_user
                    adapter.save_user(request, sociallogin)
                    assert request.session["saml_user_created"] == "123"
@@ -3,7 +3,7 @@ from allauth.socialaccount.models import SocialApp
from django.core.exceptions import ValidationError

from api.db_router import MainRouter
from api.models import Resource, ResourceTag, SAMLConfiguration, Tenant
from api.models import Resource, ResourceTag, SAMLConfiguration, SAMLDomainIndex


@pytest.mark.django_db
@@ -142,8 +142,8 @@ class TestSAMLConfigurationModel:
    </md:EntityDescriptor>
    """

    def test_creates_valid_configuration(self):
        tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant A")
    def test_creates_valid_configuration(self, tenants_fixture):
        tenant = tenants_fixture[0]
        config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain="ssoexample.com",
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
@@ -153,8 +153,8 @@ class TestSAMLConfigurationModel:
        assert config.email_domain == "ssoexample.com"
        assert SocialApp.objects.filter(client_id="ssoexample.com").exists()

    def test_email_domain_with_at_symbol_fails(self):
        tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant B")
    def test_email_domain_with_at_symbol_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]
        config = SAMLConfiguration(
            email_domain="invalid@domain.com",
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
@@ -168,9 +168,8 @@ class TestSAMLConfigurationModel:
        assert "email_domain" in errors
        assert "Domain must not contain @" in errors["email_domain"][0]

    def test_duplicate_email_domain_fails(self):
        tenant1 = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant C1")
        tenant2 = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant C2")
    def test_duplicate_email_domain_fails(self, tenants_fixture):
        tenant1, tenant2, *_ = tenants_fixture

        SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain="duplicate.com",
@@ -191,8 +190,8 @@ class TestSAMLConfigurationModel:
        assert "tenant" in errors
        assert "There is a problem with your email domain." in errors["tenant"][0]

    def test_duplicate_tenant_config_fails(self):
        tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant D")
    def test_duplicate_tenant_config_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]

        SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain="unique1.com",
@@ -216,8 +215,8 @@ class TestSAMLConfigurationModel:
            in errors["tenant"][0]
        )

    def test_invalid_metadata_xml_fails(self):
        tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant E")
    def test_invalid_metadata_xml_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]
        config = SAMLConfiguration(
            email_domain="brokenxml.com",
            metadata_xml="<bad<xml>",
@@ -232,8 +231,8 @@ class TestSAMLConfigurationModel:
        assert "Invalid XML" in errors["metadata_xml"][0]
        assert "not well-formed" in errors["metadata_xml"][0]

    def test_metadata_missing_sso_fails(self):
        tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant F")
    def test_metadata_missing_sso_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]
        xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
            <md:IDPSSODescriptor></md:IDPSSODescriptor>
        </md:EntityDescriptor>"""
@@ -250,8 +249,8 @@ class TestSAMLConfigurationModel:
        assert "metadata_xml" in errors
        assert "Missing SingleSignOnService" in errors["metadata_xml"][0]

    def test_metadata_missing_certificate_fails(self):
        tenant = Tenant.objects.using(MainRouter.admin_db).create(name="Tenant G")
    def test_metadata_missing_certificate_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]
        xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
            <md:IDPSSODescriptor>
                <md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" Location="https://example.com/sso"/>
@@ -269,3 +268,59 @@ class TestSAMLConfigurationModel:
        errors = exc_info.value.message_dict
        assert "metadata_xml" in errors
        assert "X509Certificate" in errors["metadata_xml"][0]

    def test_deletes_saml_configuration_and_related_objects(self, tenants_fixture):
        tenant = tenants_fixture[0]
        email_domain = "deleteme.com"

        # Create the configuration
        config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain=email_domain,
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
            tenant=tenant,
        )

        # Verify that the SocialApp and SAMLDomainIndex exist
        assert SocialApp.objects.filter(client_id=email_domain).exists()
        assert (
            SAMLDomainIndex.objects.using(MainRouter.admin_db)
            .filter(email_domain=email_domain)
            .exists()
        )

        # Delete the configuration
        config.delete()

        # Verify that the configuration and its related objects are deleted
        assert (
            not SAMLConfiguration.objects.using(MainRouter.admin_db)
            .filter(pk=config.pk)
            .exists()
        )
        assert not SocialApp.objects.filter(client_id=email_domain).exists()
        assert (
            not SAMLDomainIndex.objects.using(MainRouter.admin_db)
            .filter(email_domain=email_domain)
            .exists()
        )

    def test_duplicate_entity_id_fails_on_creation(self, tenants_fixture):
        tenant1, tenant2, *_ = tenants_fixture
        SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain="first.com",
            metadata_xml=self.VALID_METADATA,
            tenant=tenant1,
        )

        config = SAMLConfiguration(
            email_domain="second.com",
            metadata_xml=self.VALID_METADATA,
            tenant=tenant2,
        )

        with pytest.raises(ValidationError) as exc_info:
            config.save()

        errors = exc_info.value.message_dict
        assert "metadata_xml" in errors
        assert "There is a problem with your metadata." in errors["metadata_xml"][0]
@@ -1,6 +1,7 @@
from unittest.mock import ANY, Mock, patch

import pytest
from conftest import TODAY
from django.urls import reverse
from rest_framework import status
@@ -60,7 +61,7 @@ class TestUserViewSet:
    def test_create_user_with_all_permissions(self, authenticated_client_rbac):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "password": "Newpassword123@",
            "email": "new_user@test.com",
        }
        response = authenticated_client_rbac.post(
@@ -74,7 +75,7 @@ class TestUserViewSet:
    ):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "password": "Newpassword123@",
            "email": "new_user@test.com",
        }
        response = authenticated_client_no_permissions_rbac.post(
@@ -321,7 +322,7 @@ class TestProviderViewSet:
@pytest.mark.django_db
class TestLimitedVisibility:
    TEST_EMAIL = "rbac@rbac.com"
    TEST_PASSWORD = "thisisapassword123"
    TEST_PASSWORD = "Thisisapassword123@"

    @pytest.fixture
    def limited_admin_user(
@@ -409,3 +410,87 @@ class TestLimitedVisibility:
        assert (
            response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
        )

    def test_overviews_providers(
        self,
        authenticated_client_rbac_limited,
        scan_summaries_fixture,
        providers_fixture,
    ):
        # By default, the associated provider is the one which has the overview data
        response = authenticated_client_rbac_limited.get(reverse("overview-providers"))

        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) > 0

        # Changing the provider visibility, no data should be returned
        # Only the associated provider to that group is changed
        new_provider = providers_fixture[1]
        ProviderGroupMembership.objects.all().update(provider=new_provider)

        response = authenticated_client_rbac_limited.get(reverse("overview-providers"))

        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

    @pytest.mark.parametrize(
        "endpoint_name",
        [
            "findings",
            "findings_severity",
        ],
    )
    def test_overviews_findings(
        self,
        endpoint_name,
        authenticated_client_rbac_limited,
        scan_summaries_fixture,
        providers_fixture,
    ):
        # By default, the associated provider is the one which has the overview data
        response = authenticated_client_rbac_limited.get(
            reverse(f"overview-{endpoint_name}")
        )

        assert response.status_code == status.HTTP_200_OK
        values = response.json()["data"]["attributes"].values()
        assert any(value > 0 for value in values)

        # Changing the provider visibility, no data should be returned
        # Only the associated provider to that group is changed
        new_provider = providers_fixture[1]
        ProviderGroupMembership.objects.all().update(provider=new_provider)

        response = authenticated_client_rbac_limited.get(
            reverse(f"overview-{endpoint_name}")
        )

        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]["attributes"].values()
        assert all(value == 0 for value in data)

    def test_overviews_services(
        self,
        authenticated_client_rbac_limited,
        scan_summaries_fixture,
        providers_fixture,
    ):
        # By default, the associated provider is the one which has the overview data
        response = authenticated_client_rbac_limited.get(
            reverse("overview-services"), {"filter[inserted_at]": TODAY}
        )

        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) > 0

        # Changing the provider visibility, no data should be returned
        # Only the associated provider to that group is changed
        new_provider = providers_fixture[1]
        ProviderGroupMembership.objects.all().update(provider=new_provider)

        response = authenticated_client_rbac_limited.get(
            reverse("overview-services"), {"filter[inserted_at]": TODAY}
        )

        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0
@@ -131,6 +131,21 @@ class TestInitializeProwlerProvider:
            initialize_prowler_provider(provider)
            mock_return_prowler_provider.return_value.assert_called_once_with(key="value")

    @patch("api.utils.return_prowler_provider")
    def test_initialize_prowler_provider_with_mutelist(
        self, mock_return_prowler_provider
    ):
        provider = MagicMock()
        provider.secret.secret = {"key": "value"}
        mutelist_processor = MagicMock()
        mutelist_processor.configuration = {"Mutelist": {"key": "value"}}
        mock_return_prowler_provider.return_value = MagicMock()

        initialize_prowler_provider(provider, mutelist_processor)
        mock_return_prowler_provider.return_value.assert_called_once_with(
            key="value", mutelist_content={"key": "value"}
        )


class TestProwlerProviderConnectionTest:
    @patch("api.utils.return_prowler_provider")
@@ -200,6 +215,25 @@ class TestGetProwlerProviderKwargs:
        expected_result = {**secret_dict, **expected_extra_kwargs}
        assert result == expected_result

    def test_get_prowler_provider_kwargs_with_mutelist(self):
        provider_uid = "provider_uid"
        secret_dict = {"key": "value"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        mutelist_processor = MagicMock()
        mutelist_processor.configuration = {"Mutelist": {"key": "value"}}

        provider = MagicMock()
        provider.provider = Provider.ProviderChoices.AWS.value
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider, mutelist_processor)

        expected_result = {**secret_dict, "mutelist_content": {"key": "value"}}
        assert result == expected_result

    def test_get_prowler_provider_kwargs_unsupported_provider(self):
        # Setup
        provider_uid = "provider_uid"
@@ -254,7 +288,7 @@ class TestValidateInvitation:

        assert result == invitation
        mock_db.get.assert_called_once_with(
            token="VALID_TOKEN", email="user@example.com"
            token="VALID_TOKEN", email__iexact="user@example.com"
        )

    def test_invitation_not_found_raises_validation_error(self):
@@ -269,7 +303,7 @@ class TestValidateInvitation:
            "invitation_token": "Invalid invitation code."
        }
        mock_db.get.assert_called_once_with(
            token="INVALID_TOKEN", email="user@example.com"
            token="INVALID_TOKEN", email__iexact="user@example.com"
        )

    def test_invitation_not_found_raises_not_found(self):
@@ -284,7 +318,7 @@ class TestValidateInvitation:

        assert exc_info.value.detail == "Invitation is not valid."
        mock_db.get.assert_called_once_with(
            token="INVALID_TOKEN", email="user@example.com"
            token="INVALID_TOKEN", email__iexact="user@example.com"
        )

    def test_invitation_expired(self, invitation):
@@ -332,5 +366,27 @@ class TestValidateInvitation:
            "invitation_token": "Invalid invitation code."
        }
        mock_db.get.assert_called_once_with(
            token="VALID_TOKEN", email="different@example.com"
            token="VALID_TOKEN", email__iexact="different@example.com"
        )

    def test_valid_invitation_uppercase_email(self):
        """Test that validate_invitation works with case-insensitive email lookup."""
        uppercase_email = "USER@example.com"

        invitation = MagicMock(spec=Invitation)
        invitation.token = "VALID_TOKEN"
        invitation.email = uppercase_email
        invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=1)
        invitation.state = Invitation.State.PENDING
        invitation.tenant = MagicMock()

        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation

            result = validate_invitation("VALID_TOKEN", "user@example.com")

            assert result == invitation
            mock_db.get.assert_called_once_with(
                token="VALID_TOKEN", email__iexact="user@example.com"
            )
@@ -7,7 +7,7 @@ from rest_framework.exceptions import NotFound, ValidationError

from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Invitation, Provider, Resource
from api.models import Invitation, Processor, Provider, Resource
from api.v1.serializers import FindingMetadataSerializer
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
@@ -83,11 +83,14 @@ def return_prowler_provider(
    return prowler_provider


def get_prowler_provider_kwargs(provider: Provider) -> dict:
def get_prowler_provider_kwargs(
    provider: Provider, mutelist_processor: Processor | None = None
) -> dict:
    """Get the Prowler provider kwargs based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secret.
        mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.

    Returns:
        dict: The provider kwargs for the corresponding provider class.
@@ -105,16 +108,24 @@ def get_prowler_provider_kwargs(provider: Provider) -> dict:
        }
    elif provider.provider == Provider.ProviderChoices.KUBERNETES.value:
        prowler_provider_kwargs = {**prowler_provider_kwargs, "context": provider.uid}

    if mutelist_processor:
        mutelist_content = mutelist_processor.configuration.get("Mutelist", {})
        if mutelist_content:
            prowler_provider_kwargs["mutelist_content"] = mutelist_content

    return prowler_provider_kwargs


def initialize_prowler_provider(
    provider: Provider,
    mutelist_processor: Processor | None = None,
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider:
    """Initialize a Prowler provider instance based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.
        mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
@@ -122,7 +133,7 @@ def initialize_prowler_provider(
        provider's secrets.
    """
    prowler_provider = return_prowler_provider(provider)
    prowler_provider_kwargs = get_prowler_provider_kwargs(provider)
    prowler_provider_kwargs = get_prowler_provider_kwargs(provider, mutelist_processor)
    return prowler_provider(**prowler_provider_kwargs)

@@ -187,7 +198,7 @@ def validate_invitation(
        # Admin DB connector is used to bypass RLS protection since the invitation belongs to a tenant the user
        # is not a member of yet
        invitation = Invitation.objects.using(MainRouter.admin_db).get(
            token=invitation_token, email=email
            token=invitation_token, email__iexact=email
        )
    except Invitation.DoesNotExist:
        if raise_not_found:
@@ -24,20 +24,32 @@ class PaginateByPkMixin:
        request,  # noqa: F841
        base_queryset,
        manager,
        select_related: list[str] | None = None,
        prefetch_related: list[str] | None = None,
        select_related: list | None = None,
        prefetch_related: list | None = None,
    ) -> Response:
        """
        Paginate a queryset by primary key.

        This method is useful when you want to paginate a queryset that has been
        filtered or annotated in a way that would be lost if you used the default
        pagination method.
        """
        pk_list = base_queryset.values_list("id", flat=True)
        page = self.paginate_queryset(pk_list)
        if page is None:
            return Response(self.get_serializer(base_queryset, many=True).data)

        queryset = manager.filter(id__in=page)

        if select_related:
            queryset = queryset.select_related(*select_related)
        if prefetch_related:
            queryset = queryset.prefetch_related(*prefetch_related)

        # Optimize tags loading, if applicable
        if hasattr(self, "_optimize_tags_loading"):
            queryset = self._optimize_tags_loading(queryset)

        queryset = sorted(queryset, key=lambda obj: page.index(obj.id))

        serialized = self.get_serializer(queryset, many=True).data

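A hedged sketch of how a viewset would call the mixin above (the viewset, model, and relation names are illustrative, not taken from this diff):

```python
class ResourceViewSet(PaginateByPkMixin, viewsets.ReadOnlyModelViewSet):
    def list(self, request, *args, **kwargs):
        annotated_qs = self.filter_queryset(self.get_queryset())
        # Page over primary keys first, then re-fetch only the page's rows with
        # their relations; the annotations only have to survive the id phase.
        return self.paginate_by_pk(
            request,
            annotated_qs,
            manager=Resource.objects,
            select_related=["provider"],
            prefetch_related=["tags"],
        )
```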
23
api/src/backend/api/v1/serializer_utils/base.py
Normal file
@@ -0,0 +1,23 @@
import yaml
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError


class BaseValidateSerializer(serializers.Serializer):
    def validate(self, data):
        if hasattr(self, "initial_data"):
            initial_data = set(self.initial_data.keys()) - {"id", "type"}
            unknown_keys = initial_data - set(self.fields.keys())
            if unknown_keys:
                raise ValidationError(f"Invalid fields: {unknown_keys}")
        return data


class YamlOrJsonField(serializers.JSONField):
    def to_internal_value(self, data):
        if isinstance(data, str):
            try:
                data = yaml.safe_load(data)
            except yaml.YAMLError as exc:
                raise serializers.ValidationError("Invalid YAML format") from exc
        return super().to_internal_value(data)
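With `YamlOrJsonField`, a YAML string and an already-parsed object normalize to the same Python value; an illustrative check:

```python
field = YamlOrJsonField()
from_yaml = field.to_internal_value("Mutelist:\n  Accounts: {}\n")
from_dict = field.to_internal_value({"Mutelist": {"Accounts": {}}})
assert from_yaml == from_dict == {"Mutelist": {"Accounts": {}}}
```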
@@ -1,19 +1,7 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError


class BaseValidateSerializer(serializers.Serializer):
    def validate(self, data):
        if hasattr(self, "initial_data"):
            initial_data = set(self.initial_data.keys()) - {"id", "type"}
            unknown_keys = initial_data - set(self.fields.keys())
            if unknown_keys:
                raise ValidationError(f"Invalid fields: {unknown_keys}")
        return data


# Integrations
from api.v1.serializer_utils.base import BaseValidateSerializer


class S3ConfigSerializer(BaseValidateSerializer):
21
api/src/backend/api/v1/serializer_utils/processors.py
Normal file
@@ -0,0 +1,21 @@
from drf_spectacular.utils import extend_schema_field

from api.v1.serializer_utils.base import YamlOrJsonField

from prowler.lib.mutelist.mutelist import mutelist_schema


@extend_schema_field(
    {
        "oneOf": [
            {
                "type": "object",
                "title": "Mutelist",
                "properties": {"Mutelist": mutelist_schema},
                "additionalProperties": False,
            },
        ]
    }
)
class ProcessorConfigField(YamlOrJsonField):
    pass
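An illustrative `configuration` payload this field accepts, written as a YAML string (the values are made up; the shape must match `mutelist_schema`):

```python
configuration_yaml = """
Mutelist:
  Accounts:
    "*":
      Checks:
        s3_bucket_public_access:
          Regions: ["*"]
          Resources: ["test-bucket-*"]
"""
```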
@@ -7,7 +7,9 @@ from django.contrib.auth.models import update_last_login
from django.contrib.auth.password_validation import validate_password
from drf_spectacular.utils import extend_schema_field
from jwt.exceptions import InvalidKeyError
from rest_framework.validators import UniqueTogetherValidator
from rest_framework_json_api import serializers
from rest_framework_json_api.relations import SerializerMethodResourceRelatedField
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import TokenError
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
@@ -21,6 +23,7 @@ from api.models import (
    InvitationRoleRelationship,
    LighthouseConfiguration,
    Membership,
    Processor,
    Provider,
    ProviderGroup,
    ProviderGroupMembership,
@@ -44,7 +47,9 @@ from api.v1.serializer_utils.integrations import (
    IntegrationCredentialField,
    S3ConfigSerializer,
)
from api.v1.serializer_utils.processors import ProcessorConfigField
from api.v1.serializer_utils.providers import ProviderSecretField
from prowler.lib.mutelist.mutelist import Mutelist

# Tokens

@@ -130,6 +135,12 @@ class TokenSerializer(BaseTokenSerializer):

class TokenSocialLoginSerializer(BaseTokenSerializer):
    email = serializers.EmailField(write_only=True)
    tenant_id = serializers.UUIDField(
        write_only=True,
        required=False,
        help_text="If not provided, the tenant ID of the first membership that was added"
        " to the user will be used.",
    )

    # Output tokens
    refresh = serializers.CharField(read_only=True)
@@ -851,6 +862,7 @@ class ScanSerializer(RLSSerializer):
            "completed_at",
            "scheduled_at",
            "next_scan_at",
            "processor",
            "url",
        ]

@@ -988,8 +1000,12 @@ class ResourceSerializer(RLSSerializer):

    tags = serializers.SerializerMethodField()
    type_ = serializers.CharField(read_only=True)
    failed_findings_count = serializers.IntegerField(read_only=True)

    findings = serializers.ResourceRelatedField(many=True, read_only=True)
    findings = SerializerMethodResourceRelatedField(
        many=True,
        read_only=True,
    )

    class Meta:
        model = Resource
@@ -1005,6 +1021,7 @@ class ResourceSerializer(RLSSerializer):
            "tags",
            "provider",
            "findings",
            "failed_findings_count",
            "url",
        ]
        extra_kwargs = {
@@ -1014,8 +1031,8 @@ class ResourceSerializer(RLSSerializer):
        }

        included_serializers = {
            "findings": "api.v1.serializers.FindingSerializer",
            "provider": "api.v1.serializers.ProviderSerializer",
            "findings": "api.v1.serializers.FindingIncludeSerializer",
            "provider": "api.v1.serializers.ProviderIncludeSerializer",
        }

    @extend_schema_field(
@@ -1026,6 +1043,10 @@ class ResourceSerializer(RLSSerializer):
        }
    )
    def get_tags(self, obj):
        # Use prefetched tags if available to avoid N+1 queries
        if hasattr(obj, "prefetched_tags"):
            return {tag.key: tag.value for tag in obj.prefetched_tags}
        # Fallback to the original method if prefetch is not available
        return obj.get_tags(self.context.get("tenant_id"))

    def get_fields(self):
@@ -1035,10 +1056,17 @@ class ResourceSerializer(RLSSerializer):
        fields["type"] = type_
        return fields

    def get_findings(self, obj):
        return (
            obj.latest_findings
            if hasattr(obj, "latest_findings")
            else obj.findings.all()
        )


class ResourceIncludeSerializer(RLSSerializer):
    """
    Serializer for the Resource model.
    Serializer for the included Resource model.
    """

    tags = serializers.SerializerMethodField()
@@ -1071,6 +1099,10 @@ class ResourceIncludeSerializer(RLSSerializer):
        }
    )
    def get_tags(self, obj):
        # Use prefetched tags if available to avoid N+1 queries
        if hasattr(obj, "prefetched_tags"):
            return {tag.key: tag.value for tag in obj.prefetched_tags}
        # Fallback to the original method if prefetch is not available
        return obj.get_tags(self.context.get("tenant_id"))

    def get_fields(self):
@@ -1081,6 +1113,17 @@ class ResourceIncludeSerializer(RLSSerializer):
        return fields


class ResourceMetadataSerializer(serializers.Serializer):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    types = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    # Temporarily disabled until we implement tag filtering in the UI
    # tags = serializers.JSONField(help_text="Tags are described as key-value pairs.")

    class Meta:
        resource_name = "resources-metadata"


class FindingSerializer(RLSSerializer):
    """
    Serializer for the Finding model.
@@ -1104,6 +1147,7 @@ class FindingSerializer(RLSSerializer):
            "updated_at",
            "first_seen_at",
            "muted",
            "muted_reason",
            "url",
            # Relationships
            "scan",
@@ -1116,6 +1160,28 @@ class FindingSerializer(RLSSerializer):
        }


class FindingIncludeSerializer(RLSSerializer):
    """
    Serializer for the included Finding model.
    """

    class Meta:
        model = Finding
        fields = [
            "id",
            "uid",
            "status",
            "severity",
            "check_id",
            "check_metadata",
            "inserted_at",
            "updated_at",
            "first_seen_at",
            "muted",
            "muted_reason",
        ]


# To be removed when the related endpoint is removed as well
class FindingDynamicFilterSerializer(serializers.Serializer):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
@@ -1200,8 +1266,8 @@ class M365ProviderSecret(serializers.Serializer):
    client_id = serializers.CharField()
    client_secret = serializers.CharField()
    tenant_id = serializers.CharField()
    user = serializers.EmailField()
    password = serializers.CharField()
    user = serializers.EmailField(required=False)
    password = serializers.CharField(required=False)

    class Meta:
        resource_name = "provider-secrets"
@@ -1309,12 +1375,13 @@ class ProviderSecretUpdateSerializer(BaseWriteProviderSecretSerializer):
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
            "provider": {"read_only": True},
            "secret_type": {"read_only": True},
            "secret_type": {"required": False},
        }

    def validate(self, attrs):
        provider = self.instance.provider
        secret_type = self.instance.secret_type
        # To allow updating a secret with the same type without making the `secret_type` mandatory
        secret_type = attrs.get("secret_type") or self.instance.secret_type
        secret = attrs.get("secret")

        validated_attrs = super().validate(attrs)
@@ -2065,6 +2132,128 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
        return super().update(instance, validated_data)


# Processors


class ProcessorSerializer(RLSSerializer):
    """
    Serializer for the Processor model.
    """

    configuration = ProcessorConfigField()

    class Meta:
        model = Processor
        fields = [
            "id",
            "inserted_at",
            "updated_at",
            "processor_type",
            "configuration",
            "url",
        ]


class ProcessorCreateSerializer(RLSSerializer, BaseWriteSerializer):
    configuration = ProcessorConfigField(required=True)

    class Meta:
        model = Processor
        fields = [
            "inserted_at",
            "updated_at",
            "processor_type",
            "configuration",
        ]
        extra_kwargs = {
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
        }
        validators = [
            UniqueTogetherValidator(
                queryset=Processor.objects.all(),
                fields=["processor_type"],
                message="A processor with the same type already exists.",
            )
        ]

    def validate(self, attrs):
        validated_attrs = super().validate(attrs)
        self.validate_processor_data(attrs)
        return validated_attrs

    def validate_processor_data(self, attrs):
        processor_type = attrs.get("processor_type")
        configuration = attrs.get("configuration")
        if processor_type == "mutelist":
            self.validate_mutelist_configuration(configuration)

    def validate_mutelist_configuration(self, configuration):
        if not isinstance(configuration, dict):
            raise serializers.ValidationError("Invalid Mutelist configuration.")

        mutelist_configuration = configuration.get("Mutelist", {})

        if not mutelist_configuration:
            raise serializers.ValidationError(
                "Invalid Mutelist configuration: 'Mutelist' is a required property."
            )

        try:
            Mutelist.validate_mutelist(mutelist_configuration, raise_on_exception=True)
            return
        except Exception as error:
            raise serializers.ValidationError(
                f"Invalid Mutelist configuration: {error}"
            )


class ProcessorUpdateSerializer(BaseWriteSerializer):
    configuration = ProcessorConfigField(required=True)

    class Meta:
        model = Processor
        fields = [
            "inserted_at",
            "updated_at",
            "configuration",
        ]
        extra_kwargs = {
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
        }

    def validate(self, attrs):
        validated_attrs = super().validate(attrs)
        self.validate_processor_data(attrs)
        return validated_attrs

    def validate_processor_data(self, attrs):
        processor_type = self.instance.processor_type
        configuration = attrs.get("configuration")
        if processor_type == "mutelist":
            self.validate_mutelist_configuration(configuration)

    def validate_mutelist_configuration(self, configuration):
        if not isinstance(configuration, dict):
            raise serializers.ValidationError("Invalid Mutelist configuration.")

        mutelist_configuration = configuration.get("Mutelist", {})

        if not mutelist_configuration:
            raise serializers.ValidationError(
                "Invalid Mutelist configuration: 'Mutelist' is a required property."
            )

        try:
            Mutelist.validate_mutelist(mutelist_configuration, raise_on_exception=True)
            return
        except Exception as error:
            raise serializers.ValidationError(
                f"Invalid Mutelist configuration: {error}"
            )


# SSO

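Putting the create serializer and the routing below together, an illustrative request for the new processors endpoint (wire format per JSON:API; the configuration must pass the mutelist validation above):

```python
payload = {
    "data": {
        "type": "processors",
        "attributes": {
            "processor_type": "mutelist",
            # Illustrative configuration; must satisfy the Mutelist schema.
            "configuration": {"Mutelist": {"Accounts": {"*": {"Checks": {}}}}},
        },
    }
}
# POSTed to the "processors" route registered below; the exact URL prefix
# depends on how this urlconf is mounted.
```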
@@ -1,9 +1,11 @@
from allauth.socialaccount.providers.saml.views import ACSView, MetadataView, SLSView
from django.urls import include, path
from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers

from api.v1.views import (
    ComplianceOverviewViewSet,
    CustomSAMLLoginView,
    CustomTokenObtainView,
    CustomTokenRefreshView,
    CustomTokenSwitchTenantView,
@@ -16,6 +18,7 @@ from api.v1.views import (
    LighthouseConfigViewSet,
    MembershipViewSet,
    OverviewViewSet,
    ProcessorViewSet,
    ProviderGroupProvidersRelationshipView,
    ProviderGroupViewSet,
    ProviderSecretViewSet,
@@ -25,6 +28,7 @@ from api.v1.views import (
    RoleViewSet,
    SAMLConfigurationViewSet,
    SAMLInitiateAPIView,
    SAMLTokenValidateView,
    ScanViewSet,
    ScheduleViewSet,
    SchemaView,
@@ -53,6 +57,7 @@ router.register(
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")
router.register(r"integrations", IntegrationViewSet, basename="integration")
router.register(r"processors", ProcessorViewSet, basename="processor")
router.register(r"saml-config", SAMLConfigurationViewSet, basename="saml-config")
router.register(
    r"lighthouse-configurations",
@@ -126,13 +131,32 @@ urlpatterns = [
    path(
        "auth/saml/initiate/", SAMLInitiateAPIView.as_view(), name="api_saml_initiate"
    ),
    # Allauth SAML endpoints for tenants
    path("accounts/", include("allauth.urls")),
    path(
        "api/v1/accounts/saml/<organization_slug>/acs/finish/",
        "accounts/saml/<organization_slug>/login/",
        CustomSAMLLoginView.as_view(),
        name="saml_login",
    ),
    path(
        "accounts/saml/<organization_slug>/acs/",
        ACSView.as_view(),
        name="saml_acs",
    ),
    path(
        "accounts/saml/<organization_slug>/acs/finish/",
        TenantFinishACSView.as_view(),
        name="saml_finish_acs",
    ),
    path(
        "accounts/saml/<organization_slug>/sls/",
        SLSView.as_view(),
        name="saml_sls",
    ),
    path(
        "accounts/saml/<organization_slug>/metadata/",
        MetadataView.as_view(),
        name="saml_metadata",
    ),
    path("tokens/saml", SAMLTokenValidateView.as_view(), name="token-saml"),
    path("tokens/google", GoogleSocialLoginView.as_view(), name="token-google"),
    path("tokens/github", GithubSocialLoginView.as_view(), name="token-github"),
    path("", include(router.urls)),
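The tenant-scoped SAML routes key on `organization_slug`, which in this changeset appears to be the configured email domain (the SocialApp `client_id`). An illustrative lookup, with the slug value assumed:

```python
from django.urls import reverse

acs_finish = reverse("saml_finish_acs", kwargs={"organization_slug": "example.com"})
# -> ".../accounts/saml/example.com/acs/finish/" under whatever prefix this
#    urlconf is mounted at.
```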
@@ -1,14 +1,17 @@
import glob
import logging
import os
from datetime import datetime, timedelta, timezone
from urllib.parse import urljoin

import sentry_sdk
from allauth.socialaccount.models import SocialAccount, SocialApp
from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
from allauth.socialaccount.providers.saml.views import FinishACSView
from allauth.socialaccount.providers.saml.views import FinishACSView, LoginView
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
from celery.result import AsyncResult
from config.custom_logging import BackendLogger
from config.env import env
from config.settings.social_login import (
    GITHUB_OAUTH_CALLBACK_URL,
@@ -19,9 +22,9 @@ from django.conf import settings as django_settings
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.search import SearchQuery
from django.db import transaction
from django.db.models import Count, Exists, F, OuterRef, Prefetch, Q, Sum
from django.db.models import Count, F, Prefetch, Q, Sum
from django.db.models.functions import Coalesce
from django.http import HttpResponse, JsonResponse
from django.http import HttpResponse
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.dateparse import parse_date
@@ -75,7 +78,9 @@ from api.filters import (
    IntegrationFilter,
    InvitationFilter,
    LatestFindingFilter,
    LatestResourceFilter,
    MembershipFilter,
    ProcessorFilter,
    ProviderFilter,
    ProviderGroupFilter,
    ProviderSecretFilter,
@@ -89,13 +94,13 @@ from api.filters import (
    UserFilter,
)
from api.models import (
    ComplianceOverview,
    ComplianceRequirementOverview,
    Finding,
    Integration,
    Invitation,
    LighthouseConfiguration,
    Membership,
    Processor,
    Provider,
    ProviderGroup,
    ProviderGroupMembership,
@@ -103,10 +108,12 @@ from api.models import (
    Resource,
    ResourceFindingMapping,
    ResourceScanSummary,
    ResourceTag,
    Role,
    RoleProviderGroupRelationship,
    SAMLConfiguration,
    SAMLDomainIndex,
    SAMLToken,
    Scan,
    ScanSummary,
    SeverityChoices,
@@ -148,6 +155,9 @@ from api.v1.serializers import (
    OverviewProviderSerializer,
    OverviewServiceSerializer,
    OverviewSeveritySerializer,
    ProcessorCreateSerializer,
    ProcessorSerializer,
    ProcessorUpdateSerializer,
    ProviderCreateSerializer,
    ProviderGroupCreateSerializer,
    ProviderGroupMembershipSerializer,
@@ -158,6 +168,7 @@ from api.v1.serializers import (
    ProviderSecretUpdateSerializer,
    ProviderSerializer,
    ProviderUpdateSerializer,
    ResourceMetadataSerializer,
    ResourceSerializer,
    RoleCreateSerializer,
    RoleProviderGroupRelationshipSerializer,
@@ -183,6 +194,8 @@ from api.v1.serializers import (
    UserUpdateSerializer,
)

logger = logging.getLogger(BackendLogger.API)

CACHE_DECORATOR = cache_control(
    max_age=django_settings.CACHE_MAX_AGE,
    stale_while_revalidate=django_settings.CACHE_STALE_WHILE_REVALIDATE,
@@ -279,7 +292,7 @@ class SchemaView(SpectacularAPIView):

    def get(self, request, *args, **kwargs):
        spectacular_settings.TITLE = "Prowler API"
        spectacular_settings.VERSION = "1.9.0"
        spectacular_settings.VERSION = "1.10.0"
        spectacular_settings.DESCRIPTION = (
            "Prowler API specification.\n\nThis file is auto-generated."
        )
@@ -345,6 +358,11 @@ class SchemaView(SpectacularAPIView):
                "description": "Endpoints for managing Lighthouse configurations, including creation, retrieval, "
                "updating, and deletion of configurations such as OpenAI keys, models, and business context.",
            },
            {
                "name": "Processor",
                "description": "Endpoints for managing post-processors used to process Prowler findings, including "
                "registration, configuration, and deletion of post-processing actions.",
            },
        ]
        return super().get(request, *args, **kwargs)

@@ -401,17 +419,68 @@ class GithubSocialLoginView(SocialLoginView):
        return original_response


@extend_schema(exclude=True)
class SAMLTokenValidateView(GenericAPIView):
    resource_name = "tokens"
    http_method_names = ["post"]

    def post(self, request):
        token_id = request.query_params.get("id", "invalid")
        try:
            saml_token = SAMLToken.objects.using(MainRouter.admin_db).get(id=token_id)
        except SAMLToken.DoesNotExist:
            return Response({"detail": "Invalid token ID."}, status=404)

        if saml_token.is_expired():
            return Response({"detail": "Token expired."}, status=400)

        token_data = saml_token.token
        # Tokens are single-use: we don't keep them around, so delete the token once it has been exchanged
        saml_token.delete()

        return Response(token_data, status=200)


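# A minimal client-side sketch of the one-time token exchange implemented by
# SAMLTokenValidateView above. The host is an assumption (local compose-style
# setup); the endpoint, query parameter, and status codes come from the view.
import requests

def exchange_saml_token(token_id: str) -> dict:
    # The UI receives ?id=<token_id> on its SAML callback URL and trades it
    # for the JWT payload exactly once; the token row is deleted on first use.
    response = requests.post(
        "http://localhost:8080/api/v1/tokens/saml",
        params={"id": token_id},
    )
    response.raise_for_status()  # 404: unknown token id, 400: expired token
    return response.json()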
@extend_schema(exclude=True)
class CustomSAMLLoginView(LoginView):
    def dispatch(self, request, *args, **kwargs):
        """
        Convert GET requests to POST to bypass allauth's confirmation screen.

        Why this is necessary:
        - django-allauth requires POST for social logins to prevent open redirect attacks
        - SAML login links typically use GET requests (e.g., <a href="...">)
        - This conversion allows seamless login without user-facing confirmation

        Security considerations:
        1. Preserves CSRF protection: Original POST handling remains intact
        2. Avoids global SOCIALACCOUNT_LOGIN_ON_GET=True which would:
           - Enable GET logins for ALL providers (security risk)
           - Potentially expose open redirect vulnerabilities
        3. SAML payloads remain signed/encrypted regardless of HTTP method
        4. No sensitive parameters are exposed in URLs (copied to POST body)

        This approach maintains security while providing better UX.
        """
        if request.method == "GET":
            # Convert GET to POST while preserving parameters
            request.method = "POST"
        return super().dispatch(request, *args, **kwargs)


@extend_schema(exclude=True)
class SAMLInitiateAPIView(GenericAPIView):
    serializer_class = SamlInitiateSerializer
    permission_classes = []

    def post(self, request, *args, **kwargs):
        # Validate the input payload and extract the domain
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        email = serializer.validated_data["email_domain"]
        domain = email.split("@", 1)[-1].lower()

        # Retrieve the SAML configuration for the given email domain
        try:
            check = SAMLDomainIndex.objects.get(email_domain=domain)
            with rls_transaction(str(check.tenant_id)):
@@ -421,20 +490,24 @@ class SAMLInitiateAPIView(GenericAPIView):
                {"detail": "Unauthorized domain."}, status=status.HTTP_403_FORBIDDEN
            )

        # Check certificates are not empty
        saml_public_cert = os.getenv("SAML_PUBLIC_CERT", "").strip()
        saml_private_key = os.getenv("SAML_PRIVATE_KEY", "").strip()
        # Check certificates are not empty (TODO: Validate certificates)
        # saml_public_cert = os.getenv("SAML_PUBLIC_CERT", "").strip()
        # saml_private_key = os.getenv("SAML_PRIVATE_KEY", "").strip()

        if not saml_public_cert or not saml_private_key:
            return Response(
                {"detail": "SAML configuration is invalid: missing certificates."},
                status=status.HTTP_403_FORBIDDEN,
            )
        # if not saml_public_cert or not saml_private_key:
        #     return Response(
        #         {"detail": "SAML configuration is invalid: missing certificates."},
        #         status=status.HTTP_403_FORBIDDEN,
        #     )

        saml_login_url = reverse(
        # Build the SAML login URL using the configured API host
        api_host = os.getenv("API_BASE_URL")
        login_path = reverse(
            "saml_login", kwargs={"organization_slug": config.email_domain}
        )
        return redirect(f"{saml_login_url}?email={email}")
        login_url = urljoin(api_host, login_path)

        return redirect(login_url)


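# A hypothetical request that starts the SP-initiated flow above, assuming the
# API is mounted at /api/v1 on localhost:8080; the field name comes from
# SamlInitiateSerializer, while the flat payload shape is an assumption.
import requests

resp = requests.post(
    "http://localhost:8080/api/v1/auth/saml/initiate/",
    json={"email_domain": "user@prowler.com"},
    allow_redirects=False,
)
# Expected: 302 to accounts/saml/<domain>/login/ on success,
# 403 when the e-mail domain has no SAML configuration.
print(resp.status_code, resp.headers.get("Location"))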
@extend_schema_view(
@@ -492,27 +565,64 @@ class SAMLConfigurationViewSet(BaseRLSViewSet):


class TenantFinishACSView(FinishACSView):
    def _rollback_saml_user(self, request):
        """Helper to roll back the SAML user if it was just created and validation fails"""
        saml_user_id = request.session.get("saml_user_created")
        if saml_user_id:
            User.objects.using(MainRouter.admin_db).filter(id=saml_user_id).delete()
            request.session.pop("saml_user_created", None)

    def dispatch(self, request, organization_slug):
        response = super().dispatch(request, organization_slug)
        try:
            super().dispatch(request, organization_slug)
        except Exception as e:
            logger.error(f"SAML dispatch failed: {e}")
            self._rollback_saml_user(request)
            callback_url = env.str("AUTH_URL")
            return redirect(f"{callback_url}?sso_saml_failed=true")

        user = getattr(request, "user", None)
        if not user or not user.is_authenticated:
            return response
            self._rollback_saml_user(request)
            callback_url = env.str("AUTH_URL")
            return redirect(f"{callback_url}?sso_saml_failed=true")

        # Defensive check to avoid edge case failures due to inconsistent or incomplete data in the database
        # This handles scenarios like partially deleted or missing related objects
        try:
            check = SAMLDomainIndex.objects.get(email_domain=organization_slug)
            with rls_transaction(str(check.tenant_id)):
                SAMLConfiguration.objects.get(tenant_id=str(check.tenant_id))
            social_app = SocialApp.objects.get(
                provider="saml", client_id=organization_slug
            )
            user_id = User.objects.get(email=str(user)).id
            social_account = SocialAccount.objects.get(
                user=user, provider=social_app.provider
                user=str(user_id), provider=social_app.provider_id
            )
        except (SocialApp.DoesNotExist, SocialAccount.DoesNotExist):
            return response
        except (
            SAMLDomainIndex.DoesNotExist,
            SAMLConfiguration.DoesNotExist,
            SocialApp.DoesNotExist,
            SocialAccount.DoesNotExist,
            User.DoesNotExist,
        ) as e:
            logger.error(f"SAML user is not authenticated: {e}")
            self._rollback_saml_user(request)
            callback_url = env.str("AUTH_URL")
            return redirect(f"{callback_url}?sso_saml_failed=true")

        extra = social_account.extra_data
        user.first_name = extra.get("firstName", [""])[0]
        user.last_name = extra.get("lastName", [""])[0]
        user.company_name = extra.get("organization", [""])[0]
        user.first_name = (
            extra.get("firstName", [""])[0] if extra.get("firstName") else ""
        )
        user.last_name = extra.get("lastName", [""])[0] if extra.get("lastName") else ""
        user.company_name = (
            extra.get("organization", [""])[0] if extra.get("organization") else ""
        )
        user.name = f"{user.first_name} {user.last_name}".strip()
        if user.name == "":
            user.name = "N/A"
        user.save()

        email_domain = user.email.split("@")[-1]
@@ -521,7 +631,11 @@ class TenantFinishACSView(FinishACSView):
            .get(email_domain=email_domain)
            .tenant
        )
        role_name = extra.get("userType", ["saml_default_role"])[0].strip()
        role_name = (
            extra.get("userType", ["no_permissions"])[0].strip()
            if extra.get("userType")
            else "no_permissions"
        )
        try:
            role = Role.objects.using(MainRouter.admin_db).get(
                name=role_name, tenant=tenant
@@ -547,16 +661,31 @@ class TenantFinishACSView(FinishACSView):
                role=role,
                tenant_id=tenant.id,
            )

        serializer = TokenSocialLoginSerializer(data={"email": user.email})
        serializer.is_valid(raise_exception=True)
        return JsonResponse(
            {
                "type": "saml-social-tokens",
                "attributes": serializer.validated_data,
            }
        membership, _ = Membership.objects.using(MainRouter.admin_db).get_or_create(
            user=user,
            tenant=tenant,
            defaults={
                "user": user,
                "tenant": tenant,
                "role": Membership.RoleChoices.MEMBER,
            },
        )

        serializer = TokenSocialLoginSerializer(
            data={"email": user.email, "tenant_id": str(tenant.id)}
        )
        serializer.is_valid(raise_exception=True)

        token_data = serializer.validated_data
        saml_token = SAMLToken.objects.using(MainRouter.admin_db).create(
            token=token_data, user=user
        )
        callback_url = env.str("SAML_SSO_CALLBACK_URL")
        redirect_url = f"{callback_url}?id={saml_token.id}"
        request.session.pop("saml_user_created", None)

        return redirect(redirect_url)


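# Illustrative shape of social_account.extra_data for a SAML assertion as the
# view above consumes it: allauth stores each SAML attribute as a list, which
# is why every access indexes [0]. The attribute names come from the code;
# the values are invented sample data.
extra_data = {
    "firstName": ["Ada"],
    "lastName": ["Lovelace"],
    "organization": ["Prowler"],
    "userType": ["no_permissions"],
}
first_name = extra_data.get("firstName", [""])[0] if extra_data.get("firstName") else ""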
@extend_schema_view(
    list=extend_schema(
@@ -1756,6 +1885,14 @@ class TaskViewSet(BaseRLSViewSet):
        summary="List all resources",
        description="Retrieve a list of all resources with options for filtering by various criteria. Resources are "
        "objects that are discovered by Prowler. They can be anything from a single host to a whole VPC.",
        parameters=[
            OpenApiParameter(
                name="filter[updated_at]",
                description="At least one of the variations of the `filter[updated_at]` filter must be provided.",
                required=True,
                type=OpenApiTypes.DATE,
            )
        ],
    ),
    retrieve=extend_schema(
        tags=["Resource"],
@@ -1763,15 +1900,43 @@ class TaskViewSet(BaseRLSViewSet):
        description="Fetch detailed information about a specific resource by their ID. A Resource is an object that "
        "is discovered by Prowler. It can be anything from a single host to a whole VPC.",
    ),
    metadata=extend_schema(
        tags=["Resource"],
        summary="Retrieve metadata values from resources",
        description="Fetch unique metadata values from a set of resources. This is useful for dynamic filtering.",
        parameters=[
            OpenApiParameter(
                name="filter[updated_at]",
                description="At least one of the variations of the `filter[updated_at]` filter must be provided.",
                required=True,
                type=OpenApiTypes.DATE,
            )
        ],
        filters=True,
    ),
    latest=extend_schema(
        tags=["Resource"],
        summary="List the latest resources",
        description="Retrieve a list of the latest resources from the latest scans for each provider with options for "
        "filtering by various criteria.",
        filters=True,
    ),
    metadata_latest=extend_schema(
        tags=["Resource"],
        summary="Retrieve metadata values from the latest resources",
        description="Fetch unique metadata values from a set of resources from the latest scans for each provider. "
        "This is useful for dynamic filtering.",
        filters=True,
    ),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
class ResourceViewSet(BaseRLSViewSet):
    queryset = Resource.objects.all()
class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
    queryset = Resource.all_objects.all()
    serializer_class = ResourceSerializer
    http_method_names = ["get"]
    filterset_class = ResourceFilter
    ordering = ["-inserted_at"]
    ordering = ["-failed_findings_count", "-updated_at"]
    ordering_fields = [
        "provider_uid",
        "uid",
@@ -1782,6 +1947,14 @@ class ResourceViewSet(BaseRLSViewSet):
        "inserted_at",
        "updated_at",
    ]
    prefetch_for_includes = {
        "__all__": [],
        "provider": [
            Prefetch(
                "provider", queryset=Provider.all_objects.select_related("resources")
            )
        ],
    }
    # RBAC required permissions (implicit -> MANAGE_PROVIDERS enables unlimited visibility or check the visibility of
    # the provider through the provider group)
    required_permissions = []
@@ -1790,41 +1963,257 @@ class ResourceViewSet(BaseRLSViewSet):
        user_roles = get_role(self.request.user)
        if user_roles.unlimited_visibility:
            # User has unlimited visibility, return all resources
            queryset = Resource.objects.filter(tenant_id=self.request.tenant_id)
            queryset = Resource.all_objects.filter(tenant_id=self.request.tenant_id)
        else:
            # User lacks permission, filter providers based on provider groups associated with the role
            queryset = Resource.objects.filter(
            queryset = Resource.all_objects.filter(
                tenant_id=self.request.tenant_id, provider__in=get_providers(user_roles)
            )

        search_value = self.request.query_params.get("filter[search]", None)
        if search_value:
            # Django's ORM will build a LEFT JOIN and OUTER JOIN on the "through" table, resulting in duplicates
            # The duplicates then require a `distinct` query
            search_query = SearchQuery(
                search_value, config="simple", search_type="plain"
            )
            queryset = queryset.filter(
                Q(tags__key=search_value)
                | Q(tags__value=search_value)
                | Q(tags__text_search=search_query)
                | Q(tags__key__contains=search_value)
                | Q(tags__value__contains=search_value)
                | Q(uid=search_value)
                | Q(name=search_value)
                | Q(region=search_value)
                | Q(service=search_value)
                | Q(type=search_value)
                | Q(text_search=search_query)
                | Q(uid__contains=search_value)
                | Q(name__contains=search_value)
                | Q(region__contains=search_value)
                | Q(service__contains=search_value)
                | Q(type__contains=search_value)
                Q(text_search=search_query) | Q(tags__text_search=search_query)
            ).distinct()

        return queryset

    def _optimize_tags_loading(self, queryset):
        """Optimize tags loading with prefetch_related to avoid N+1 queries"""
        # Use prefetch_related to load all tags in a single query
        return queryset.prefetch_related(
            Prefetch(
                "tags",
                queryset=ResourceTag.objects.filter(
                    tenant_id=self.request.tenant_id
                ).select_related(),
                to_attr="prefetched_tags",
            )
        )

    def get_serializer_class(self):
        if self.action in ["metadata", "metadata_latest"]:
            return ResourceMetadataSerializer
        return super().get_serializer_class()

    def get_filterset_class(self):
        if self.action in ["latest", "metadata_latest"]:
            return LatestResourceFilter
        return ResourceFilter

    def filter_queryset(self, queryset):
        # Do not apply filters when retrieving a specific resource
        if self.action == "retrieve":
            return queryset
        return super().filter_queryset(queryset)

    def list(self, request, *args, **kwargs):
        filtered_queryset = self.filter_queryset(self.get_queryset())
        return self.paginate_by_pk(
            request,
            filtered_queryset,
            manager=Resource.all_objects,
            select_related=["provider"],
            prefetch_related=["findings"],
        )

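    # What PaginateByPkMixin.paginate_by_pk presumably does (a sketch under
    # assumptions, not the actual Prowler implementation): paginate on bare
    # primary keys first, then re-fetch only the page's rows with the expensive
    # joins attached; a real implementation would also restore the ordering.
    def _paginate_by_pk_sketch(
        self, request, queryset, manager, select_related, prefetch_related
    ):
        page_pks = self.paginate_queryset(queryset.values_list("pk", flat=True))
        rows = (
            manager.filter(pk__in=page_pks)
            .select_related(*select_related)
            .prefetch_related(*prefetch_related)
        )
        serializer = self.get_serializer(rows, many=True)
        return self.get_paginated_response(serializer.data)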
    def retrieve(self, request, *args, **kwargs):
        queryset = self._optimize_tags_loading(self.get_queryset())
        instance = get_object_or_404(queryset, pk=kwargs.get("pk"))
        mapping_ids = list(
            ResourceFindingMapping.objects.filter(
                resource=instance, tenant_id=request.tenant_id
            ).values_list("finding_id", flat=True)
        )
        latest_findings = (
            Finding.all_objects.filter(id__in=mapping_ids, tenant_id=request.tenant_id)
            .order_by("uid", "-inserted_at")
            .distinct("uid")
        )
        setattr(instance, "latest_findings", latest_findings)
        serializer = self.get_serializer(instance)
        return Response(serializer.data, status=status.HTTP_200_OK)

    @action(detail=False, methods=["get"], url_name="latest")
    def latest(self, request):
        tenant_id = request.tenant_id
        filtered_queryset = self.filter_queryset(self.get_queryset())

        latest_scan_ids = (
            Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
            .order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
            .values_list("id", flat=True)
        )
        filtered_queryset = filtered_queryset.filter(
            tenant_id=tenant_id, provider__scan__in=latest_scan_ids
        )

        return self.paginate_by_pk(
            request,
            filtered_queryset,
            manager=Resource.all_objects,
            select_related=["provider"],
            prefetch_related=["findings"],
        )

    @action(detail=False, methods=["get"], url_name="metadata")
    def metadata(self, request):
        # Force filter validation
        self.filter_queryset(self.get_queryset())

        tenant_id = request.tenant_id
        query_params = request.query_params

        queryset = ResourceScanSummary.objects.filter(tenant_id=tenant_id)

        if scans := query_params.get("filter[scan__in]") or query_params.get(
            "filter[scan]"
        ):
            queryset = queryset.filter(scan_id__in=scans.split(","))
        else:
            exact = query_params.get("filter[inserted_at]")
            gte = query_params.get("filter[inserted_at__gte]")
            lte = query_params.get("filter[inserted_at__lte]")

            date_filters = {}
            if exact:
                date = parse_date(exact)
                datetime_start = datetime.combine(
                    date, datetime.min.time(), tzinfo=timezone.utc
                )
                datetime_end = datetime_start + timedelta(days=1)
                date_filters["scan_id__gte"] = uuid7_start(
                    datetime_to_uuid7(datetime_start)
                )
                date_filters["scan_id__lt"] = uuid7_start(
                    datetime_to_uuid7(datetime_end)
                )
            else:
                if gte:
                    date_start = parse_date(gte)
                    datetime_start = datetime.combine(
                        date_start, datetime.min.time(), tzinfo=timezone.utc
                    )
                    date_filters["scan_id__gte"] = uuid7_start(
                        datetime_to_uuid7(datetime_start)
                    )
                if lte:
                    date_end = parse_date(lte)
                    datetime_end = datetime.combine(
                        date_end + timedelta(days=1),
                        datetime.min.time(),
                        tzinfo=timezone.utc,
                    )
                    date_filters["scan_id__lt"] = uuid7_start(
                        datetime_to_uuid7(datetime_end)
                    )

            if date_filters:
                queryset = queryset.filter(**date_filters)

        if service_filter := query_params.get("filter[service]") or query_params.get(
            "filter[service__in]"
        ):
            queryset = queryset.filter(service__in=service_filter.split(","))
        if region_filter := query_params.get("filter[region]") or query_params.get(
            "filter[region__in]"
        ):
            queryset = queryset.filter(region__in=region_filter.split(","))
        if resource_type_filter := query_params.get("filter[type]") or query_params.get(
            "filter[type__in]"
        ):
            queryset = queryset.filter(
                resource_type__in=resource_type_filter.split(",")
            )

        services = list(
            queryset.values_list("service", flat=True).distinct().order_by("service")
        )
        regions = list(
            queryset.values_list("region", flat=True).distinct().order_by("region")
        )
        resource_types = list(
            queryset.values_list("resource_type", flat=True)
            .exclude(resource_type__isnull=True)
            .exclude(resource_type__exact="")
            .distinct()
            .order_by("resource_type")
        )

        result = {
            "services": services,
            "regions": regions,
            "types": resource_types,
        }

        serializer = self.get_serializer(data=result)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)

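    # Why the date filters above can be expressed as bounds on scan_id: scan
    # ids are evidently UUIDv7 values, which embed a millisecond timestamp in
    # their most significant bits and therefore sort by creation time. A toy
    # lower-bound helper (not the project's uuid7_start/datetime_to_uuid7):
    #
    #     import uuid
    #     from datetime import datetime, timezone
    #
    #     def uuid7_lower_bound(dt: datetime) -> uuid.UUID:
    #         # Top 48 bits carry the Unix timestamp in ms; set the version
    #         # nibble to 7 and zero the random bits to get the smallest
    #         # possible UUIDv7 for that instant.
    #         ms = int(dt.timestamp() * 1000)
    #         return uuid.UUID(int=(ms << 80) | (0x7 << 76))
    #
    #     start = uuid7_lower_bound(datetime(2024, 1, 2, tzinfo=timezone.utc))
    #     end = uuid7_lower_bound(datetime(2024, 1, 3, tzinfo=timezone.utc))
    #     # ResourceScanSummary.objects.filter(scan_id__gte=start, scan_id__lt=end)
    #
    # This turns a timestamp comparison into a cheap primary-key range scan.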
    @action(
        detail=False,
        methods=["get"],
        url_name="metadata_latest",
        url_path="metadata/latest",
    )
    def metadata_latest(self, request):
        tenant_id = request.tenant_id
        query_params = request.query_params

        latest_scans_queryset = (
            Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
            .order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
        )

        queryset = ResourceScanSummary.objects.filter(
            tenant_id=tenant_id,
            scan_id__in=latest_scans_queryset.values_list("id", flat=True),
        )

        if service_filter := query_params.get("filter[service]") or query_params.get(
            "filter[service__in]"
        ):
            queryset = queryset.filter(service__in=service_filter.split(","))
        if region_filter := query_params.get("filter[region]") or query_params.get(
            "filter[region__in]"
        ):
            queryset = queryset.filter(region__in=region_filter.split(","))
        if resource_type_filter := query_params.get("filter[type]") or query_params.get(
            "filter[type__in]"
        ):
            queryset = queryset.filter(
                resource_type__in=resource_type_filter.split(",")
            )

        services = list(
            queryset.values_list("service", flat=True).distinct().order_by("service")
        )
        regions = list(
            queryset.values_list("region", flat=True).distinct().order_by("region")
        )
        resource_types = list(
            queryset.values_list("resource_type", flat=True)
            .exclude(resource_type__isnull=True)
            .exclude(resource_type__exact="")
            .distinct()
            .order_by("resource_type")
        )

        result = {
            "services": services,
            "regions": regions,
            "types": resource_types,
        }

        serializer = self.get_serializer(data=result)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)


@extend_schema_view(
    list=extend_schema(
@@ -1943,17 +2332,7 @@ class FindingViewSet(PaginateByPkMixin, BaseRLSViewSet):
                search_value, config="simple", search_type="plain"
            )

            resource_match = Resource.all_objects.filter(
                text_search=search_query,
                id__in=ResourceFindingMapping.objects.filter(
                    resource_id=OuterRef("pk"),
                    tenant_id=tenant_id,
                ).values("resource_id"),
            )

            queryset = queryset.filter(
                Q(text_search=search_query) | Q(Exists(resource_match))
            )
            queryset = queryset.filter(text_search=search_query)

        return queryset

@@ -2056,9 +2435,12 @@ class FindingViewSet(PaginateByPkMixin, BaseRLSViewSet):

        # ToRemove: Temporary fallback mechanism
        if not queryset.exists():
            scan_ids = Scan.objects.filter(
            raw_scans_ids = Scan.objects.filter(
                tenant_id=tenant_id, **scan_based_filters
            ).values_list("id", flat=True)
            ).values_list("id", "unique_resource_count")
            scan_ids = [
                scan_id for scan_id, count in raw_scans_ids if count and count > 0
            ]
            for scan_id in scan_ids:
                backfill_scan_resource_summaries_task.apply_async(
                    kwargs={"tenant_id": tenant_id, "scan_id": scan_id}
@@ -2144,7 +2526,12 @@ class FindingViewSet(PaginateByPkMixin, BaseRLSViewSet):
            .order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
        )
        latest_scans_ids = list(latest_scans_queryset.values_list("id", flat=True))
        raw_latest_scans_ids = list(
            latest_scans_queryset.values_list("id", "unique_resource_count")
        )
        latest_scans_ids = [
            scan_id for scan_id, count in raw_latest_scans_ids if count and count > 0
        ]

        queryset = ResourceScanSummary.objects.filter(
            tenant_id=tenant_id,
@@ -3038,9 +3425,9 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
        return Response(serializer.data, status=status.HTTP_200_OK)


@extend_schema(tags=["Overview"])
@extend_schema_view(
    list=extend_schema(
        tags=["Overview"],
    providers=extend_schema(
        summary="Get aggregated provider data",
        description=(
            "Retrieve an aggregated overview of findings and resources grouped by providers. "
@@ -3081,7 +3468,7 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
)
@method_decorator(CACHE_DECORATOR, name="list")
class OverviewViewSet(BaseRLSViewSet):
    queryset = ComplianceOverview.objects.all()
    queryset = ScanSummary.objects.all()
    http_method_names = ["get"]
    ordering = ["-inserted_at"]
    # RBAC required permissions (implicit -> MANAGE_PROVIDERS enables unlimited visibility or check the visibility of
@@ -3092,19 +3479,10 @@ class OverviewViewSet(BaseRLSViewSet):
        role = get_role(self.request.user)
        providers = get_providers(role)

        def _get_filtered_queryset(model):
            if role.unlimited_visibility:
                return model.all_objects.filter(tenant_id=self.request.tenant_id)
            return model.all_objects.filter(
                tenant_id=self.request.tenant_id, scan__provider__in=providers
            )
        if not role.unlimited_visibility:
            self.allowed_providers = providers

        if self.action == "providers":
            return _get_filtered_queryset(Finding)
        elif self.action in ("findings", "findings_severity", "services"):
            return _get_filtered_queryset(ScanSummary)
        else:
            return super().get_queryset()
        return ScanSummary.all_objects.filter(tenant_id=self.request.tenant_id)

    def get_serializer_class(self):
        if self.action == "providers":
@@ -3137,18 +3515,24 @@ class OverviewViewSet(BaseRLSViewSet):
    @action(detail=False, methods=["get"], url_name="providers")
    def providers(self, request):
        tenant_id = self.request.tenant_id
        queryset = self.get_queryset()
        provider_filter = (
            {"provider__in": self.allowed_providers}
            if hasattr(self, "allowed_providers")
            else {}
        )

        latest_scan_ids = (
            Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
            Scan.all_objects.filter(
                tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
            )
            .order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
            .values_list("id", flat=True)
        )

        findings_aggregated = (
            ScanSummary.all_objects.filter(
                tenant_id=tenant_id, scan_id__in=latest_scan_ids
            )
            queryset.filter(scan_id__in=latest_scan_ids)
            .values(
                "scan__provider_id",
                provider=F("scan__provider__provider"),
@@ -3184,7 +3568,7 @@ class OverviewViewSet(BaseRLSViewSet):
        )

        return Response(
            OverviewProviderSerializer(overview, many=True).data,
            self.get_serializer(overview, many=True).data,
            status=status.HTTP_200_OK,
        )

@@ -3193,9 +3577,16 @@ class OverviewViewSet(BaseRLSViewSet):
        tenant_id = self.request.tenant_id
        queryset = self.get_queryset()
        filtered_queryset = self.filter_queryset(queryset)
        provider_filter = (
            {"provider__in": self.allowed_providers}
            if hasattr(self, "allowed_providers")
            else {}
        )

        latest_scan_ids = (
            Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
            Scan.all_objects.filter(
                tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
            )
            .order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
            .values_list("id", flat=True)
@@ -3232,9 +3623,16 @@ class OverviewViewSet(BaseRLSViewSet):
        tenant_id = self.request.tenant_id
        queryset = self.get_queryset()
        filtered_queryset = self.filter_queryset(queryset)
        provider_filter = (
            {"provider__in": self.allowed_providers}
            if hasattr(self, "allowed_providers")
            else {}
        )

        latest_scan_ids = (
            Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
            Scan.all_objects.filter(
                tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
            )
            .order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
            .values_list("id", flat=True)
@@ -3254,7 +3652,7 @@ class OverviewViewSet(BaseRLSViewSet):
        for item in severity_counts:
            severity_data[item["severity"]] = item["count"]

        serializer = OverviewSeveritySerializer(severity_data)
        serializer = self.get_serializer(severity_data)
        return Response(serializer.data, status=status.HTTP_200_OK)

    @action(detail=False, methods=["get"], url_name="services")
@@ -3262,9 +3660,16 @@ class OverviewViewSet(BaseRLSViewSet):
        tenant_id = self.request.tenant_id
        queryset = self.get_queryset()
        filtered_queryset = self.filter_queryset(queryset)
        provider_filter = (
            {"provider__in": self.allowed_providers}
            if hasattr(self, "allowed_providers")
            else {}
        )

        latest_scan_ids = (
            Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
            Scan.all_objects.filter(
                tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
            )
            .order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
            .values_list("id", flat=True)
@@ -3282,11 +3687,12 @@ class OverviewViewSet(BaseRLSViewSet):
            .order_by("service")
        )

        serializer = OverviewServiceSerializer(services_data, many=True)
        serializer = self.get_serializer(services_data, many=True)

        return Response(serializer.data, status=status.HTTP_200_OK)


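# The "latest completed scan per provider" idiom repeated throughout the
# viewset above is PostgreSQL's DISTINCT ON: with a matching leading ORDER BY,
# .distinct("provider_id") keeps only the first (newest) scan row per
# provider. Roughly equivalent SQL, with the table name approximated:
#
#   SELECT DISTINCT ON (provider_id) id
#   FROM scans
#   WHERE tenant_id = %s AND state = 'completed'
#   ORDER BY provider_id, inserted_at DESC;
#
# Note this is Postgres-only; .distinct(*fields) raises on other backends.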
@extend_schema(tags=["Schedule"])
@extend_schema_view(
    daily=extend_schema(
        summary="Create a daily schedule scan for a given provider",
@@ -3481,3 +3887,54 @@ class LighthouseConfigViewSet(BaseRLSViewSet):
            )
        },
    )


@extend_schema_view(
    list=extend_schema(
        tags=["Processor"],
        summary="List all processors",
        description="Retrieve a list of all configured processors with options for filtering by various criteria.",
    ),
    retrieve=extend_schema(
        tags=["Processor"],
        summary="Retrieve processor details",
        description="Fetch detailed information about a specific processor by its ID.",
    ),
    create=extend_schema(
        tags=["Processor"],
        summary="Create a new processor",
        description="Register a new processor with the system, providing necessary configuration details. There can "
        "only be one processor of each type per tenant.",
    ),
    partial_update=extend_schema(
        tags=["Processor"],
        summary="Partially update a processor",
        description="Modify certain fields of an existing processor without affecting other settings.",
    ),
    destroy=extend_schema(
        tags=["Processor"],
        summary="Delete a processor",
        description="Remove a processor from the system by its ID.",
    ),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
class ProcessorViewSet(BaseRLSViewSet):
    queryset = Processor.objects.all()
    serializer_class = ProcessorSerializer
    http_method_names = ["get", "post", "patch", "delete"]
    filterset_class = ProcessorFilter
    ordering = ["processor_type", "-inserted_at"]
    # RBAC required permissions
    required_permissions = [Permissions.MANAGE_ACCOUNT]

    def get_queryset(self):
        queryset = Processor.objects.filter(tenant_id=self.request.tenant_id)
        return queryset

    def get_serializer_class(self):
        if self.action == "create":
            return ProcessorCreateSerializer
        elif self.action == "partial_update":
            return ProcessorUpdateSerializer
        return super().get_serializer_class()

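# A hypothetical request against the processors endpoint registered earlier.
# The JSON:API media type matches the test suite's API_JSON_CONTENT_TYPE; the
# attribute payload is assumed from the serializer and model field names.
import requests

payload = {
    "data": {
        "type": "processors",
        "attributes": {
            "processor_type": "mutelist",
            "configuration": "Mutelist:\n  Accounts:\n    ...",
        },
    }
}
requests.post(
    "http://localhost:8080/api/v1/processors",
    json=payload,
    headers={
        "Content-Type": "application/vnd.api+json",
        "Authorization": "Bearer <access token>",
    },
)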
@@ -1,3 +1,5 @@
import string

from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _

@@ -20,3 +22,89 @@ class MaximumLengthValidator:
        return _(
            f"Your password must contain no more than {self.max_length} characters."
        )


class SpecialCharactersValidator:
    def __init__(self, special_characters=None, min_special_characters=1):
        # Use string.punctuation if no custom characters provided
        self.special_characters = special_characters or string.punctuation
        self.min_special_characters = min_special_characters

    def validate(self, password, user=None):
        if (
            sum(1 for char in password if char in self.special_characters)
            < self.min_special_characters
        ):
            raise ValidationError(
                _("This password must contain at least one special character."),
                code="password_no_special_characters",
                params={
                    "special_characters": self.special_characters,
                    "min_special_characters": self.min_special_characters,
                },
            )

    def get_help_text(self):
        return _(
            f"Your password must contain at least one special character from: {self.special_characters}"
        )


class UppercaseValidator:
    def __init__(self, min_uppercase=1):
        self.min_uppercase = min_uppercase

    def validate(self, password, user=None):
        if sum(1 for char in password if char.isupper()) < self.min_uppercase:
            raise ValidationError(
                _(
                    "This password must contain at least %(min_uppercase)d uppercase letter."
                ),
                code="password_no_uppercase_letters",
                params={"min_uppercase": self.min_uppercase},
            )

    def get_help_text(self):
        return _(
            f"Your password must contain at least {self.min_uppercase} uppercase letter."
        )


class LowercaseValidator:
    def __init__(self, min_lowercase=1):
        self.min_lowercase = min_lowercase

    def validate(self, password, user=None):
        if sum(1 for char in password if char.islower()) < self.min_lowercase:
            raise ValidationError(
                _(
                    "This password must contain at least %(min_lowercase)d lowercase letter."
                ),
                code="password_no_lowercase_letters",
                params={"min_lowercase": self.min_lowercase},
            )

    def get_help_text(self):
        return _(
            f"Your password must contain at least {self.min_lowercase} lowercase letter."
        )


class NumericValidator:
    def __init__(self, min_numeric=1):
        self.min_numeric = min_numeric

    def validate(self, password, user=None):
        if sum(1 for char in password if char.isdigit()) < self.min_numeric:
            raise ValidationError(
                _(
                    "This password must contain at least %(min_numeric)d numeric character."
                ),
                code="password_no_numeric_characters",
                params={"min_numeric": self.min_numeric},
            )

    def get_help_text(self):
        return _(
            f"Your password must contain at least {self.min_numeric} numeric character."
        )

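# Exercising the validators above through Django's standard password
# validation entry point (a sketch; assumes the AUTH_PASSWORD_VALIDATORS
# entries shown in the settings hunk below are active):
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError

try:
    validate_password("weakpass")  # no uppercase, digit, or special character
except ValidationError as exc:
    print(exc.messages)

validate_password("Str0ng!Passw0rd")  # satisfies every configured validator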
@@ -11,6 +11,7 @@ SECRET_KEY = env("SECRET_KEY", default="secret")
DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
USE_X_FORWARDED_HOST = True

# Application definition

@@ -158,6 +159,30 @@ AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
    {
        "NAME": "api.validators.SpecialCharactersValidator",
        "OPTIONS": {
            "min_special_characters": 1,
        },
    },
    {
        "NAME": "api.validators.UppercaseValidator",
        "OPTIONS": {
            "min_uppercase": 1,
        },
    },
    {
        "NAME": "api.validators.LowercaseValidator",
        "OPTIONS": {
            "min_lowercase": 1,
        },
    },
    {
        "NAME": "api.validators.NumericValidator",
        "OPTIONS": {
            "min_numeric": 1,
        },
    },
]

SIMPLE_JWT = {
@@ -248,3 +273,7 @@ X_FRAME_OPTIONS = "DENY"
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"

DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)

# SAML requirement
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True

@@ -4,6 +4,7 @@ from config.env import env
IGNORED_EXCEPTIONS = [
    # Provider is not connected due to credentials errors
    "is not connected",
    "ProviderConnectionError",
    # Authentication Errors from AWS
    "InvalidToken",
    "AccessDeniedException",
@@ -16,7 +17,7 @@ IGNORED_EXCEPTIONS = [
    "InternalServerErrorException",
    "AccessDenied",
    "No Shodan API Key",  # Shodan Check
    "RequestLimitExceeded",  # For now we don't want to log the RequestLimitExceeded errors
    "RequestLimitExceeded",  # For now, we don't want to log the RequestLimitExceeded errors
    "ThrottlingException",
    "Rate exceeded",
    "SubscriptionRequiredException",
@@ -42,7 +43,9 @@ IGNORED_EXCEPTIONS = [
    "AWSAccessKeyIDInvalidError",
    "AWSSessionTokenExpiredError",
    "EndpointConnectionError",  # AWS Service is not available in a region
    "Pool is closed",  # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
    # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an
    # unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
    "Pool is closed",
    # Authentication Errors from GCP
    "ClientAuthenticationError",
    "AuthorizationFailed",
@@ -71,7 +74,7 @@ IGNORED_EXCEPTIONS = [

def before_send(event, hint):
    """
    before_send handles the Sentry events in order to sent them or not
    before_send handles the Sentry events in order to send them or not
    """
    # Ignore logs with the ignored_exceptions
    # https://docs.python.org/3/library/logging.html#logrecord-objects

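# The body of before_send sits outside this hunk; a minimal sketch of the
# filtering it describes, assuming events arrive via the logging integration
# so the hint carries a LogRecord:
def before_send_sketch(event, hint):
    log_record = hint.get("log_record")
    message = getattr(log_record, "message", "") if log_record else ""
    if any(fragment in message for fragment in IGNORED_EXCEPTIONS):
        return None  # returning None tells the Sentry SDK to drop the event
    return event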
@@ -25,9 +25,18 @@ SOCIALACCOUNT_EMAIL_AUTHENTICATION = True
SOCIALACCOUNT_EMAIL_AUTHENTICATION_AUTO_CONNECT = True
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"

# SAML keys
SAML_PUBLIC_CERT = env("SAML_PUBLIC_CERT", default="")
SAML_PRIVATE_KEY = env("SAML_PRIVATE_KEY", default="")

# def inline(pem: str) -> str:
#     return "".join(
#         line.strip()
#         for line in pem.splitlines()
#         if "CERTIFICATE" not in line and "KEY" not in line
#     )


# # SAML keys (TODO: Validate certificates)
# SAML_PUBLIC_CERT = inline(env("SAML_PUBLIC_CERT", default=""))
# SAML_PRIVATE_KEY = inline(env("SAML_PRIVATE_KEY", default=""))

SOCIALACCOUNT_PROVIDERS = {
    "google": {
@@ -60,12 +69,14 @@ SOCIALACCOUNT_PROVIDERS = {
            "entity_id": "urn:prowler.com:sp",
        },
        "advanced": {
            "x509cert": SAML_PUBLIC_CERT,
            "private_key": SAML_PRIVATE_KEY,
            # TODO: Validate certificates
            # "x509cert": SAML_PUBLIC_CERT,
            # "private_key": SAML_PRIVATE_KEY,
            # "authn_request_signed": True,
            # "want_message_signed": True,
            # "want_assertion_signed": True,
            "reject_idp_initiated_sso": False,
            "name_id_format": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
            "authn_request_signed": True,
            "want_assertion_signed": True,
            "want_message_signed": True,
        },
    },
}

@@ -23,11 +23,13 @@ from api.models import (
    Invitation,
    LighthouseConfiguration,
    Membership,
    Processor,
    Provider,
    ProviderGroup,
    ProviderSecret,
    Resource,
    ResourceTag,
    ResourceTagMapping,
    Role,
    SAMLConfiguration,
    SAMLDomainIndex,
@@ -44,12 +46,19 @@ from api.v1.serializers import TokenSerializer
from prowler.lib.check.models import Severity
from prowler.lib.outputs.finding import Status

TODAY = str(datetime.today().date())
API_JSON_CONTENT_TYPE = "application/vnd.api+json"
NO_TENANT_HTTP_STATUS = status.HTTP_401_UNAUTHORIZED
TEST_USER = "dev@prowler.com"
TEST_PASSWORD = "testing_psswd"


def today_after_n_days(n_days: int) -> str:
    return datetime.strftime(
        datetime.today().date() + timedelta(days=n_days), "%Y-%m-%d"
    )


@pytest.fixture(scope="module")
def enforce_test_user_db_connection(django_db_setup, django_db_blocker):
    """Ensure tests use the test user for database connections."""
@@ -381,8 +390,27 @@ def providers_fixture(tenants_fixture):
        tenant_id=tenant.id,
        scanner_args={"key1": "value1", "key2": {"key21": "value21"}},
    )
    provider6 = Provider.objects.create(
        provider="m365",
        uid="m365.test.com",
        alias="m365_testing",
        tenant_id=tenant.id,
    )

    return provider1, provider2, provider3, provider4, provider5
    return provider1, provider2, provider3, provider4, provider5, provider6


@pytest.fixture
def processor_fixture(tenants_fixture):
    tenant, *_ = tenants_fixture
    processor = Processor.objects.create(
        tenant_id=tenant.id,
        processor_type="mutelist",
        configuration="Mutelist:\n Accounts:\n *:\n Checks:\n iam_user_hardware_mfa_enabled:\n "
        " Regions:\n - *\n Resources:\n - *",
    )

    return processor


@pytest.fixture
@@ -634,6 +662,7 @@ def findings_fixture(scans_fixture, resources_fixture):
        check_metadata={
            "CheckId": "test_check_id",
            "Description": "test description apple sauce",
            "servicename": "ec2",
        },
        first_seen_at="2024-01-02T00:00:00Z",
    )
@@ -660,6 +689,7 @@ def findings_fixture(scans_fixture, resources_fixture):
        check_metadata={
            "CheckId": "test_check_id",
            "Description": "test description orange juice",
            "servicename": "s3",
        },
        first_seen_at="2024-01-02T00:00:00Z",
        muted=True,
@@ -1115,10 +1145,73 @@ def latest_scan_finding(authenticated_client, providers_fixture, resources_fixtu
    return finding


@pytest.fixture(scope="function")
def latest_scan_resource(authenticated_client, providers_fixture):
    provider = providers_fixture[0]
    tenant_id = str(providers_fixture[0].tenant_id)
    scan = Scan.objects.create(
        name="latest completed scan for resource",
        provider=provider,
        trigger=Scan.TriggerChoices.MANUAL,
        state=StateChoices.COMPLETED,
        tenant_id=tenant_id,
    )
    resource = Resource.objects.create(
        tenant_id=tenant_id,
        provider=provider,
        uid="latest_resource_uid",
        name="Latest Resource",
        region="us-east-1",
        service="ec2",
        type="instance",
        metadata='{"test": "metadata"}',
        details='{"test": "details"}',
    )

    resource_tag = ResourceTag.objects.create(
        tenant_id=tenant_id,
        key="environment",
        value="test",
    )
    ResourceTagMapping.objects.create(
        tenant_id=tenant_id,
        resource=resource,
        tag=resource_tag,
    )

    finding = Finding.objects.create(
        tenant_id=tenant_id,
        uid="test_finding_uid_latest",
        scan=scan,
        delta="new",
        status=Status.FAIL,
        status_extended="test status extended ",
        impact=Severity.critical,
        impact_extended="test impact extended",
        severity=Severity.critical,
        raw_result={
            "status": Status.FAIL,
            "impact": Severity.critical,
            "severity": Severity.critical,
        },
        tags={"test": "latest"},
        check_id="test_check_id_latest",
        check_metadata={
            "CheckId": "test_check_id_latest",
            "Description": "test description latest",
        },
        first_seen_at="2024-01-02T00:00:00Z",
    )
    finding.add_resources([resource])

    backfill_resource_scan_summaries(tenant_id, str(scan.id))
    return resource


@pytest.fixture
def saml_setup(tenants_fixture):
    tenant_id = tenants_fixture[0].id
    domain = "example.com"
    domain = "prowler.com"

    SAMLDomainIndex.objects.create(email_domain=domain, tenant_id=tenant_id)


@@ -2,10 +2,10 @@ import json
from datetime import datetime, timedelta, timezone

from django_celery_beat.models import IntervalSchedule, PeriodicTask
from rest_framework_json_api.serializers import ValidationError
from tasks.tasks import perform_scheduled_scan_task

from api.db_utils import rls_transaction
from api.exceptions import ConflictException
from api.models import Provider, Scan, StateChoices


@@ -24,15 +24,9 @@ def schedule_provider_scan(provider_instance: Provider):
    if PeriodicTask.objects.filter(
        interval=schedule, name=task_name, task="scan-perform-scheduled"
    ).exists():
        raise ValidationError(
            [
                {
                    "detail": "There is already a scheduled scan for this provider.",
                    "status": 400,
                    "source": {"pointer": "/data/attributes/provider_id"},
                    "code": "invalid",
                }
            ]
        raise ConflictException(
            detail="There is already a scheduled scan for this provider.",
            pointer="/data/attributes/provider_id",
        )

    with rls_transaction(tenant_id):

@@ -31,6 +31,7 @@ from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_kubernetes import (
    KubernetesISO27001,
)
from prowler.lib.outputs.compliance.iso27001.iso27001_m365 import M365ISO27001
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
@@ -90,6 +91,7 @@ COMPLIANCE_CLASS_MAP = {
    "m365": [
        (lambda name: name.startswith("cis_"), M365CIS),
        (lambda name: name == "prowler_threatscore_m365", ProwlerThreatScoreM365),
        (lambda name: name.startswith("iso27001_"), M365ISO27001),
    ],
}


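# How the (predicate, class) pairs in COMPLIANCE_CLASS_MAP are presumably
# consumed (a sketch; the real dispatch code lives outside this hunk):
def resolve_compliance_class(provider_type: str, framework_name: str):
    for matches, compliance_cls in COMPLIANCE_CLASS_MAP.get(provider_type, []):
        if matches(framework_name):
            return compliance_cls
    return None

# With the new entry above:
# resolve_compliance_class("m365", "iso27001_2022_m365") -> M365ISO27001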
@@ -6,7 +6,7 @@ from datetime import datetime, timezone
from celery.utils.log import get_task_logger
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
from django.db import IntegrityError, OperationalError
from django.db.models import Case, Count, IntegerField, Sum, When
from django.db.models import Case, Count, IntegerField, OuterRef, Subquery, Sum, When
from tasks.utils import CustomEncoder

from api.compliance import (
@@ -14,9 +14,11 @@ from api.compliance import (
    generate_scan_compliance,
)
from api.db_utils import create_objects_in_batches, rls_transaction
from api.exceptions import ProviderConnectionError
from api.models import (
    ComplianceRequirementOverview,
    Finding,
    Processor,
    Provider,
    Resource,
    ResourceScanSummary,
@@ -26,7 +28,7 @@ from api.models import (
    StateChoices,
)
from api.models import StatusChoices as FindingStatus
from api.utils import initialize_prowler_provider
from api.utils import initialize_prowler_provider, return_prowler_provider
from api.v1.serializers import ScanTaskSerializer
from prowler.lib.outputs.finding import Finding as ProwlerFinding
from prowler.lib.scan.scan import Scan as ProwlerScan
@@ -132,14 +134,28 @@ def perform_prowler_scan(
    scan_instance.started_at = datetime.now(tz=timezone.utc)
    scan_instance.save()

    # Find the mutelist processor if it exists
    with rls_transaction(tenant_id):
        try:
            mutelist_processor = Processor.objects.get(
                tenant_id=tenant_id, processor_type=Processor.ProcessorChoices.MUTELIST
            )
        except Processor.DoesNotExist:
            mutelist_processor = None
        except Exception as e:
            logger.error(f"Error processing mutelist rules: {e}")
            mutelist_processor = None

    try:
        with rls_transaction(tenant_id):
            try:
                prowler_provider = initialize_prowler_provider(provider_instance)
                prowler_provider = initialize_prowler_provider(
                    provider_instance, mutelist_processor
                )
                provider_instance.connected = True
            except Exception as e:
                provider_instance.connected = False
                exc = ValueError(
                exc = ProviderConnectionError(
                    f"Provider {provider_instance.provider} is not connected: {e}"
                )
            finally:
@@ -149,7 +165,8 @@ def perform_prowler_scan(
        provider_instance.save()

    # If the provider is not connected, raise an exception outside the transaction.
    # If raised within the transaction, the transaction will be rolled back and the provider will not be marked as not connected.
    # If raised within the transaction, the transaction will be rolled back and the provider will not be marked
    # as not connected.
    if exc:
        raise exc

@@ -273,6 +290,9 @@ def perform_prowler_scan(
    if not last_first_seen_at:
        last_first_seen_at = datetime.now(tz=timezone.utc)

    # If the finding is muted at this point, the reason must be the configured Mutelist
    muted_reason = "Muted by mutelist" if finding.muted else None

    # Create the finding
    finding_instance = Finding.objects.create(
        tenant_id=tenant_id,
@@ -288,6 +308,7 @@ def perform_prowler_scan(
        scan=scan_instance,
        first_seen_at=last_first_seen_at,
        muted=finding.muted,
        muted_reason=muted_reason,
        compliance=finding.compliance,
    )
    finding_instance.add_resources([resource_instance])
@@ -355,12 +376,16 @@ def perform_prowler_scan(
def aggregate_findings(tenant_id: str, scan_id: str):
    """
    Aggregates findings for a given scan and stores the results in the ScanSummary table.
    Also updates the failed_findings_count for each resource based on the latest findings.

    This function retrieves all findings associated with a given `scan_id` and calculates various
    metrics such as counts of failed, passed, and muted findings, as well as their deltas (new,
    changed, unchanged). The results are grouped by `check_id`, `service`, `severity`, and `region`.
    These aggregated metrics are then stored in the `ScanSummary` table.

    Additionally, it updates the failed_findings_count field for each resource based on the most
    recent findings for each finding.uid.

    Args:
        tenant_id (str): The ID of the tenant to which the scan belongs.
        scan_id (str): The ID of the scan for which findings need to be aggregated.
@@ -380,6 +405,8 @@ def aggregate_findings(tenant_id: str, scan_id: str):
        - muted_new: Muted findings with a delta of 'new'.
        - muted_changed: Muted findings with a delta of 'changed'.
    """
    _update_resource_failed_findings_count(tenant_id, scan_id)

    with rls_transaction(tenant_id):
        findings = Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)

@@ -504,6 +531,53 @@ def aggregate_findings(tenant_id: str, scan_id: str):
    ScanSummary.objects.bulk_create(scan_aggregations, batch_size=3000)


def _update_resource_failed_findings_count(tenant_id: str, scan_id: str):
    """
    Update the failed_findings_count field for resources based on the latest findings.

    This function calculates the number of failed findings for each resource by:
    1. Getting the latest finding for each finding.uid
    2. Counting failed findings per resource
    3. Updating the failed_findings_count field for each resource

    Args:
        tenant_id (str): The ID of the tenant to which the scan belongs.
        scan_id (str): The ID of the scan for which to update resource counts.
    """

    with rls_transaction(tenant_id):
        scan = Scan.objects.get(pk=scan_id)
        provider_id = scan.provider_id

        resources = list(
            Resource.all_objects.filter(tenant_id=tenant_id, provider_id=provider_id)
        )

    # For each resource, calculate failed findings count based on latest findings
    for resource in resources:
        with rls_transaction(tenant_id):
            # Get the latest finding for each finding.uid that affects this resource
            latest_findings_subquery = (
                Finding.all_objects.filter(
                    tenant_id=tenant_id, uid=OuterRef("uid"), resources=resource
                )
                .order_by("-inserted_at")
                .values("id")[:1]
            )

            # Count failed findings from the latest findings
            failed_count = Finding.all_objects.filter(
                tenant_id=tenant_id,
                resources=resource,
                id__in=Subquery(latest_findings_subquery),
                status=FindingStatus.FAIL,
                muted=False,
            ).count()

            resource.failed_findings_count = failed_count
            resource.save(update_fields=["failed_findings_count"])


def create_compliance_requirements(tenant_id: str, scan_id: str):
|
||||
"""
|
||||
Create detailed compliance requirement overview records for a scan.
|
||||
@@ -526,7 +600,7 @@ def create_compliance_requirements(tenant_id: str, scan_id: str):
|
||||
with rls_transaction(tenant_id):
|
||||
scan_instance = Scan.objects.get(pk=scan_id)
|
||||
provider_instance = scan_instance.provider
|
||||
prowler_provider = initialize_prowler_provider(provider_instance)
|
||||
prowler_provider = return_prowler_provider(provider_instance)
|
||||
|
||||
# Get check status data by region from findings
|
||||
check_status_by_region = {}
|
||||
|
||||
@@ -37,6 +37,26 @@ from prowler.lib.outputs.finding import Finding as FindingOutput
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str):
|
||||
"""
|
||||
Helper function to perform tasks after a scan is completed.
|
||||
|
||||
Args:
|
||||
tenant_id (str): The tenant ID under which the scan was performed.
|
||||
scan_id (str): The ID of the scan that was performed.
|
||||
provider_id (str): The primary key of the Provider instance that was scanned.
|
||||
"""
|
||||
create_compliance_requirements_task.apply_async(
|
||||
kwargs={"tenant_id": tenant_id, "scan_id": scan_id}
|
||||
)
|
||||
chain(
|
||||
perform_scan_summary_task.si(tenant_id=tenant_id, scan_id=scan_id),
|
||||
generate_outputs_task.si(
|
||||
scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
|
||||
),
|
||||
).apply_async()
|
||||
|
||||
|
||||
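A quick illustration of the Celery primitives this helper relies on: `si()` builds an immutable signature (arguments are frozen and the previous task's return value is ignored), and `chain` runs the signatures sequentially. A minimal sketch with hypothetical task names:

```python
from celery import chain, shared_task

@shared_task
def summarize(scan_id: str) -> None:
    ...

@shared_task
def export(scan_id: str) -> None:
    ...

# .si() freezes the arguments and ignores the previous task's result,
# so export() receives scan_id from us, not from summarize().
chain(summarize.si(scan_id="scan-1"), export.si(scan_id="scan-1")).apply_async()
```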
@shared_task(base=RLSTask, name="provider-connection-check")
@set_tenant
def check_provider_connection_task(provider_id: str):
@@ -103,13 +123,7 @@ def perform_scan_task(
        checks_to_execute=checks_to_execute,
    )

    chain(
        perform_scan_summary_task.si(tenant_id, scan_id),
        create_compliance_requirements_task.si(tenant_id=tenant_id, scan_id=scan_id),
        generate_outputs.si(
            scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
        ),
    ).apply_async()
    _perform_scan_complete_tasks(tenant_id, scan_id, provider_id)

    return result

@@ -214,20 +228,12 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
            scheduler_task_id=periodic_task_instance.id,
        )

        chain(
            perform_scan_summary_task.si(tenant_id, scan_instance.id),
            create_compliance_requirements_task.si(
                tenant_id=tenant_id, scan_id=str(scan_instance.id)
            ),
            generate_outputs.si(
                scan_id=str(scan_instance.id), provider_id=provider_id, tenant_id=tenant_id
            ),
        ).apply_async()
        _perform_scan_complete_tasks(tenant_id, str(scan_instance.id), provider_id)

        return result


@shared_task(name="scan-summary")
@shared_task(name="scan-summary", queue="overview")
def perform_scan_summary_task(tenant_id: str, scan_id: str):
    return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)

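The new `queue="overview"` option routes this task (and the compliance-overviews task below) to a dedicated Celery queue, which only runs if a worker consumes it. A hypothetical worker invocation (the Celery app path is an assumption, not taken from this diff):

```sh
# Consume the new "overview" queue alongside the default queue.
celery -A config.celery worker -Q overview,celery
```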
@@ -243,7 +249,7 @@ def delete_tenant_task(tenant_id: str):
    queue="scan-reports",
)
@set_tenant(keep_tenant=True)
def generate_outputs(scan_id: str, provider_id: str, tenant_id: str):
def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
    """
    Process findings in batches and generate output files in multiple formats.

@@ -381,7 +387,7 @@ def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):
    return backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(base=RLSTask, name="scan-compliance-overviews")
@shared_task(base=RLSTask, name="scan-compliance-overviews", queue="overview")
def create_compliance_requirements_task(tenant_id: str, scan_id: str):
    """
    Creates detailed compliance requirement records for a scan.

@@ -3,9 +3,9 @@ from unittest.mock import patch

import pytest
from django_celery_beat.models import IntervalSchedule, PeriodicTask
from rest_framework_json_api.serializers import ValidationError
from tasks.beat import schedule_provider_scan

from api.exceptions import ConflictException
from api.models import Scan


@@ -48,10 +48,29 @@ class TestScheduleProviderScan:
        with patch("tasks.tasks.perform_scheduled_scan_task.apply_async"):
            schedule_provider_scan(provider_instance)

        # Now, try scheduling again, should raise ValidationError
        with pytest.raises(ValidationError) as exc_info:
        # Now, try scheduling again, should raise ConflictException
        with pytest.raises(ConflictException) as exc_info:
            schedule_provider_scan(provider_instance)

        assert "There is already a scheduled scan for this provider." in str(
            exc_info.value
        )

    def test_remove_periodic_task(self, providers_fixture):
        provider_instance = providers_fixture[0]

        assert Scan.objects.count() == 0
        with patch("tasks.tasks.perform_scheduled_scan_task.apply_async"):
            schedule_provider_scan(provider_instance)

        assert Scan.objects.count() == 1
        scan = Scan.objects.first()
        periodic_task = scan.scheduler_task
        assert periodic_task is not None

        periodic_task.delete()

        scan.refresh_from_db()
        # Assert the scan still exists but its scheduler_task is set to None
        # Otherwise, Scan.DoesNotExist would be raised
        assert Scan.objects.get(id=scan.id).scheduler_task is None

@@ -7,11 +7,13 @@ import pytest
from tasks.jobs.scan import (
    _create_finding_delta,
    _store_resources,
    _update_resource_failed_findings_count,
    create_compliance_requirements,
    perform_prowler_scan,
)
from tasks.utils import CustomEncoder

from api.exceptions import ProviderConnectionError
from api.models import (
    ComplianceRequirementOverview,
    Finding,
@@ -158,6 +160,7 @@ class TestPerformScan:
        assert scan_finding.raw_result == finding.raw
        assert scan_finding.muted
        assert scan_finding.compliance == finding.compliance
        assert scan_finding.muted_reason == "Muted by mutelist"

        assert scan_resource.tenant == tenant
        assert scan_resource.uid == finding.resource_uid
@@ -203,7 +206,7 @@ class TestPerformScan:
        provider_id = str(provider.id)
        checks_to_execute = ["check1", "check2"]

        with pytest.raises(ValueError):
        with pytest.raises(ProviderConnectionError):
            perform_prowler_scan(tenant_id, scan_id, provider_id, checks_to_execute)

        scan.refresh_from_db()
@@ -399,9 +402,7 @@ class TestCreateComplianceRequirements:
    ):
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
            patch("tasks.jobs.scan.return_prowler_provider") as mock_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -427,9 +428,7 @@ class TestCreateComplianceRequirements:
                "us-east-1",
                "us-west-2",
            ]
            mock_initialize_prowler_provider.return_value = (
                mock_prowler_provider_instance
            )
            mock_prowler_provider.return_value = mock_prowler_provider_instance

            mock_compliance_template.__getitem__.return_value = {
                "cis_1.4_aws": {
@@ -512,9 +511,7 @@ class TestCreateComplianceRequirements:
    ):
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
            patch("tasks.jobs.scan.return_prowler_provider") as mock_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -557,9 +554,7 @@ class TestCreateComplianceRequirements:
                "us-east-1",
                "us-west-2",
            ]
            mock_initialize_prowler_provider.return_value = (
                mock_prowler_provider_instance
            )
            mock_prowler_provider.return_value = mock_prowler_provider_instance

            mock_compliance_template.__getitem__.return_value = {
                "test_compliance": {
@@ -607,9 +602,7 @@ class TestCreateComplianceRequirements:
    ):
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
            patch("tasks.jobs.scan.return_prowler_provider") as mock_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -641,9 +634,7 @@ class TestCreateComplianceRequirements:
            mock_prowler_provider_instance.get_regions.side_effect = AttributeError(
                "No get_regions method"
            )
            mock_initialize_prowler_provider.return_value = (
                mock_prowler_provider_instance
            )
            mock_prowler_provider.return_value = mock_prowler_provider_instance

            mock_compliance_template.__getitem__.return_value = {
                "kubernetes_cis": {
@@ -676,9 +667,7 @@ class TestCreateComplianceRequirements:
    ):
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
            patch("tasks.jobs.scan.return_prowler_provider") as mock_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -704,9 +693,7 @@ class TestCreateComplianceRequirements:

            mock_prowler_provider_instance = MagicMock()
            mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
            mock_initialize_prowler_provider.return_value = (
                mock_prowler_provider_instance
            )
            mock_prowler_provider.return_value = mock_prowler_provider_instance

            mock_compliance_template.__getitem__.return_value = {
                "cis_1.4_aws": {
@@ -743,9 +730,7 @@ class TestCreateComplianceRequirements:
    ):
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
            patch("tasks.jobs.scan.return_prowler_provider") as mock_prowler_provider,
        ):
            tenant = tenants_fixture[0]
            scan = scans_fixture[0]
@@ -759,7 +744,7 @@ class TestCreateComplianceRequirements:
            tenant_id = str(tenant.id)
            scan_id = str(scan.id)

            mock_initialize_prowler_provider.side_effect = Exception(
            mock_prowler_provider.side_effect = Exception(
                "Provider initialization failed"
            )

@@ -774,9 +759,7 @@ class TestCreateComplianceRequirements:
    ):
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
            patch("tasks.jobs.scan.return_prowler_provider") as mock_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -800,9 +783,7 @@ class TestCreateComplianceRequirements:

            mock_prowler_provider_instance = MagicMock()
            mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
            mock_initialize_prowler_provider.return_value = (
                mock_prowler_provider_instance
            )
            mock_prowler_provider.return_value = mock_prowler_provider_instance

            mock_compliance_template.__getitem__.return_value = {}

@@ -821,8 +802,8 @@ class TestCreateComplianceRequirements:
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
                "tasks.jobs.scan.return_prowler_provider"
            ) as mock_return_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -862,9 +843,7 @@ class TestCreateComplianceRequirements:

            mock_prowler_provider_instance = MagicMock()
            mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
            mock_initialize_prowler_provider.return_value = (
                mock_prowler_provider_instance
            )
            mock_return_prowler_provider.return_value = mock_prowler_provider_instance

            mock_compliance_template.__getitem__.return_value = {
                "cis_1.4_aws": {
@@ -898,8 +877,8 @@ class TestCreateComplianceRequirements:
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
                "tasks.jobs.scan.return_prowler_provider"
            ) as mock_return_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -911,7 +890,6 @@ class TestCreateComplianceRequirements:
        ):
            tenant = tenants_fixture[0]
            scan = scans_fixture[0]
            providers_fixture[0]

            mock_findings_filter.return_value = []

@@ -921,7 +899,7 @@ class TestCreateComplianceRequirements:
                "us-west-2",
                "eu-west-1",
            ]
            mock_initialize_prowler_provider.return_value = mock_prowler_provider
            mock_return_prowler_provider.return_value = mock_prowler_provider

            mock_compliance_template.__getitem__.return_value = {
                "test_compliance": {
@@ -990,8 +968,8 @@ class TestCreateComplianceRequirements:
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
                "tasks.jobs.scan.return_prowler_provider"
            ) as mock_return_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -1009,7 +987,7 @@ class TestCreateComplianceRequirements:

            mock_prowler_provider = MagicMock()
            mock_prowler_provider.get_regions.return_value = ["us-east-1", "us-west-2"]
            mock_initialize_prowler_provider.return_value = mock_prowler_provider
            mock_return_prowler_provider.return_value = mock_prowler_provider

            mock_compliance_template.__getitem__.return_value = {
                "test_compliance": {
@@ -1077,8 +1055,8 @@ class TestCreateComplianceRequirements:
        with (
            patch("api.db_utils.rls_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
                "tasks.jobs.scan.return_prowler_provider"
            ) as mock_return_prowler_provider,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE"
            ) as mock_compliance_template,
@@ -1090,13 +1068,12 @@ class TestCreateComplianceRequirements:
        ):
            tenant = tenants_fixture[0]
            scan = scans_fixture[0]
            providers_fixture[0]

            mock_findings_filter.return_value = []

            mock_prowler_provider = MagicMock()
            mock_prowler_provider.get_regions.return_value = ["us-east-1", "us-west-2"]
            mock_initialize_prowler_provider.return_value = mock_prowler_provider
            mock_return_prowler_provider.return_value = mock_prowler_provider

            mock_compliance_template.__getitem__.return_value = {
                "test_compliance": {
@@ -1190,3 +1167,85 @@ class TestCreateComplianceRequirements:
        assert len(req_2_objects) == 2
        assert all(obj.requirement_status == "PASS" for obj in req_1_objects)
        assert all(obj.requirement_status == "FAIL" for obj in req_2_objects)


@pytest.mark.django_db
class TestUpdateResourceFailedFindingsCount:
    @patch("api.models.Resource.all_objects.filter")
    @patch("api.models.Finding.all_objects.filter")
    def test_failed_findings_count_update(
        self,
        mock_finding_filter,
        mock_resource_filter,
        tenants_fixture,
        scans_fixture,
        providers_fixture,
    ):
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]

        scan.provider = provider
        scan.save()

        tenant_id = str(tenant.id)
        scan_id = str(scan.id)

        resource1 = MagicMock()
        resource1.uid = "res-1"
        resource1.failed_findings_count = None
        resource1.save = MagicMock()

        resource2 = MagicMock()
        resource2.uid = "res-2"
        resource2.failed_findings_count = None
        resource2.save = MagicMock()

        mock_resource_filter.return_value = [resource1, resource2]

        fake_subquery_qs = MagicMock()
        fake_subquery_qs.order_by.return_value = fake_subquery_qs
        fake_subquery_qs.values.return_value = fake_subquery_qs
        fake_subquery_qs.__getitem__.return_value = fake_subquery_qs

        def finding_filter_side_effect(*args, **kwargs):
            if "status" in kwargs:
                qs_count = MagicMock()
                if kwargs.get("resources") == resource1:
                    qs_count.count.return_value = 3
                else:
                    qs_count.count.return_value = 0
                return qs_count
            return fake_subquery_qs

        mock_finding_filter.side_effect = finding_filter_side_effect

        _update_resource_failed_findings_count(tenant_id, scan_id)

        # resource1 should have been updated to 3
        assert resource1.failed_findings_count == 3
        resource1.save.assert_called_once_with(update_fields=["failed_findings_count"])

        # resource2 should have been updated to 0
        assert resource2.failed_findings_count == 0
        resource2.save.assert_called_once_with(update_fields=["failed_findings_count"])

    @patch("api.models.Resource.all_objects.filter", return_value=[])
    @patch("api.models.Finding.all_objects.filter")
    def test_no_resources_no_error(
        self,
        mock_finding_filter,
        mock_resource_filter,
        tenants_fixture,
        scans_fixture,
        providers_fixture,
    ):
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]
        scan.provider = provider
        scan.save()

        _update_resource_failed_findings_count(str(tenant.id), str(scan.id))

        mock_finding_filter.assert_not_called()

@@ -3,9 +3,10 @@ from pathlib import Path
from unittest.mock import MagicMock, patch

import pytest
from tasks.tasks import generate_outputs
from tasks.tasks import _perform_scan_complete_tasks, generate_outputs_task


# TODO Move this to outputs/reports jobs
@pytest.mark.django_db
class TestGenerateOutputs:
    def setup_method(self):
@@ -17,7 +18,7 @@ class TestGenerateOutputs:
        with patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter:
            mock_filter.return_value.exists.return_value = False

            result = generate_outputs(
            result = generate_outputs_task(
                scan_id=self.scan_id,
                provider_id=self.provider_id,
                tenant_id=self.tenant_id,
@@ -99,7 +100,7 @@ class TestGenerateOutputs:
        mock_compress.return_value = "/tmp/zipped.zip"
        mock_upload.return_value = "s3://bucket/zipped.zip"

        result = generate_outputs(
        result = generate_outputs_task(
            scan_id=self.scan_id,
            provider_id=self.provider_id,
            tenant_id=self.tenant_id,
@@ -150,7 +151,7 @@ class TestGenerateOutputs:
            True,
        ]

        result = generate_outputs(
        result = generate_outputs_task(
            scan_id="scan",
            provider_id="provider",
            tenant_id=self.tenant_id,
@@ -208,7 +209,7 @@ class TestGenerateOutputs:
                {"aws": [(lambda x: True, MagicMock())]},
            ),
        ):
            generate_outputs(
            generate_outputs_task(
                scan_id=self.scan_id,
                provider_id=self.provider_id,
                tenant_id=self.tenant_id,
@@ -276,7 +277,7 @@ class TestGenerateOutputs:
                }
            },
        ):
            result = generate_outputs(
            result = generate_outputs_task(
                scan_id=self.scan_id,
                provider_id=self.provider_id,
                tenant_id=self.tenant_id,
@@ -346,7 +347,7 @@ class TestGenerateOutputs:
        ):
            mock_summary.return_value.exists.return_value = True

            result = generate_outputs(
            result = generate_outputs_task(
                scan_id=self.scan_id,
                provider_id=self.provider_id,
                tenant_id=self.tenant_id,
@@ -407,9 +408,31 @@ class TestGenerateOutputs:
            ),
        ):
            with caplog.at_level("ERROR"):
                generate_outputs(
                generate_outputs_task(
                    scan_id=self.scan_id,
                    provider_id=self.provider_id,
                    tenant_id=self.tenant_id,
                )
            assert "Error deleting output files" in caplog.text


class TestScanCompleteTasks:
    @patch("tasks.tasks.create_compliance_requirements_task.apply_async")
    @patch("tasks.tasks.perform_scan_summary_task.si")
    @patch("tasks.tasks.generate_outputs_task.si")
    def test_scan_complete_tasks(
        self, mock_outputs_task, mock_scan_summary_task, mock_compliance_tasks
    ):
        _perform_scan_complete_tasks("tenant-id", "scan-id", "provider-id")
        mock_compliance_tasks.assert_called_once_with(
            kwargs={"tenant_id": "tenant-id", "scan_id": "scan-id"},
        )
        mock_scan_summary_task.assert_called_once_with(
            scan_id="scan-id",
            tenant_id="tenant-id",
        )
        mock_outputs_task.assert_called_once_with(
            scan_id="scan-id",
            provider_id="provider-id",
            tenant_id="tenant-id",
        )

api/tests/performance/scenarios/compliance.py (new file, 128 lines)
@@ -0,0 +1,128 @@
import random
from collections import defaultdict

import requests
from locust import events, task
from utils.helpers import APIUserBase, get_api_token, get_auth_headers

GLOBAL = {
    "token": None,
    "available_scans_info": {},
}
SUPPORTED_COMPLIANCE_IDS = {
    "aws": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
    "gcp": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
    "azure": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
    "m365": ["cis_4.0", "iso27001_2022", "prowler_threatscore"],
}


def _get_random_scan() -> tuple:
    provider_type = random.choice(list(GLOBAL["available_scans_info"].keys()))
    scan_info = random.choice(GLOBAL["available_scans_info"][provider_type])
    return provider_type, scan_info


def _get_random_compliance_id(provider: str) -> str:
    return f"{random.choice(SUPPORTED_COMPLIANCE_IDS[provider])}_{provider}"


def _get_compliance_available_scans_by_provider_type(host: str, token: str) -> dict:
    excluded_providers = ["kubernetes"]

    response_dict = defaultdict(list)
    provider_response = requests.get(
        f"{host}/providers?fields[providers]=id,provider&filter[connected]=true",
        headers=get_auth_headers(token),
    )
    for provider in provider_response.json()["data"]:
        provider_id = provider["id"]
        provider_type = provider["attributes"]["provider"]
        if provider_type in excluded_providers:
            continue

        scan_response = requests.get(
            f"{host}/scans?fields[scans]=id&filter[provider]={provider_id}&filter[state]=completed",
            headers=get_auth_headers(token),
        )
        scan_data = scan_response.json()["data"]
        if not scan_data:
            continue
        scan_id = scan_data[0]["id"]
        response_dict[provider_type].append(scan_id)
    return response_dict


def _get_compliance_regions_from_scan(host: str, token: str, scan_id: str) -> list:
    response = requests.get(
        f"{host}/compliance-overviews/metadata?filter[scan_id]={scan_id}",
        headers=get_auth_headers(token),
    )
    assert response.status_code == 200, f"Failed to get scan: {response.text}"
    return response.json()["data"]["attributes"]["regions"]


@events.test_start.add_listener
def on_test_start(environment, **kwargs):
    GLOBAL["token"] = get_api_token(environment.host)
    scans_by_provider = _get_compliance_available_scans_by_provider_type(
        environment.host, GLOBAL["token"]
    )
    scan_info = defaultdict(list)
    for provider, scans in scans_by_provider.items():
        for scan in scans:
            scan_info[provider].append(
                {
                    "scan_id": scan,
                    "regions": _get_compliance_regions_from_scan(
                        environment.host, GLOBAL["token"], scan
                    ),
                }
            )
    GLOBAL["available_scans_info"] = scan_info


class APIUser(APIUserBase):
    def on_start(self):
        self.token = GLOBAL["token"]

    @task(3)
    def compliance_overviews_default(self):
        provider_type, scan_info = _get_random_scan()
        name = f"/compliance-overviews ({provider_type})"
        endpoint = f"/compliance-overviews?" f"filter[scan_id]={scan_info['scan_id']}"
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(2)
    def compliance_overviews_region(self):
        provider_type, scan_info = _get_random_scan()
        name = f"/compliance-overviews?filter[region] ({provider_type})"
        endpoint = (
            f"/compliance-overviews"
            f"?filter[scan_id]={scan_info['scan_id']}"
            f"&filter[region]={random.choice(scan_info['regions'])}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(2)
    def compliance_overviews_requirements(self):
        provider_type, scan_info = _get_random_scan()
        compliance_id = _get_random_compliance_id(provider_type)
        name = f"/compliance-overviews/requirements ({compliance_id})"
        endpoint = (
            f"/compliance-overviews/requirements"
            f"?filter[scan_id]={scan_info['scan_id']}"
            f"&filter[compliance_id]={compliance_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def compliance_overviews_attributes(self):
        provider_type, _ = _get_random_scan()
        compliance_id = _get_random_compliance_id(provider_type)
        name = f"/compliance-overviews/attributes ({compliance_id})"
        endpoint = (
            f"/compliance-overviews/attributes"
            f"?filter[compliance_id]={compliance_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
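To run this scenario locally, an invocation along these lines should work (the host URL is a placeholder and `get_api_token` is assumed to handle credentials):

```sh
locust -f api/tests/performance/scenarios/compliance.py --host http://localhost:8080/api/v1
```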
@@ -4,7 +4,10 @@ from dash import html


def create_provider_card(
    provider: str, provider_logo: str, account_type: str, filtered_data
    provider: str,
    provider_logo: str,
    account_type: str,
    filtered_data,
) -> List[html.Div]:
    """
    Card to display the provider's name and icon.

@@ -245,6 +245,31 @@ def create_service_dropdown(services: list) -> html.Div:
    )


def create_provider_dropdown(providers: list) -> html.Div:
    """
    Dropdown to select the provider.
    Args:
        providers (list): List of providers.
    Returns:
        html.Div: Dropdown to select the provider.
    """
    return html.Div(
        [
            html.Label(
                "Provider:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="provider-filter",
                options=[{"label": i, "value": i} for i in providers],
                value=["All"],
                clearable=False,
                multi=True,
                style={"color": "#000000"},
            ),
        ],
    )


def create_status_dropdown(status: list) -> html.Div:
    """
    Dropdown to select the status.

@@ -9,9 +9,11 @@ def create_layout_overview(
    download_button_xlsx: html.Button,
    severity_dropdown: html.Div,
    service_dropdown: html.Div,
    provider_dropdown: html.Div,
    table_row_dropdown: html.Div,
    status_dropdown: html.Div,
    table_div_header: html.Div,
    amount_providers: int,
) -> html.Div:
    """
    Create the layout of the dashboard.
@@ -47,9 +49,10 @@
            [
                html.Div([severity_dropdown], className=""),
                html.Div([service_dropdown], className=""),
                html.Div([provider_dropdown], className=""),
                html.Div([status_dropdown], className=""),
            ],
            className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-3",
            className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-4",
        ),
        html.Div(
            [
@@ -59,7 +62,7 @@
                html.Div(className="flex", id="k8s_card", n_clicks=0),
                html.Div(className="flex", id="m365_card", n_clicks=0),
            ],
            className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-5",
            className=f"grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-{amount_providers}",
        ),
        html.H4(
            "Count of Findings by severity",

@@ -346,34 +346,27 @@ def display_data(
        if item == "nan" or item.__class__.__name__ != "str":
            region_filter_options.remove(item)

    # Convert ASSESSMENTDATE to datetime
    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"], errors="coerce")
    data["ASSESSMENTDATE"] = data["ASSESSMENTDATE"].dt.strftime("%Y-%m-%d %H:%M:%S")
    data["ASSESSMENTDAY"] = data["ASSESSMENTDATE"].dt.date

    # Choosing the date that is the most recent
    data_values = data["ASSESSMENTDATE"].unique()
    data_values.sort()
    data_values = data_values[::-1]
    aux = []
    # Find the latest timestamp per account per day
    latest_per_account_day = data.groupby(["ACCOUNTID", "ASSESSMENTDAY"])[
        "ASSESSMENTDATE"
    ].transform("max")

    data_values = [str(i) for i in data_values]
    for value in data_values:
        if value.split(" ")[0] not in [aux[i].split(" ")[0] for i in range(len(aux))]:
            aux.append(value)
    data_values = [str(i) for i in aux]
    # Keep only rows with the latest timestamp for each account and day
    data = data[data["ASSESSMENTDATE"] == latest_per_account_day]

    data = data[data["ASSESSMENTDATE"].isin(data_values)]
    data["ASSESSMENTDATE"] = data["ASSESSMENTDATE"].apply(lambda x: x.split(" ")[0])
    # Prepare the date filter options (unique days, as strings)
    options_date = sorted(data["ASSESSMENTDAY"].astype(str).unique(), reverse=True)

    options_date = data["ASSESSMENTDATE"].unique()
    options_date.sort()
    options_date = options_date[::-1]

    # Filter DATE
    # Filter by selected date (as string)
    if date_filter_analytics in options_date:
        data = data[data["ASSESSMENTDATE"] == date_filter_analytics]
        data = data[data["ASSESSMENTDAY"].astype(str) == date_filter_analytics]
    else:
        date_filter_analytics = options_date[0]
        data = data[data["ASSESSMENTDATE"] == date_filter_analytics]
        data = data[data["ASSESSMENTDAY"].astype(str) == date_filter_analytics]

    if data.empty:
        fig = px.pie()

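The replacement logic above relies on pandas' groupby-transform idiom to keep the most recent row per group. A self-contained toy sketch of the same pattern (the data is invented for illustration):

```python
import pandas as pd

df = pd.DataFrame(
    {
        "ACCOUNTID": ["a", "a", "b"],
        "ASSESSMENTDAY": ["2024-01-01", "2024-01-01", "2024-01-01"],
        "ASSESSMENTDATE": pd.to_datetime(
            ["2024-01-01 10:00", "2024-01-01 12:00", "2024-01-01 09:00"]
        ),
    }
)

# transform("max") broadcasts each group's maximum back onto every row,
# so the comparison keeps only the latest row per (account, day) group.
latest = df.groupby(["ACCOUNTID", "ASSESSMENTDAY"])["ASSESSMENTDATE"].transform("max")
df = df[df["ASSESSMENTDATE"] == latest]
```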
@@ -1,5 +1,4 @@
# Standard library imports
import csv
import glob
import json
import os
@@ -20,7 +19,6 @@ from dash.dependencies import Input, Output
# Config import
from dashboard.config import (
    critical_color,
    encoding_format,
    fail_color,
    folder_path_overview,
    high_color,
@@ -38,6 +36,7 @@ from dashboard.lib.cards import create_provider_card
from dashboard.lib.dropdowns import (
    create_account_dropdown,
    create_date_dropdown,
    create_provider_dropdown,
    create_region_dropdown,
    create_service_dropdown,
    create_severity_dropdown,
@@ -45,6 +44,7 @@ from dashboard.lib.dropdowns import (
    create_table_row_dropdown,
)
from dashboard.lib.layouts import create_layout_overview
from prowler.lib.logger import logger

# Suppress warnings
warnings.filterwarnings("ignore")
@@ -54,11 +54,13 @@ warnings.filterwarnings("ignore")
csv_files = []

for file in glob.glob(os.path.join(folder_path_overview, "*.csv")):
    with open(file, "r", newline="", encoding=encoding_format) as csvfile:
        reader = csv.reader(csvfile)
        num_rows = sum(1 for row in reader)
    try:
        df = pd.read_csv(file, sep=";")
        num_rows = len(df)
        if num_rows > 1:
            csv_files.append(file)
    except Exception:
        logger.error(f"Error reading file {file}")


# Import logos providers
@@ -190,7 +192,13 @@ else:
    data.rename(columns={"RESOURCE_ID": "RESOURCE_UID"}, inplace=True)

    # Remove duplicates on the finding_uid column but keep the last one, taking the timestamp into account
    data = data.sort_values("TIMESTAMP").drop_duplicates("FINDING_UID", keep="last")
    data["DATE"] = data["TIMESTAMP"].dt.date
    data = (
        data.sort_values("TIMESTAMP")
        .groupby(["DATE", "FINDING_UID"], as_index=False)
        .last()
    )
    data["TIMESTAMP"] = pd.to_datetime(data["TIMESTAMP"])

    data["ASSESSMENT_TIME"] = data["TIMESTAMP"].dt.strftime("%Y-%m-%d")
    data_valid = pd.DataFrame()
@@ -298,6 +306,13 @@ else:

    service_dropdown = create_service_dropdown(services)

    # Provider Dropdown
    providers = ["All"] + list(data["PROVIDER"].unique())
    providers = [
        x for x in providers if str(x) != "nan" and x.__class__.__name__ == "str"
    ]
    provider_dropdown = create_provider_dropdown(providers)

    # Create the download button
    download_button_csv = html.Button(
        "Download this table as CSV",
@@ -479,9 +494,11 @@ else:
        download_button_xlsx,
        severity_dropdown,
        service_dropdown,
        provider_dropdown,
        table_row_dropdown,
        status_dropdown,
        table_div_header,
        len(data["PROVIDER"].unique()),
    )


@@ -508,6 +525,8 @@ else:
    Output("severity-filter", "value"),
    Output("severity-filter", "options"),
    Output("service-filter", "value"),
    Output("provider-filter", "value"),
    Output("provider-filter", "options"),
    Output("service-filter", "options"),
    Output("table-rows", "value"),
    Output("table-rows", "options"),
@@ -526,6 +545,7 @@ else:
    Input("download_link_xlsx", "n_clicks"),
    Input("severity-filter", "value"),
    Input("service-filter", "value"),
    Input("provider-filter", "value"),
    Input("table-rows", "value"),
    Input("status-filter", "value"),
    Input("search-input", "value"),
@@ -549,6 +569,7 @@ def filter_data(
    n_clicks_xlsx,
    severity_values,
    service_values,
    provider_values,
    table_row_values,
    status_values,
    search_value,
@@ -874,6 +895,25 @@ def filter_data(
        filtered_data["SERVICE_NAME"].isin(updated_service_values)
    ]

    provider_filter_options = ["All"] + list(filtered_data["PROVIDER"].unique())

    # Filter Provider
    if provider_values == ["All"]:
        updated_provider_values = filtered_data["PROVIDER"].unique()
    elif "All" in provider_values and len(provider_values) > 1:
        # Remove 'All' from the list
        provider_values.remove("All")
        updated_provider_values = provider_values
    elif len(provider_values) == 0:
        updated_provider_values = filtered_data["PROVIDER"].unique()
        provider_values = ["All"]
    else:
        updated_provider_values = provider_values

    filtered_data = filtered_data[
        filtered_data["PROVIDER"].isin(updated_provider_values)
    ]

    # Filter Status
    if status_values == ["All"]:
        updated_status_values = filtered_data["STATUS"].unique()
@@ -1094,25 +1134,17 @@ def filter_data(

    table_row_options = []

    # Take the values from the table_row_values
    # Calculate table row options as percentages
    percentages = [0.05, 0.10, 0.25, 0.50, 0.75, 1.0]
    total_rows = len(filtered_data)
    for pct in percentages:
        value = max(1, int(total_rows * pct))
        label = f"{int(pct * 100)}%"
        table_row_options.append({"label": label, "value": value})

    # Default to 25% if not set
    if table_row_values is None or table_row_values == -1:
        if len(filtered_data) < 25:
            table_row_values = len(filtered_data)
        else:
            table_row_values = 25

    if len(filtered_data) < 25:
        table_row_values = len(filtered_data)

    if len(filtered_data) >= 25:
        table_row_options.append(25)
    if len(filtered_data) >= 50:
        table_row_options.append(50)
    if len(filtered_data) >= 75:
        table_row_options.append(75)
    if len(filtered_data) >= 100:
        table_row_options.append(100)
    table_row_options.append(len(filtered_data))
        table_row_values = table_row_options[0]["value"]

    # For the values that are nan or none, replace them with ""
    filtered_data = filtered_data.replace({np.nan: ""})
@@ -1347,21 +1379,36 @@ def filter_data(
    ]

    # Create Provider Cards
    aws_card = create_provider_card(
        "aws", aws_provider_logo, "Accounts", full_filtered_data
    )
    azure_card = create_provider_card(
        "azure", azure_provider_logo, "Subscriptions", full_filtered_data
    )
    gcp_card = create_provider_card(
        "gcp", gcp_provider_logo, "Projects", full_filtered_data
    )
    k8s_card = create_provider_card(
        "kubernetes", ks8_provider_logo, "Clusters", full_filtered_data
    )
    m365_card = create_provider_card(
        "m365", m365_provider_logo, "Accounts", full_filtered_data
    )
    if "aws" in list(data["PROVIDER"].unique()):
        aws_card = create_provider_card(
            "aws", aws_provider_logo, "Accounts", full_filtered_data
        )
    else:
        aws_card = None
    if "azure" in list(data["PROVIDER"].unique()):
        azure_card = create_provider_card(
            "azure", azure_provider_logo, "Subscriptions", full_filtered_data
        )
    else:
        azure_card = None
    if "gcp" in list(data["PROVIDER"].unique()):
        gcp_card = create_provider_card(
            "gcp", gcp_provider_logo, "Projects", full_filtered_data
        )
    else:
        gcp_card = None
    if "kubernetes" in list(data["PROVIDER"].unique()):
        k8s_card = create_provider_card(
            "kubernetes", ks8_provider_logo, "Clusters", full_filtered_data
        )
    else:
        k8s_card = None
    if "m365" in list(data["PROVIDER"].unique()):
        m365_card = create_provider_card(
            "m365", m365_provider_logo, "Accounts", full_filtered_data
        )
    else:
        m365_card = None

    # Subscribe to Prowler Cloud card
    subscribe_card = [
@@ -1445,6 +1492,8 @@ def filter_data(
    severity_values,
    severity_filter_options,
    service_values,
    provider_values,
    provider_filter_options,
    service_filter_options,
    table_row_values,
    table_row_options,
@@ -212,34 +212,9 @@ Each check **must** populate the report with an unique identifier for the audited
- Resource Name — `report.resource_name`.
    - The name of the GitHub resource. In the case of a repository, this is just the repository name. For full repository names use the resource `full_name`.

### Using the Audit Configuration
### Configurable Checks in Prowler

Prowler has a [configuration file](../tutorials/configuration_file.md) which is used to pass certain configuration values to the checks. For example:

```python title="ec2_securitygroup_with_many_ingress_egress_rules.py"
class ec2_securitygroup_with_many_ingress_egress_rules(Check):
    def execute(self):
        findings = []

        max_security_group_rules = ec2_client.audit_config.get(
            "max_security_group_rules", 50
        )
        for security_group_arn, security_group in ec2_client.security_groups.items():
```

We use the `audit_config` object to retrieve the value of `max_security_group_rules`, which takes the default value of 50 if the configuration value is not present.

The configuration file is located at [`prowler/config/config.yaml`](https://github.com/prowler-cloud/prowler/blob/master/prowler/config/config.yaml) and is used to pass certain configuration values to the checks. For example:

```yaml title="config.yaml"
aws:
  max_security_group_rules: 50
```

This `audit_config` object is a Python dictionary that stores values read from the configuration file. It can be accessed by the check using the `audit_config` attribute of the service client.

???+ note
    Always use the `dictionary.get(value, default)` syntax to ensure a default value is set when the configuration value is not present.
See [Configurable Checks](./configurable-checks.md) for detailed information on making checks configurable using the `audit_config` object and configuration file.

## Metadata Structure for Prowler Checks

docs/developer-guide/configurable-checks.md (new file, 46 lines)
@@ -0,0 +1,46 @@
# Configurable Checks in Prowler

Prowler empowers users to extend and adapt cloud security coverage by making checks configurable through the use of the `audit_config` object. This approach enables customization of checks to meet specific requirements through a configuration file.

## Understanding the `audit_config` Object

The `audit_config` object is a dictionary attached to each provider's service client (for example, `<service_name>_client.audit_config`). This object loads configuration values from the main configuration file (`prowler/config/config.yaml`). Use `audit_config` to make checks flexible and user-configurable.

## Using `audit_config` to Configure Checks

Retrieve configuration values in a check by using the `.get()` method on the `audit_config` object. For example, to get the minimum number of Availability Zones for Lambda from the configuration file, use the following code. If the value is not set in the configuration, the check defaults to 2:

```python
LAMBDA_MIN_AZS = awslambda_client.audit_config.get("lambda_min_azs", 2)
```

Always provide a default value in `.get()` to ensure the check works even if the configuration is missing the variable.

### Example: Security Group Rule Limit

```python title="ec2_securitygroup_with_many_ingress_egress_rules.py"
class ec2_securitygroup_with_many_ingress_egress_rules(Check):
    def execute(self):
        findings = []
        max_security_group_rules = ec2_client.audit_config.get(
            "max_security_group_rules", 50
        )
        for security_group_arn, security_group in ec2_client.security_groups.items():
            # ... check logic ...
```

## Required File Updates for Configurable Variables

When adding a new configurable check to Prowler, update the following files:

- **Configuration File:** Add the new variable under the relevant provider or service section in `prowler/config/config.yaml`.
    ```yaml
    # aws.awslambda_function_vpc_multi_az
    lambda_min_azs: 2
    ```
- **Test Fixtures:** If tests depend on this configuration, add the variable to `tests/config/fixtures/config.yaml` (see the fixture sketch after this list).
- **Documentation:** Document the new variable in the list of configurable checks in `docs/tutorials/configuration_file.md`.
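A minimal sketch of the corresponding test fixture entry (the surrounding structure of `tests/config/fixtures/config.yaml` is assumed to mirror the main configuration file):

```yaml
# tests/config/fixtures/config.yaml (hypothetical excerpt)
aws:
  # aws.awslambda_function_vpc_multi_az
  lambda_min_azs: 2
```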

For a complete list of checks that already support configuration, see the [Configuration File Tutorial](../tutorials/configuration_file.md).

This approach ensures that checks are easily configurable, making Prowler highly adaptable to different environments and requirements.
@@ -164,3 +164,13 @@ If you enjoy swag, we’d love to thank you for your contribution with laptop st
To request swag: Share your pull request details in our [Slack workspace](https://goto.prowler.com/slack).

You can also reach out to Toni de la Fuente on [Twitter](https://twitter.com/ToniBlyx)—his DMs are open!

# Testing a Pull Request from a Specific Branch

To test Prowler from a specific branch (for example, to try out changes from a pull request before it is merged), you can use `pipx` to install directly from GitHub:

```sh
pipx install "git+https://github.com/prowler-cloud/prowler.git@branch-name"
```

Replace `branch-name` with the name of the branch you want to test. This will install Prowler in an isolated environment, allowing you to try out the changes safely.
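To confirm the branch build afterwards (assuming the standard `prowler` entry point is on your PATH):

```sh
prowler --version
```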
docs/developer-guide/lighthouse.md (new file, 134 lines)
@@ -0,0 +1,134 @@
# Extending Prowler Lighthouse

This guide helps developers customize and extend Prowler Lighthouse by adding or modifying AI agents.

## Understanding AI Agents

AI agents combine Large Language Models (LLMs) with specialized tools that provide environmental context. These tools can include API calls, system command execution, or any function-wrapped capability.

### Types of AI Agents

AI agents fall into two main categories:

- **Autonomous Agents**: Freely choose from available tools to complete tasks, adapting their approach based on context. They decide which tools to use and when.
- **Workflow Agents**: Follow structured paths with predefined logic. They execute specific tool sequences and can include conditional logic.

Prowler Lighthouse is an autonomous agent, selecting the right tool(s) based on the user's query.

???+ note
    To learn more about AI agents, read [Anthropic's blog post on building effective agents](https://www.anthropic.com/engineering/building-effective-agents).

### LLM Dependency

The autonomous nature of agents depends on the underlying LLM. Autonomous agents using identical system prompts and tools but powered by different LLM providers might approach user queries differently. An agent with one LLM might solve a problem efficiently, while with another it might take a different route or fail entirely.

After evaluating multiple LLM providers (OpenAI, Gemini, Claude, Llama) based on tool calling features and response accuracy, we recommend using the `gpt-4o` model.
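For instance, wiring up the recommended model with LangChain's OpenAI integration might look like this (a sketch; the exact option names depend on your `@langchain/openai` version):

```js
import { ChatOpenAI } from "@langchain/openai";

// LLM instance shared by the supervisor and the specialized agents.
const llm = new ChatOpenAI({ model: "gpt-4o", temperature: 0 });
```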
## Prowler Lighthouse Architecture

Prowler Lighthouse uses a multi-agent architecture orchestrated by the [Langgraph-Supervisor](https://www.npmjs.com/package/@langchain/langgraph-supervisor) library.

### Architecture Components

<img src="../../tutorials/img/lighthouse-architecture.png" alt="Prowler Lighthouse architecture">

Prowler Lighthouse integrates with the NextJS application:

- The [Langgraph-Supervisor](https://www.npmjs.com/package/@langchain/langgraph-supervisor) library integrates directly with NextJS
- The system uses the authenticated user session to interact with the Prowler API server
- Agents only access data the current user is authorized to view
- Session management operates automatically, ensuring Role-Based Access Control (RBAC) is maintained

## Available Prowler AI Agents

The following specialized AI agents are available in Prowler:

### Agent Overview

- **provider_agent**: Fetches information about cloud providers connected to Prowler
- **user_info_agent**: Retrieves information about Prowler users
- **scans_agent**: Fetches information about Prowler scans
- **compliance_agent**: Retrieves compliance overviews across scans
- **findings_agent**: Fetches information about individual findings across scans
- **overview_agent**: Retrieves overview information (providers, findings by status and severity, etc.)

## How to Add New Capabilities

### Updating the Supervisor Prompt

The supervisor agent controls system behavior, tone, and capabilities. You can find the supervisor prompt at: [https://github.com/prowler-cloud/prowler/blob/master/ui/lib/lighthouse/prompts.ts](https://github.com/prowler-cloud/prowler/blob/master/ui/lib/lighthouse/prompts.ts)

#### Supervisor Prompt Modifications

Modifying the supervisor prompt allows you to:

- Change personality or response style
- Add new high-level capabilities
- Modify task delegation to specialized agents
- Set up guardrails (query types to answer or decline)

???+ note
    The supervisor agent should not have its own tools. This design keeps the system modular and maintainable.

### How to Create New Specialized Agents

The supervisor agent and all specialized agents are defined in the `route.ts` file. The supervisor agent uses [langgraph-supervisor](https://www.npmjs.com/package/@langchain/langgraph-supervisor), while other agents use the prebuilt [create-react-agent](https://langchain-ai.github.io/langgraphjs/how-tos/create-react-agent/).

To add new capabilities or allow Lighthouse to interact with other APIs, create additional specialized agents:

1. First determine what the new agent would do. Create a detailed prompt defining the agent's purpose and capabilities. You can see an example [here](https://github.com/prowler-cloud/prowler/blob/master/ui/lib/lighthouse/prompts.ts#L359-L385).
    ???+ note
        Ensure that the new agent's capabilities don't collide with existing agents. For example, if there's already a *findings_agent* that talks to findings APIs, don't create a new agent to do the same.

2. Create the necessary tools for the agents to access specific data or perform actions. A tool is a specialized function that extends the capabilities of the LLM by allowing it to access external data or APIs. A tool is invoked by the LLM based on the tool's description and the user's query; a minimal sketch of a tool definition is shown below.
    For example, the description of `getScanTool` is "Fetches detailed information about a specific scan by its ID." If the description doesn't convey what the tool is capable of doing, the LLM will not invoke the function. If the description of `getScanTool` was set to something random or not set at all, the LLM will not answer queries like "Give me the critical issues from the scan ID xxxxxxxxxxxxxxx".
    ???+ note
        Ensure that each tool is added to one agent only. Adding tools is optional. There can be agents with no tools at all.
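    A minimal sketch of such a tool definition (the imports, schema, and fetch URL are illustrative assumptions, not code from this repository):
    ```js
    import { tool } from "@langchain/core/tools";
    import { z } from "zod";

    // Hypothetical tool: the description is what tells the LLM when to invoke it.
    const getScanTool = tool(
      async ({ scanId }) => {
        const response = await fetch(`/api/scans/${scanId}`);
        return JSON.stringify(await response.json());
      },
      {
        name: "getScan",
        description: "Fetches detailed information about a specific scan by its ID.",
        schema: z.object({ scanId: z.string() }),
      }
    );
    ```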
3. Use the `createReactAgent` function to define a new agent. For example, the following `rolesAgent` is named "roles_agent" and can call the tools *getRolesTool* and *getRoleTool*:
    ```js
    const rolesAgent = createReactAgent({
      llm: llm,
      tools: [getRolesTool, getRoleTool],
      name: "roles_agent",
      prompt: rolesAgentPrompt,
    });
    ```

4. Create a detailed prompt defining the agent's purpose and capabilities.

5. Add the new agent to the available agents list:
    ```js
    const agents = [
      userInfoAgent,
      providerAgent,
      overviewAgent,
      scansAgent,
      complianceAgent,
      findingsAgent,
      rolesAgent, // New agent added here
    ];
    // Create supervisor workflow
    const workflow = createSupervisor({
      agents: agents,
      llm: supervisorllm,
      prompt: supervisorPrompt,
      outputMode: "last_message",
    });
    ```

6. Update the supervisor's system prompt to summarize the new agent's capabilities.

### Best Practices for Agent Development

When developing new agents or capabilities:

- **Clear Responsibility Boundaries**: Each agent should have a defined purpose with minimal overlap. No two agents should share the same tools, and no two tools should call the same Prowler APIs.
- **Minimal Data Access**: Agents should only request the data they need, keeping requests specific to minimize context window usage, cost, and response time.
- **Thorough Prompting:** Ensure agent prompts include clear instructions about:
    - The agent's purpose and limitations
    - How to use its tools
    - How to format responses for the supervisor
    - Error handling procedures (optional)
- **Security Considerations:** Agents should never modify data or access sensitive information like secrets or credentials.
- **Testing:** Thoroughly test new agents with various queries before deploying to production.
@@ -139,7 +139,7 @@ Prowler for M365 currently supports the following authentication types:
???+ warning
    For Prowler App only the Service Principal with User Credentials authentication method is supported.

### Service Principal authentication
### Service Principal authentication (recommended)

Authentication flag: `--sp-env-auth`

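For context, a typical invocation with this flag looks like the following sketch (the variable values are placeholders; the exact set of required variables is the one listed in the section below):

```sh
# Hypothetical example: export the service principal credentials, then run Prowler.
export AZURE_CLIENT_ID="XXXXXXXXX"
export AZURE_CLIENT_SECRET="XXXXXXXXX"
export AZURE_TENANT_ID="XXXXXXXXX"
prowler m365 --sp-env-auth
```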
@@ -154,9 +154,11 @@ export AZURE_TENANT_ID="XXXXXXXXX"
|
||||
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
|
||||
Follow the instructions in the [Create Prowler Service Principal](../tutorials/microsoft365/getting-started-m365.md#create-the-service-principal-app) section to create a service principal.
|
||||
|
||||
With this credentials you will only be able to run the checks that work through MS Graph, this means that you won't run all the provider. If you want to scan all the checks from M365 you will need to use the recommended authentication method.
|
||||
If you don't add the external API permissions described in the mentioned section above you will only be able to run the checks that work through MS Graph. This means that you won't run all the provider.
|
||||
|
||||
### Service Principal and User Credentials authentication (recommended)
|
||||
If you want to scan all the checks from M365 you will need to add the required permissions to the service principal application. Refer to the [Needed permissions](/docs/tutorials/microsoft365/getting-started-m365.md#needed-permissions) section for more information.
|
||||
|
||||
### Service Principal and User Credentials authentication
|
||||
|
||||
Authentication flag: `--env-auth`
|
||||
|
||||
@@ -170,9 +172,10 @@ export M365_USER="your_email@example.com"
export M365_PASSWORD="examplepassword"
```

These two new environment variables are **required** to execute the PowerShell modules needed to retrieve information from M365 services. Prowler uses Service Principal authentication to access Microsoft Graph and user credentials to authenticate to Microsoft PowerShell modules.
These two new environment variables are **required** in this authentication method to execute the PowerShell modules needed to retrieve information from M365 services. Prowler uses Service Principal authentication to access Microsoft Graph and user credentials to authenticate to Microsoft PowerShell modules.

- `M365_USER` should be your Microsoft account email using the **assigned domain in the tenant**. This means it must look like `example@YourCompany.onmicrosoft.com` or `example@YourCompany.com`, but it must be the exact domain assigned to that user in the tenant.

???+ warning
    If the user is newly created, you need to sign in with that account first, as Microsoft will prompt you to change the password. If you don’t complete this step, user authentication will fail because Microsoft marks the initial password as expired.

@@ -205,27 +208,56 @@ Since this is a delegated permission authentication method, necessary permission

### Needed permissions

Prowler for M365 requires two types of permission scopes to be set (if you want to run the full provider including PowerShell checks). Both must be configured using Microsoft Entra ID:
Prowler for M365 requires different permission scopes depending on the authentication method you choose. The permissions must be configured using Microsoft Entra ID:

- **Service Principal Application Permissions**: These are set at the **application** level and are used to retrieve data from the identity being assessed:
    - `AuditLog.Read.All`: Required for Entra service.
    - `Directory.Read.All`: Required for all services.
    - `Policy.Read.All`: Required for all services.
    - `SharePointTenantSettings.Read.All`: Required for SharePoint service.
    - `User.Read` (IMPORTANT: this must be set as **delegated**): Required for the sign-in.
#### For Service Principal Authentication (`--sp-env-auth`) - Recommended

???+ note
    You can replace `Directory.Read.All` with `Domain.Read.All`, which is a more restrictive permission, but you won't be able to run the Entra checks related to DirectoryRoles and GetUsers.
When using service principal authentication, you need to add the following **Application Permissions** to your app registration:

> If you do this you will also need to add the `Organization.Read.All` permission to the service principal application in order to authenticate.
**Microsoft Graph API Permissions:**
- `AuditLog.Read.All`: Required for Entra service.
- `Directory.Read.All`: Required for all services.
- `Policy.Read.All`: Required for all services.
- `SharePointTenantSettings.Read.All`: Required for SharePoint service.
- `User.Read` (IMPORTANT: this must be set as **delegated**): Required for the sign-in.

**External API Permissions:**
- `Exchange.ManageAsApp` from external API `Office 365 Exchange Online`: Required for Exchange PowerShell module app authentication. You also need to assign the `Exchange Administrator` role to the app.
- `application_access` from external API `Skype and Teams Tenant Admin API`: Required for Teams PowerShell module app authentication.

???+ note
    You can replace `Directory.Read.All` with `Domain.Read.All`, which is a more restrictive permission, but you won't be able to run the Entra checks related to DirectoryRoles and GetUsers.

- **PowerShell Modules Permissions**: These are set at the `M365_USER` level, so the user used to run Prowler must have one of the following roles:
    - `Global Reader` (recommended): this allows you to read everything needed.
    - `Exchange Administrator` and `Teams Administrator`: the user needs both roles, but with these [roles](https://learn.microsoft.com/en-us/exchange/permissions-exo/permissions-exo#microsoft-365-permissions-in-exchange-online) you can access the same information as a Global Reader (since only read access is needed, Global Reader is recommended).
> If you do this you will also need to add the `Organization.Read.All` permission to the service principal application in order to authenticate.

In order to know how to assign those permissions and roles, follow the instructions in the Microsoft Entra ID [permissions](../tutorials/microsoft365/getting-started-m365.md#grant-required-api-permissions) and [roles](../tutorials/microsoft365/getting-started-m365.md#assign-required-roles-to-your-user) section.
???+ warning
    With service principal only authentication, you can only run checks that work through MS Graph API. Some checks that require PowerShell modules will not be executed.

#### For Service Principal + User Credentials Authentication (`--env-auth`)

When using service principal with user credentials authentication, you need **both** sets of permissions:

**1. Service Principal Application Permissions**:
- You **will need** all the Microsoft Graph API permissions listed above.
- You **won't need** the External API permissions listed above.

**2. User-Level Permissions**: These are set at the `M365_USER` level, so the user used to run Prowler must have one of the following roles:
- `Global Reader` (recommended): this allows you to read everything needed.
- `Exchange Administrator` and `Teams Administrator`: the user needs both roles, but with these [roles](https://learn.microsoft.com/en-us/exchange/permissions-exo/permissions-exo#microsoft-365-permissions-in-exchange-online) you can access the same information as a Global Reader (since only read access is needed, Global Reader is recommended).

???+ note
    This is the **recommended authentication method** because it allows you to run the full M365 provider including PowerShell checks, providing complete coverage of all available security checks.

#### For Browser Authentication (`--browser-auth`)

When using browser authentication, permissions are delegated to the user, so the user must have the appropriate permissions rather than the application.

???+ warning
    With browser authentication, you will only be able to run checks that work through MS Graph API. PowerShell module checks will not be executed.

---

**To assign these permissions and roles**, follow the instructions in the Microsoft Entra ID [permissions](../tutorials/microsoft365/getting-started-m365.md#grant-required-api-permissions) and [roles](../tutorials/microsoft365/getting-started-m365.md#assign-required-roles-to-your-user) section.

### Supported PowerShell versions
@@ -439,6 +471,7 @@ The required modules are:

- [ExchangeOnlineManagement](https://www.powershellgallery.com/packages/ExchangeOnlineManagement/3.6.0): Minimum version 3.6.0. Required for several checks across Exchange, Defender, and Purview.
- [MicrosoftTeams](https://www.powershellgallery.com/packages/MicrosoftTeams/6.6.0): Minimum version 6.6.0. Required for all Teams checks.
- [MSAL.PS](https://www.powershellgallery.com/packages/MSAL.PS/4.32.0): Required for Exchange module via application authentication (installation sketched below).
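If any of these modules are missing, they can be installed from the PowerShell Gallery. A minimal sketch using `Install-Module` with the minimum versions listed above (adjust scope and flags to your environment):

```console
pwsh -Command 'Install-Module ExchangeOnlineManagement -MinimumVersion 3.6.0 -Scope CurrentUser -Force'
pwsh -Command 'Install-Module MicrosoftTeams -MinimumVersion 6.6.0 -Scope CurrentUser -Force'
pwsh -Command 'Install-Module MSAL.PS -Scope CurrentUser -Force'
```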
## GitHub
### Authentication
@@ -455,3 +488,34 @@ The provided credentials must have the appropriate permissions to perform all th

???+ note
    GitHub App Credentials support fewer checks than other authentication methods.

## Infrastructure as Code (IaC)

Prowler's Infrastructure as Code (IaC) provider enables you to scan local or remote infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks and requires no cloud authentication for local scans.

### Authentication

- For local scans, no authentication is required.
- For remote repository scans, authentication can be provided via:
    - [**GitHub Username and Personal Access Token (PAT)**](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic)
    - [**GitHub OAuth App Token**](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-fine-grained-personal-access-token)
    - [**Git URL**](https://git-scm.com/docs/git-clone#_git_urls)

### Supported Frameworks

The IaC provider leverages Checkov to support multiple frameworks, including:

- Terraform
- CloudFormation
- Kubernetes
- ARM (Azure Resource Manager)
- Serverless
- Dockerfile
- YAML/JSON (generic IaC)
- Bicep
- Helm
- GitHub Actions, GitLab CI, Bitbucket Pipelines, Azure Pipelines, CircleCI, Argo Workflows
- Ansible
- Kustomize
- OpenAPI
- SAST, SCA (Software Composition Analysis)
@@ -572,12 +572,12 @@ With M365 you need to specify which auth method is going to be used:

```console

# To use service principal authentication for MSGraph and PowerShell modules
prowler m365 --sp-env-auth

# To use both service principal (for MSGraph) and user credentials (for PowerShell modules)
prowler m365 --env-auth

# To use service principal authentication
prowler m365 --sp-env-auth

# To use az cli authentication
prowler m365 --az-cli-auth

@@ -612,5 +612,40 @@ prowler github --github-app-id app_id --github-app-key app_key
2. `OAUTH_APP_TOKEN`
3. `GITHUB_APP_ID` and `GITHUB_APP_KEY`

#### Infrastructure as Code (IaC)

Prowler's Infrastructure as Code (IaC) provider enables you to scan local or remote infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks, allowing you to assess your code before deployment.

```console
# Scan a directory for IaC files
prowler iac --scan-path ./my-iac-directory

# Scan a remote GitHub repository (public or private)
prowler iac --scan-repository-url https://github.com/user/repo.git

# Authenticate to a private repo with GitHub username and PAT
prowler iac --scan-repository-url https://github.com/user/repo.git \
  --github-username <username> --personal-access-token <token>

# Authenticate to a private repo with OAuth App Token
prowler iac --scan-repository-url https://github.com/user/repo.git \
  --oauth-app-token <oauth_token>

# Specify frameworks to scan (default: all)
prowler iac --scan-path ./my-iac-directory --frameworks terraform kubernetes

# Exclude specific paths
prowler iac --scan-path ./my-iac-directory --exclude-path ./my-iac-directory/test,./my-iac-directory/examples
```

???+ note
    - `--scan-path` and `--scan-repository-url` are mutually exclusive; only one can be specified at a time.
    - For remote repository scans, authentication can be provided via CLI flags or environment variables (`GITHUB_OAUTH_APP_TOKEN`, `GITHUB_USERNAME`, `GITHUB_PERSONAL_ACCESS_TOKEN`). CLI flags take precedence (see the example below).
    - The IaC provider does not require cloud authentication for local scans.
    - It is ideal for CI/CD pipelines and local development environments.
    - For more details on supported frameworks and rules, see the [Checkov documentation](https://www.checkov.io/1.Welcome/Quick%20Start.html).
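For example, a private repository scan can rely on the environment variables instead of flags (a sketch using the variables listed in the note above):

```console
export GITHUB_OAUTH_APP_TOKEN="<oauth_token>"
prowler iac --scan-repository-url https://github.com/user/repo.git
```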
See more details about IaC scanning in the [IaC Tutorial](tutorials/iac/getting-started-iac.md) section.

## Prowler v2 Documentation
For **Prowler v2 Documentation**, please check it out [here](https://github.com/prowler-cloud/prowler/blob/8818f47333a0c1c1a457453c87af0ea5b89a385f/README.md).

@@ -12,3 +12,34 @@

See section [Logging](./tutorials/logging.md) for further information or [contact us](./contact.md).

## Common Issues with Docker Compose Installation

- **Problem adding AWS Provider using "Connect assuming IAM Role" in Docker (see [GitHub Issue #7745](https://github.com/prowler-cloud/prowler/issues/7745))**:

    When running Prowler App via Docker, you may encounter errors such as `Provider not set`, `AWS assume role error - Unable to locate credentials`, or `Provider has no secret` when trying to add an AWS Provider using the "Connect assuming IAM Role" option. This typically happens because the container does not have access to the necessary AWS credentials or profiles.

    **Workaround:**

    - Ensure your AWS credentials and configuration are available to the Docker container. You can do this by mounting your local `.aws` directory into the container. For example, in your `docker-compose.yaml`, add the following volume to the relevant services:

        ```yaml
        volumes:
          - "${HOME}/.aws:/home/prowler/.aws:ro"
        ```

        This should be added to the `api`, `worker`, and `worker-beat` services.

    - Create or update your `~/.aws/config` and `~/.aws/credentials` files with the appropriate profiles and roles. For example:

        ```ini
        [profile prowler-profile]
        role_arn = arn:aws:iam::<account-id>:role/ProwlerScan
        source_profile = default
        ```

        And set the environment variable in your `.env` file:

        ```env
        AWS_PROFILE=prowler-profile
        ```

    - If you are scanning multiple AWS accounts, you may need to add multiple profiles to your AWS config. Note that this workaround is mainly for local testing; for production or multi-account setups, follow the [CloudFormation Template guide](https://github.com/prowler-cloud/prowler/issues/7745) and ensure the correct IAM roles and permissions are set up in each account. A quick way to sanity-check the profile is shown below.
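To sanity-check the profile before wiring it into the containers, you can confirm that it resolves credentials and assumes the role. A sketch, assuming the AWS CLI is installed locally and the `prowler-profile` example above:

```console
aws sts get-caller-identity --profile prowler-profile
```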

@@ -27,18 +27,18 @@ prowler github --oauth-app-token oauth_token
```

### GitHub App Credentials
Use GitHub App credentials by specifying the App ID and the private key.
Use GitHub App credentials by specifying the App ID and the private key path.

```console
prowler github --github-app-id app_id --github-app-key app_key
prowler github --github-app-id app_id --github-app-key-path app_key_path
```

### Automatic Login Method Detection
If no login method is explicitly provided, Prowler will automatically attempt to authenticate using environment variables in the following order of precedence:

1. `GITHUB_PERSONAL_ACCESS_TOKEN`
2. `OAUTH_APP_TOKEN`
3. `GITHUB_APP_ID` and `GITHUB_APP_KEY`
2. `GITHUB_OAUTH_APP_TOKEN`
3. `GITHUB_APP_ID` and `GITHUB_APP_KEY` (where the key is the content of the private key file; see the sketch below)
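For the third method, `GITHUB_APP_KEY` expects the key content itself rather than a path, so one convenient pattern is to load it from the downloaded `.pem` file. A sketch (the path is illustrative):

```console
export GITHUB_APP_ID="your_app_id"
export GITHUB_APP_KEY="$(cat /path/to/private-key.pem)"
prowler github
```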

???+ note
    If you don't plan to specify the login method, ensure the corresponding environment variables are set before running Prowler so the method can be detected automatically.
209
docs/tutorials/github/getting-started-github.md
Normal file
@@ -0,0 +1,209 @@
# Getting Started with GitHub Authentication

This guide explains how to set up authentication with GitHub for Prowler. The documentation covers credential retrieval processes for each supported authentication method.

## Prerequisites

- GitHub account
- Token creation permissions (organization-level access requires admin permissions)

## Authentication Methods

### 1. Personal Access Token (PAT)

Personal Access Tokens provide the simplest GitHub authentication method and support individual user authentication or testing scenarios.

#### How to Create a Personal Access Token

1. **Navigate to GitHub Settings**
    - Open [GitHub](https://github.com) and sign in
    - Click the profile picture in the top right corner
    - Select "Settings" from the dropdown menu

2. **Access Developer Settings**
    - Scroll down the left sidebar
    - Click "Developer settings"

3. **Generate New Token**
    - Click "Personal access tokens"
    - Select "Tokens (classic)"
    - Click "Generate new token"

4. **Configure Token Permissions**
    To enable Prowler functionality, configure the following scopes:
    - `repo`: Full control of private repositories
    - `read:org`: Read organization and team membership
    - `read:user`: Read user profile data
    - `read:discussion`: Read discussions
    - `read:enterprise`: Read enterprise data (if applicable)

5. **Copy and Store the Token**
    - Copy the generated token immediately (GitHub displays tokens only once)
    - Store tokens securely using environment variables

#### How to Use Personal Access Tokens

Choose one of the following methods:

**Command-line flag:**

```console
prowler github --personal-access-token your_token_here
```

**Environment variable:**

```console
export GITHUB_PERSONAL_ACCESS_TOKEN="your_token_here"
prowler github
```

### 2. OAuth App Token

OAuth Apps enable applications to act on behalf of users with explicit consent.

#### How to Create an OAuth App

1. **Navigate to Developer Settings**
    - Open GitHub Settings → Developer settings
    - Click "OAuth Apps"

2. **Register New Application**
    - Click "New OAuth App"
    - Complete the required fields:
        - **Application name**: Descriptive application name
        - **Homepage URL**: Application homepage
        - **Authorization callback URL**: User redirection URL after authorization

3. **Obtain Authorization Code**
    - Request an authorization code (replace `{app_id}` with the application ID):
    ```
    https://github.com/login/oauth/authorize?client_id={app_id}
    ```

4. **Exchange Code for Token**
    - Exchange the authorization code for an access token (replace `{app_id}`, `{secret}`, and `{code}`; a `curl` sketch follows below):
    ```
    https://github.com/login/oauth/access_token?code={code}&client_id={app_id}&client_secret={secret}
    ```
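In practice this exchange is an HTTP POST. A sketch with curl, using the same placeholders and requesting a JSON response (the `Accept: application/json` header makes GitHub return JSON instead of a form-encoded body):

```console
curl -s -X POST "https://github.com/login/oauth/access_token" \
  -H "Accept: application/json" \
  -d "client_id={app_id}" \
  -d "client_secret={secret}" \
  -d "code={code}"
```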
#### How to Use OAuth Tokens

Choose one of the following methods:

**Command-line flag:**

```console
prowler github --oauth-app-token your_oauth_token
```

**Environment variable:**

```console
export GITHUB_OAUTH_APP_TOKEN="your_oauth_token"
prowler github
```

### 3. GitHub App Credentials

GitHub Apps provide the recommended integration method for accessing multiple repositories or organizations.

#### How to Create a GitHub App

1. **Navigate to Developer Settings**
    - Open GitHub Settings → Developer settings
    - Click "GitHub Apps"

2. **Create New GitHub App**
    - Click "New GitHub App"
    - Complete the required fields:
        - **GitHub App name**: Unique application name
        - **Homepage URL**: Application homepage
        - **Webhook URL**: Webhook payload URL (optional)
        - **Permissions**: Application permission requirements

3. **Configure Permissions**
    To enable Prowler functionality, configure these permissions:
    - **Repository permissions**:
        - Contents (Read)
        - Metadata (Read)
        - Pull requests (Read)
    - **Organization permissions**:
        - Members (Read)
        - Administration (Read)
    - **Account permissions**:
        - Email addresses (Read)

4. **Generate Private Key**
    - Scroll to the "Private keys" section after app creation
    - Click "Generate a private key"
    - Download the `.pem` file and store it securely

5. **Record App ID**
    - Locate the App ID at the top of the GitHub App settings page

#### How to Install the GitHub App

1. **Install Application**
    - Navigate to GitHub App settings
    - Click "Install App" in the left sidebar
    - Select the target account/organization
    - Choose specific repositories or select "All repositories"

#### How to Use GitHub App Credentials

Choose one of the following methods:

**Command-line flags:**

```console
prowler github --github-app-id your_app_id --github-app-key-path /path/to/private-key.pem
```

**Environment variables:**

```console
export GITHUB_APP_ID="your_app_id"
export GITHUB_APP_KEY="private-key-content"
prowler github
```

## Best Practices

### Security Considerations

Implement the following security measures:

- **Secure Credential Storage**: Store credentials using environment variables instead of hardcoding tokens
- **Secrets Management**: Use dedicated secrets management systems in production environments
- **Regular Token Rotation**: Rotate tokens and keys regularly
- **Least Privilege Principle**: Grant only the minimum required permissions
- **Permission Auditing**: Review and audit permissions regularly
- **Token Expiration**: Set appropriate expiration times for tokens
- **Usage Monitoring**: Monitor token usage and revoke unused tokens

### Authentication Method Selection

Choose the appropriate method based on use case:

- **Personal Access Token**: Individual use, testing, or simple automation
- **OAuth App Token**: Applications requiring user consent and delegation
- **GitHub App**: Production integrations, especially for organizations

## Troubleshooting Common Issues

### Insufficient Permissions

- Verify the token/app has the necessary scopes/permissions
- Check organization restrictions on third-party applications

### Token Expiration

- Confirm the token has not expired
- Verify fine-grained tokens have correct resource access

### Rate Limiting

- GitHub implements API call rate limits
- Consider GitHub Apps for higher rate limits

### Organization Settings

- Some organizations restrict third-party applications
- Contact the organization administrator if access is denied
98
docs/tutorials/iac/getting-started-iac.md
Normal file
@@ -0,0 +1,98 @@
# Getting Started with the IaC Provider

Prowler's Infrastructure as Code (IaC) provider enables you to scan local or remote infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks, allowing you to assess your code before deployment.

## Supported Frameworks

The IaC provider leverages Checkov to support multiple frameworks, including:

- Terraform
- CloudFormation
- Kubernetes
- ARM (Azure Resource Manager)
- Serverless
- Dockerfile
- YAML/JSON (generic IaC)
- Bicep
- Helm
- GitHub Actions, GitLab CI, Bitbucket Pipelines, Azure Pipelines, CircleCI, Argo Workflows
- Ansible
- Kustomize
- OpenAPI
- SAST, SCA (Software Composition Analysis)

## How It Works

- The IaC provider scans your local directory (or a specified path) for supported IaC files, or scans a remote repository.
- No cloud credentials or authentication are required for local scans.
- For remote repository scans, authentication can be provided via the [git URL](https://git-scm.com/docs/git-clone#_git_urls), CLI flags, or environment variables.
- Mutelist logic is handled by Checkov, not Prowler.
- Results are output in the same formats as other Prowler providers (CSV, JSON, HTML, etc.).

## Usage

To run Prowler with the IaC provider, use the `iac` argument. You can specify the directory or repository to scan, frameworks to include, and paths to exclude.

### Scan a Local Directory (default)

```sh
prowler iac --scan-path ./my-iac-directory
```

### Scan a Remote GitHub Repository

```sh
prowler iac --scan-repository-url https://github.com/user/repo.git
```

#### Authentication for Remote Private Repositories

You can provide authentication for private repositories using one of the following methods:

- **GitHub Username and Personal Access Token (PAT):**
    ```sh
    prowler iac --scan-repository-url https://github.com/user/repo.git \
      --github-username <username> --personal-access-token <token>
    ```
- **GitHub OAuth App Token:**
    ```sh
    prowler iac --scan-repository-url https://github.com/user/repo.git \
      --oauth-app-token <oauth_token>
    ```
- If not provided via CLI, the following environment variables will be used (in order of precedence):
    - `GITHUB_OAUTH_APP_TOKEN`
    - `GITHUB_USERNAME` and `GITHUB_PERSONAL_ACCESS_TOKEN`
- If neither CLI flags nor environment variables are set, the scan will attempt to clone without authentication or using the credentials provided in the [git URL](https://git-scm.com/docs/git-clone#_git_urls).

#### Mutually Exclusive Flags

- `--scan-path` and `--scan-repository-url` are mutually exclusive. Only one can be specified at a time.

### Specify Frameworks

Scan only Terraform and Kubernetes files:

```sh
prowler iac --scan-path ./my-iac-directory --frameworks terraform kubernetes
```

### Exclude Paths

```sh
prowler iac --scan-path ./my-iac-directory --exclude-path ./my-iac-directory/test,./my-iac-directory/examples
```

## Output

You can use the standard Prowler output options, for example:

```sh
prowler iac --scan-path ./iac --output-formats csv json html
```

## Notes

- The IaC provider does not require cloud authentication for local scans.
- For remote repository scans, authentication is optional but required for private repos.
- CLI flags override environment variables for authentication.
- It is ideal for CI/CD pipelines and local development environments, as sketched below.
- For more details on supported frameworks and rules, see the [Checkov documentation](https://www.checkov.io/1.Welcome/Quick%20Start.html).
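As an example of CI/CD use, a minimal GitHub Actions job could run a local scan on every pull request. This workflow is a sketch (the job name, trigger, and the `pip install prowler` step are assumptions, not taken from this documentation):

```yaml
name: iac-scan
on: [pull_request]

jobs:
  prowler-iac:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # Install Prowler from PyPI and scan the checked-out repository.
      - run: pip install prowler
      - run: prowler iac --scan-path . --output-formats csv json
```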
BIN
docs/tutorials/img/gcp-auth-methods.png
Normal file
After Width: | Height: | Size: 255 KiB
BIN
docs/tutorials/img/gcp-service-account-creds.png
Normal file
After Width: | Height: | Size: 332 KiB
BIN
docs/tutorials/img/lighthouse-architecture.png
Normal file
After Width: | Height: | Size: 178 KiB
BIN
docs/tutorials/img/lighthouse-config.png
Normal file
After Width: | Height: | Size: 197 KiB
BIN
docs/tutorials/img/lighthouse-feature1.png
Normal file
After Width: | Height: | Size: 204 KiB
BIN
docs/tutorials/img/lighthouse-feature2.png
Normal file
After Width: | Height: | Size: 241 KiB
BIN
docs/tutorials/img/lighthouse-feature3.png
Normal file
After Width: | Height: | Size: 268 KiB
BIN
docs/tutorials/img/lighthouse-intro.png
Normal file
After Width: | Height: | Size: 404 KiB
@@ -12,7 +12,7 @@ This allows Prowler to authenticate against Microsoft 365 using the following me
To launch the tool first you need to specify which method is used through the following flags:

```console
# To use service principal (app) authentication and Microsoft user credentials (to use PowerShell)
# To use service principal (app) authentication and Microsoft user credentials
prowler m365 --env-auth

# To use service principal authentication
@@ -25,4 +25,4 @@ prowler m365 --az-cli-auth
prowler m365 --browser-auth --tenant-id "XXXXXXXX"
```

To use Prowler you also need to set up the permissions required to access your resources in your Microsoft 365 account; for more details refer to [Requirements](../../getting-started/requirements.md#microsoft-365)
To use Prowler you also need to set up the permissions required to access your resources in your Microsoft 365 account; for more details refer to [Requirements](../../getting-started/requirements.md#needed-permissions-2)

@@ -30,7 +30,7 @@ Go to the Entra ID portal, then you can search for `Domain` or go to Identity >

Once you are there just select the domain you want to use.
Once you are there just select the domain you want to use as the unique identifier for your M365 account in Prowler Cloud/App.

---

@@ -92,7 +92,7 @@ With this done you will have all the needed keys, summarized in the following ta

---

### Grant required API permissions
### Grant required Graph API permissions

Assign the following Microsoft Graph permissions:

@@ -100,7 +100,7 @@ Assign the following Microsoft Graph permissions:
- `Directory.Read.All`: Required for all services.
- `Policy.Read.All`: Required for all services.
- `SharePointTenantSettings.Read.All`: Required for SharePoint service.
- `User.Read` (IMPORTANT: this is set as **delegated**): Required for the sign-in.
- `User.Read` (IMPORTANT: this is set as **delegated**): Required for the sign-in only if using user authentication.

???+ note
    You can replace `Directory.Read.All` with `Domain.Read.All`, which is a more restrictive permission, but you won't be able to run the Entra checks related to DirectoryRoles and GetUsers.
@@ -128,18 +128,83 @@ Follow these steps to assign the permissions:

---

4. Click `+ Add a permission` > `Microsoft Graph` > `Delegated permissions`

### Grant PowerShell modules permissions

5. Search and select:
The permissions you need to grant depend on whether you are using user credentials or a service principal to authenticate to the M365 modules.

???+ warning "Warning"
    Make sure you add the correct set of permissions for the authentication method you are using.

#### If using application (service principal) authentication (Recommended)

To grant the permissions for the PowerShell modules via application authentication, you need to add the necessary APIs to your app registration.

???+ warning "Warning"
    You need to have a license that allows you to use the APIs.

1. Add Exchange API:

    - Search and select the `Office 365 Exchange Online` API in **APIs my organization uses**.

    - Select the `Exchange.ManageAsApp` permission and click on `Add permissions`.

    ![Exchange.ManageAsApp permission](./img/exchange-permission.png)

    You also need to assign the `Exchange Administrator` role to the app. For that, go to `Roles and administrators` and in the `Administrative roles` section click `here` to go to the directory level assignment:

    ![Directory level assignment](./img/here.png)

    Once in the directory level assignment, search for `Exchange Administrator` and click on it to open the assignments page of that role.

    ![Exchange Administrator role](./img/exchange-administrator-role.png)

    Click on `Add assignments`, search for your app and click on `Assign`.

    You have to select it as `Active` and click on `Assign` to assign the role to the app.

    ![Add assignments](./img/add-assginments.png)

2. Add Teams API:

    - Search and select the `Skype and Teams Tenant Admin API` in **APIs my organization uses**.

    - Select the `application_access` permission and click on `Add permissions`.

3. Click on `Grant admin consent for <your-tenant-name>` to grant admin consent.

The final result of the permission assignment should be this:

![Final permissions](./img/final-permissions.png)

---

#### If using user authentication

This method is not recommended because it requires a user with MFA enabled, and Microsoft will not allow MFA-capable users to authenticate programmatically after 1st September 2025. See the [Microsoft documentation](https://learn.microsoft.com/en-us/entra/identity/authentication/concept-mandatory-multifactor-authentication?tabs=dotnet) for more information.

???+ warning
    Remember that if the user is newly created, you need to sign in with that account first, as Microsoft will prompt you to change the password. If you don’t complete this step, user authentication will fail because Microsoft marks the initial password as expired.

1. Search and select:

    - `User.Read`

6. After adding all the permissions, click on `Grant admin consent`
2. Click `Add permissions`, then **grant admin consent**

@@ -147,37 +212,32 @@ Follow these steps to assign the role:

---
3. Assign **required roles** to your **user**

### Assign required roles to your user

Assign one of the following roles to your User:

- `Global Reader` (recommended): this allows you to read everything needed.
- `Exchange Administrator` and `Teams Administrator`: the user needs both roles, but with these [roles](https://learn.microsoft.com/en-us/exchange/permissions-exo/permissions-exo#microsoft-365-permissions-in-exchange-online) you can access the same information as a Global Reader (only read access is needed, which is why we recommend that role).

Follow these steps to assign the role:

1. Go to Users > All Users > Click on the email for the user you will use

2. Click `Assigned Roles`

3. Click on `Add assignments`, then search and select:

    - `Global Reader`. This is the recommended role; if you want to use the others, just search for them.

4. Click on `Next`, then assign the role as `Active`, and click on `Assign` to grant admin consent

???+ warning
    Remember that if the user is newly created, you need to sign in with that account first, as Microsoft will prompt you to change the password. If you don’t complete this step, user authentication will fail because Microsoft marks the initial password as expired.

---

@@ -193,6 +253,8 @@ Follow these steps to assign the role:
- `Client ID`
- `Tenant ID`
- `AZURE_CLIENT_SECRET` from earlier

If you are using user authentication, also add:
- `M365_USER`: the user email using the correct assigned domain; more info [here](../../getting-started/requirements.md#service-principal-and-user-credentials-authentication-recommended)
- `M365_PASSWORD`: the user's password
BIN
docs/tutorials/microsoft365/img/add-assginments.png
Normal file
After Width: | Height: | Size: 118 KiB
After Width: | Height: | Size: 142 KiB
BIN
docs/tutorials/microsoft365/img/exchange-administrator-role.png
Normal file
After Width: | Height: | Size: 111 KiB
BIN
docs/tutorials/microsoft365/img/exchange-permission.png
Normal file
After Width: | Height: | Size: 82 KiB
BIN
docs/tutorials/microsoft365/img/final-permissions.png
Normal file
After Width: | Height: | Size: 196 KiB
After Width: | Height: | Size: 268 KiB
BIN
docs/tutorials/microsoft365/img/here.png
Normal file
After Width: | Height: | Size: 349 KiB