Mirror of https://github.com/prowler-cloud/prowler.git, synced 2026-04-01 13:47:21 +00:00
Compare commits: PRWLR-6469 ... PRWLR-5513 (38 commits)
Commits (SHA1):
0715c659c8
33bd651e19
5346867a6a
f174f8af7c
5205b85c49
446b02e009
c7ba87faba
817ab62fe7
359a53bf16
0ea3a202c4
0b8bf9851a
27a8bb03d3
6b7fa9ac4e
b50ac49a7e
93ea60cc86
9fe50eb264
6f961139a1
7001789997
aa0d82f500
b94a535aba
9c9526af80
1d0243652e
4e64a26ece
b2e58c3045
45668eb53a
cac7c42f13
5ee960e13d
c11e6449f9
fa8a7f44de
f028e41652
b815eeb7ab
d0701ad0ce
d4e5b7c7c7
d27c5b7190
5ce7aa33ad
2628a4e506
27b628152d
f8f1e6c076
.env (58 changed lines)
@@ -3,17 +3,16 @@
 # For production, it is recommended to use a secure method to store these variables and change the default secret keys.
 
 #### Prowler UI Configuration ####
-PROWLER_UI_VERSION="stable"
-AUTH_URL=http://localhost:3000
+PROWLER_UI_VERSION="latest"
+SITE_URL=http://localhost:3000
 API_BASE_URL=http://prowler-api:8080/api/v1
 NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
 AUTH_TRUST_HOST=true
 UI_PORT=3000
 # openssl rand -base64 32
 AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
 
 #### Prowler API Configuration ####
-PROWLER_API_VERSION="stable"
+PROWLER_API_VERSION="latest"
 # PostgreSQL settings
 # If running Django and celery on host, use 'localhost', else use 'postgres-db'
 POSTGRES_HOST=postgres-db
@@ -30,30 +29,6 @@ VALKEY_HOST=valkey
 VALKEY_PORT=6379
 VALKEY_DB=0
 
-# API scan settings
-
-# The path to the directory where scan output should be stored
-DJANGO_TMP_OUTPUT_DIRECTORY="/tmp/prowler_api_output"
-
-# The maximum number of findings to process in a single batch
-DJANGO_FINDINGS_BATCH_SIZE=1000
-
-# The AWS access key to be used when uploading scan output to an S3 bucket
-# If left empty, default AWS credentials resolution behavior will be used
-DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID=""
-
-# The AWS secret key to be used when uploading scan output to an S3 bucket
-DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY=""
-
-# An optional AWS session token
-DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN=""
-
-# The AWS region where your S3 bucket is located (e.g., "us-east-1")
-DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION=""
-
-# The name of the S3 bucket where scan output should be stored
-DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET=""
-
 # Django settings
 DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api
 DJANGO_BIND_ADDRESS=0.0.0.0
@@ -65,12 +40,9 @@ DJANGO_LOGGING_FORMATTER=human_readable
 # Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
 # Applies to both Django and Celery Workers
 DJANGO_LOGGING_LEVEL=INFO
-# Defaults to the maximum available based on CPU cores if not set.
-DJANGO_WORKERS=4
-# Token lifetime is in minutes
-DJANGO_ACCESS_TOKEN_LIFETIME=30
-# Token lifetime is in minutes
-DJANGO_REFRESH_TOKEN_LIFETIME=1440
+DJANGO_WORKERS=4 # Defaults to the maximum available based on CPU cores if not set.
+DJANGO_ACCESS_TOKEN_LIFETIME=30 # Token lifetime is in minutes
+DJANGO_REFRESH_TOKEN_LIFETIME=1440 # Token lifetime is in minutes
 DJANGO_CACHE_MAX_AGE=3600
 DJANGO_STALE_WHILE_REVALIDATE=60
 DJANGO_MANAGE_DB_PARTITIONS=True
@@ -115,21 +87,3 @@ jQIDAQAB
 -----END PUBLIC KEY-----"
 # openssl rand -base64 32
 DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
-DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
-DJANGO_SENTRY_DSN=
-
-# Sentry settings
-SENTRY_ENVIRONMENT=local
-SENTRY_RELEASE=local
-
-#### Prowler release version ####
-NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.5.0
-
-# Social login credentials
-SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
-SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
-SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""
-
-SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
-SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
-SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""
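The two `# openssl rand -base64 32` comments above show how the default secrets are meant to be regenerated. A minimal sketch of rotating both values before first boot; the in-place `sed` edit of `.env` is an assumed workflow, not something the repo prescribes:

    # Assumed workflow: replace the shipped default secrets with fresh 32-byte values.
    AUTH_SECRET="$(openssl rand -base64 32)"
    ENCRYPTION_KEY="$(openssl rand -base64 32)"
    # '|' is a safe sed delimiter here because it never appears in base64 output.
    sed -i "s|^AUTH_SECRET=.*|AUTH_SECRET=\"${AUTH_SECRET}\"|" .env
    sed -i "s|^DJANGO_SECRETS_ENCRYPTION_KEY=.*|DJANGO_SECRETS_ENCRYPTION_KEY=\"${ENCRYPTION_KEY}\"|" .env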
.github/codeql/api-codeql-config.yml (4 changed lines, vendored)
@@ -1,3 +1,3 @@
-name: "API - CodeQL Config"
+name: "Custom CodeQL Config for API"
 paths:
-  - "api/"
+  - 'api/'
.github/codeql/codeql-config.yml (4 changed lines, vendored, new file)
@@ -0,0 +1,4 @@
+name: "Custom CodeQL Config"
+paths-ignore:
+  - 'api/'
+  - 'ui/'
.github/codeql/sdk-codeql-config.yml (4 changed lines, vendored, deleted)
@@ -1,4 +0,0 @@
-name: "SDK - CodeQL Config"
-paths-ignore:
-  - "api/"
-  - "ui/"
.github/codeql/ui-codeql-config.yml (2 changed lines, vendored)
@@ -1,3 +1,3 @@
-name: "UI - CodeQL Config"
+name: "Custom CodeQL Config for UI"
 paths:
   - "ui/"
.github/dependabot.yml (81 changed lines, vendored)
@@ -5,7 +5,6 @@
 
 version: 2
 updates:
-  # v5
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
@@ -15,19 +14,6 @@ updates:
     labels:
       - "dependencies"
       - "pip"
-
-  # Dependabot Updates are temporary disabled - 2025/03/19
-  # - package-ecosystem: "pip"
-  #   directory: "/api"
-  #   schedule:
-  #     interval: "daily"
-  #   open-pull-requests-limit: 10
-  #   target-branch: master
-  #   labels:
-  #     - "dependencies"
-  #     - "pip"
-  #     - "component/api"
-
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
@@ -38,79 +24,20 @@ updates:
       - "dependencies"
       - "github_actions"
-
-  # Dependabot Updates are temporary disabled - 2025/03/19
-  # - package-ecosystem: "npm"
-  #   directory: "/ui"
-  #   schedule:
-  #     interval: "daily"
-  #   open-pull-requests-limit: 10
-  #   target-branch: master
-  #   labels:
-  #     - "dependencies"
-  #     - "npm"
-  #     - "component/ui"
-
-  - package-ecosystem: "docker"
-    directory: "/"
-    schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 10
-    target-branch: master
-    labels:
-      - "dependencies"
-      - "docker"
-
-  # v4.6
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
-    target-branch: v4.6
+    target-branch: v3
     labels:
       - "dependencies"
       - "pip"
-      - "v4"
-
+      - "v3"
-  - package-ecosystem: "github-actions"
-    directory: "/"
-    schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 10
-    target-branch: v4.6
-    labels:
-      - "dependencies"
-      - "github_actions"
-      - "v4"
-
-  - package-ecosystem: "docker"
-    directory: "/"
-    schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 10
-    target-branch: v4.6
-    labels:
-      - "dependencies"
-      - "docker"
-      - "v4"
-
-  # Dependabot Updates are temporary disabled - 2025/03/19
-  # v3
-  # - package-ecosystem: "pip"
-  #   directory: "/"
-  #   schedule:
-  #     interval: "monthly"
-  #   open-pull-requests-limit: 10
-  #   target-branch: v3
-  #   labels:
-  #     - "dependencies"
-  #     - "pip"
-  #     - "v3"
 
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "monthly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
.github/labeler.yml (15 changed lines, vendored)
@@ -22,11 +22,6 @@ provider/kubernetes:
       - any-glob-to-any-file: "prowler/providers/kubernetes/**"
       - any-glob-to-any-file: "tests/providers/kubernetes/**"
 
-provider/github:
-  - changed-files:
-      - any-glob-to-any-file: "prowler/providers/github/**"
-      - any-glob-to-any-file: "tests/providers/github/**"
-
 github_actions:
   - changed-files:
       - any-glob-to-any-file: ".github/workflows/*"
@@ -92,13 +87,3 @@ component/api:
 component/ui:
   - changed-files:
       - any-glob-to-any-file: "ui/**"
-
-compliance:
-  - changed-files:
-      - any-glob-to-any-file: "prowler/compliance/**"
-      - any-glob-to-any-file: "prowler/lib/outputs/compliance/**"
-      - any-glob-to-any-file: "tests/lib/outputs/compliance/**"
-
-review-django-migrations:
-  - changed-files:
-      - any-glob-to-any-file: "api/src/backend/api/migrations/**"
.github/pull_request_template.md (8 changed lines, vendored)
@@ -15,13 +15,7 @@ Please include a summary of the change and which issue is fixed. List any depend
 - [ ] Review if the code is being covered by tests.
 - [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
 - [ ] Review if backport is needed.
 - [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)
-
-#### API
-- [ ] Verify if API specs need to be regenerated.
-- [ ] Check if version updates are required (e.g., specs, Poetry, etc.).
-- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/api/CHANGELOG.md), if applicable.
 
 ### License
 
-By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
+By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
.github/workflows/api-build-lint-push-containers.yml (114 changed lines, vendored, deleted)
@@ -1,114 +0,0 @@
-name: API - Build and Push containers
-
-on:
-  push:
-    branches:
-      - "master"
-    paths:
-      - "api/**"
-      - ".github/workflows/api-build-lint-push-containers.yml"
-
-  # Uncomment the code below to test this action on PRs
-  # pull_request:
-  #   branches:
-  #     - "master"
-  #   paths:
-  #     - "api/**"
-  #     - ".github/workflows/api-build-lint-push-containers.yml"
-
-  release:
-    types: [published]
-
-env:
-  # Tags
-  LATEST_TAG: latest
-  RELEASE_TAG: ${{ github.event.release.tag_name }}
-  STABLE_TAG: stable
-
-  WORKING_DIRECTORY: ./api
-
-  # Container Registries
-  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
-  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api
-
-jobs:
-  repository-check:
-    name: Repository check
-    runs-on: ubuntu-latest
-    outputs:
-      is_repo: ${{ steps.repository_check.outputs.is_repo }}
-    steps:
-      - name: Repository check
-        id: repository_check
-        working-directory: /tmp
-        run: |
-          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
-          then
-            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
-          else
-            echo "This action only runs for prowler-cloud/prowler"
-            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
-          fi
-
-  # Build Prowler OSS container
-  container-build-push:
-    needs: repository-check
-    if: needs.repository-check.outputs.is_repo == 'true'
-    runs-on: ubuntu-latest
-    defaults:
-      run:
-        working-directory: ${{ env.WORKING_DIRECTORY }}
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
-      - name: Set short git commit SHA
-        id: vars
-        run: |
-          shortSha=$(git rev-parse --short ${{ github.sha }})
-          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
-
-      - name: Login to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-
-      - name: Build and push container image (latest)
-        # Comment the following line for testing
-        if: github.event_name == 'push'
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
-        with:
-          context: ${{ env.WORKING_DIRECTORY }}
-          # Set push: false for testing
-          push: true
-          tags: |
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-
-      - name: Build and push container image (release)
-        if: github.event_name == 'release'
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
-        with:
-          context: ${{ env.WORKING_DIRECTORY }}
-          push: true
-          tags: |
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-
-      - name: Trigger deployment
-        if: github.event_name == 'push'
-        uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
-        with:
-          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          repository: ${{ secrets.CLOUD_DISPATCH }}
-          event-type: prowler-api-deploy
-          client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'
.github/workflows/api-codeql.yml (16 changed lines, vendored)
@@ -9,18 +9,22 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: API - CodeQL
+name: "API - CodeQL"
 
 on:
   push:
-    branches:
+    branches:
       - "master"
+      - "v3"
+      - "v4.*"
       - "v5.*"
     paths:
       - "api/**"
   pull_request:
-    branches:
+    branches:
       - "master"
+      - "v3"
+      - "v4.*"
       - "v5.*"
     paths:
      - "api/**"
@@ -44,16 +48,16 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13
+        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/api-codeql-config.yml
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13
+        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/api-pull-request.yml (53 changed lines, vendored)
@@ -1,20 +1,18 @@
-name: API - Pull Request
+name: "API - Pull Request"
 
 on:
   push:
     branches:
       - "master"
       - "v5.*"
     paths:
-      - ".github/workflows/api-pull-request.yml"
       - "api/**"
   pull_request:
     branches:
       - "master"
       - "v5.*"
     paths:
       - "api/**"
-
 
 env:
   POSTGRES_HOST: localhost
   POSTGRES_PORT: 5432
@@ -26,8 +24,7 @@ env:
   VALKEY_HOST: localhost
   VALKEY_PORT: 6379
   VALKEY_DB: 0
-  API_WORKING_DIR: ./api
-  IMAGE_NAME: prowler-api
+
 
 jobs:
   test:
@@ -71,11 +68,10 @@ jobs:
           --health-retries 5
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
+      - uses: actions/checkout@v4
       - name: Test if changes are in not ignored paths
         id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@823fcebdb31bb35fdf2229d9f769b400309430d0 # v46.0.3
+        uses: tj-actions/changed-files@v45
        with:
          files: api/**
          files_ignore: |
@@ -84,26 +80,23 @@ jobs:
           api/permissions/**
           api/README.md
           api/mkdocs.yml
-
       - name: Install poetry
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           python -m pip install --upgrade pip
-          pipx install poetry==2.1.1
-
+          pipx install poetry
       - name: Set up Python ${{ matrix.python-version }}
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"
-
       - name: Install dependencies
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry install --no-root
+          poetry install
           poetry run pip list
           VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
           grep '"tag_name":' | \
@@ -115,75 +108,49 @@ jobs:
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry check --lock
-
+          poetry lock --check
       - name: Lint with ruff
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run ruff check . --exclude contrib
-
       - name: Check Format with ruff
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run ruff format --check . --exclude contrib
-
       - name: Lint with pylint
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
-
       - name: Bandit
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
-
       - name: Safety
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run safety check --ignore 70612,66963
-
       - name: Vulture
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
-
       - name: Hadolint
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           /tmp/hadolint Dockerfile --ignore=DL3013
-
       - name: Test with pytest
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run pytest --cov=./src/backend --cov-report=xml src/backend
-
       - name: Upload coverage reports to Codecov
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5.4.0
+        uses: codecov/codecov-action@v5
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
           flags: api
-  test-container-build:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-      - name: Build Container
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
-        with:
-          context: ${{ env.API_WORKING_DIR }}
-          push: false
-          tags: ${{ env.IMAGE_NAME }}:latest
-          outputs: type=docker
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
.github/workflows/backport.yml (39 changed lines, vendored)
@@ -1,47 +1,42 @@
-name: Prowler - Automatic Backport
+name: Automatic Backport
 
 on:
   pull_request_target:
     branches: ['master']
     types: ['labeled', 'closed']
 
-env:
-  # The prefix of the label that triggers the backport must not contain the branch name
-  # so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
-  BACKPORT_LABEL_PREFIX: backport-to-
-  BACKPORT_LABEL_IGNORE: was-backported
-
 jobs:
   backport:
     name: Backport PR
-    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
+    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
     runs-on: ubuntu-latest
     permissions:
       id-token: write
       pull-requests: write
       contents: write
     steps:
-      - name: Check labels
-        id: preview_label_check
-        uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
-        with:
-          allow_failure: true
-          prefix_mode: true
-          any_of: ${{ env.BACKPORT_LABEL_PREFIX }}
-          none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
-          repo_token: ${{ secrets.GITHUB_TOKEN }}
+      # Workaround not to fail the workflow if the PR does not need a backport
+      # https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
+      - name: Check for backport labels
+        id: check_labels
+        run: |-
+          labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
+          echo "$labels"
+          matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
+          echo "matched=$matched"
+          echo "matched=$matched" >> $GITHUB_OUTPUT
 
       - name: Backport Action
-        if: steps.preview_label_check.outputs.label_check == 'success'
-        uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
+        if: fromJSON(steps.check_labels.outputs.matched) > 0
+        uses: sorenlouv/backport-github-action@v9.5.1
         with:
           github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
+          auto_backport_label_prefix: backport-to-
 
       - name: Info log
-        if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
+        if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
         run: cat ~/.backport/backport.info.log
 
       - name: Debug log
-        if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
+        if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
         run: cat ~/.backport/backport.debug.log
@@ -1,4 +1,4 @@
-name: Prowler - Pull Request Documentation Link
+name: Pull Request Documentation Link
 
 on:
   pull_request:
@@ -17,7 +17,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Leave PR comment with the Prowler Documentation URI
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
+        uses: peter-evans/create-or-update-comment@v4
        with:
          issue-number: ${{ env.PR_NUMBER }}
          body: |
@@ -1,13 +1,9 @@
-name: SDK - Build and Push containers
+name: Build and Push containers
 
 on:
   push:
     branches:
-      # For `v3-latest`
-      - "v3"
-      # For `v4-latest`
-      - "v4.6"
       # For `latest`
       - "master"
     paths-ignore:
       - ".github/**"
@@ -42,10 +38,6 @@ env:
   # Python configuration
   PYTHON_VERSION: 3.12
 
-  # Container Registries
-  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
-  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler
-
 jobs:
   # Build Prowler OSS container
   container-build-push:
@@ -59,16 +51,16 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
 
       - name: Setup Python
-        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
 
       - name: Install Poetry
        run: |
-          pipx install poetry==1.8.5
+          pipx install poetry
          pipx inject poetry poetry-bumpversion
 
       - name: Get Prowler version
@@ -89,13 +81,7 @@ jobs:
             echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
             ;;
 
-          4)
-            echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
-            echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
-            ;;
-
           5)
             echo "LATEST_TAG=latest" >> "${GITHUB_ENV}"
             echo "STABLE_TAG=stable" >> "${GITHUB_ENV}"
             ;;
@@ -108,13 +94,13 @@ jobs:
           esac
 
       - name: Login to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Login to Public ECR
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
        with:
          registry: public.ecr.aws
          username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
@@ -123,24 +109,23 @@ jobs:
           AWS_REGION: ${{ env.AWS_REGION }}
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3
 
       - name: Build and push container image (latest)
         if: github.event_name == 'push'
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        uses: docker/build-push-action@v6
         with:
           push: true
           tags: |
             ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
             ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
           file: ${{ env.DOCKERFILE_PATH }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
 
       - name: Build and push container image (release)
         if: github.event_name == 'release'
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        uses: docker/build-push-action@v6
         with:
           # Use local context to get changes
           # https://github.com/docker/build-push-action#path-context
@@ -151,8 +136,6 @@ jobs:
             ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
             ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.PROWLER_VERSION }}
             ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.PROWLER_VERSION }}
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
           file: ${{ env.DOCKERFILE_PATH }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
@@ -9,24 +9,22 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: SDK - CodeQL
+name: "CodeQL"
 
 on:
   push:
-    branches:
+    branches:
       - "master"
       - "v3"
       - "v4.*"
-      - "v5.*"
     paths-ignore:
       - 'ui/**'
       - 'api/**'
   pull_request:
-    branches:
+    branches:
       - "master"
       - "v3"
       - "v4.*"
-      - "v5.*"
     paths-ignore:
      - 'ui/**'
      - 'api/**'
@@ -50,16 +48,16 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13
+        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
-          config-file: ./.github/codeql/sdk-codeql-config.yml
+          config-file: ./.github/codeql/codeql-config.yml
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13
+        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/conventional-commit.yml (23 changed lines, vendored, deleted)
@@ -1,23 +0,0 @@
-name: Prowler - Conventional Commit
-
-on:
-  pull_request:
-    types:
-      - "opened"
-      - "edited"
-      - "synchronize"
-    branches:
-      - "master"
-      - "v3"
-      - "v4.*"
-      - "v5.*"
-
-jobs:
-  conventional-commit-check:
-    runs-on: ubuntu-latest
-    steps:
-      - name: conventional-commit-check
-        id: conventional-commit-check
-        uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
-        with:
-          pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
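The deleted check validated PR titles against the regex above: a type, an optional parenthesised scope, then a colon, a space, and a description. A quick way to try titles against it locally (GNU grep with `-P` for Perl-compatible syntax; the sample titles are made up):

    # Matches: type "feat", scope "aws", description present.
    echo 'feat(aws): add new check' | grep -Pq '^([^\s(]+)(?:\(([^)]+)\))?: (.+)' && echo OK
    # Fails: no ": " separator after the type.
    echo 'update readme' | grep -Pq '^([^\s(]+)(?:\(([^)]+)\))?: (.+)' || echo REJECTED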
.github/workflows/find-secrets.yml (8 changed lines, vendored)
@@ -1,4 +1,4 @@
-name: Prowler - Find secrets
+name: Find secrets
 
 on: pull_request
 
@@ -7,13 +7,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
        with:
          fetch-depth: 0
       - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@34339eaf08bf5c2a27dbd969812127721f3743ed # v3.88.22
+        uses: trufflesecurity/trufflehog@v3.84.1
        with:
          path: ./
          base: ${{ github.event.repository.default_branch }}
          head: HEAD
-          extra_args: --only-verified
+          extra_args: --only-verified
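This workflow scans the PR range and, via `--only-verified`, reports only credentials TruffleHog could verify live. Roughly the same scan can be reproduced locally; the `file://` source follows TruffleHog v3 conventions, so treat the exact invocation as an assumption:

    # Scan the current working repository for verified secrets only.
    trufflehog git file://. --only-verified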
.github/workflows/labeler.yml (4 changed lines, vendored)
@@ -1,4 +1,4 @@
-name: Prowler - PR Labeler
+name: "Pull Request Labeler"
 
 on:
   pull_request_target:
@@ -14,4 +14,4 @@ jobs:
     pull-requests: write
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
+      - uses: actions/labeler@v5
@@ -1,4 +1,4 @@
-name: SDK - Pull Request
+name: "Pull Request"
 
 on:
   push:
@@ -21,11 +21,10 @@ jobs:
         python-version: ["3.9", "3.10", "3.11", "3.12"]
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
+      - uses: actions/checkout@v4
       - name: Test if changes are in not ignored paths
         id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@823fcebdb31bb35fdf2229d9f769b400309430d0 # v46.0.3
+        uses: tj-actions/changed-files@v45
        with:
          files: ./**
          files_ignore: |
@@ -37,84 +36,65 @@ jobs:
           README.md
           mkdocs.yml
           .backportrc.json
           .env
           docker-compose*
           examples/**
           .gitignore
-
       - name: Install poetry
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           python -m pip install --upgrade pip
-          pipx install poetry==2.1.1
-
+          pipx install poetry
       - name: Set up Python ${{ matrix.python-version }}
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"
-
       - name: Install dependencies
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry install --no-root
+          poetry install
           poetry run pip list
           VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
           grep '"tag_name":' | \
           sed -E 's/.*"v([^"]+)".*/\1/' \
           ) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
           && chmod +x /tmp/hadolint
-
       - name: Poetry check
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry check --lock
-
+          poetry lock --check
       - name: Lint with flake8
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api
-
       - name: Checking format with black
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run black --exclude api ui --check .
-
       - name: Lint with pylint
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
-
       - name: Bandit
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .
-
       - name: Safety
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run safety check --ignore 70612 -r pyproject.toml
-
       - name: Vulture
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
-
       - name: Hadolint
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           /tmp/hadolint Dockerfile --ignore=DL3013
-
       - name: Test with pytest
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
-
       - name: Upload coverage reports to Codecov
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5.4.0
+        uses: codecov/codecov-action@v5
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
           flags: prowler
@@ -1,4 +1,4 @@
-name: SDK - PyPI release
+name: PyPI release
 
 on:
   release:
@@ -10,40 +10,12 @@ env:
   CACHE: "poetry"
 
 jobs:
-  repository-check:
-    name: Repository check
-    runs-on: ubuntu-latest
-    outputs:
-      is_repo: ${{ steps.repository_check.outputs.is_repo }}
-    steps:
-      - name: Repository check
-        id: repository_check
-        working-directory: /tmp
-        run: |
-          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
-          then
-            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
-          else
-            echo "This action only runs for prowler-cloud/prowler"
-            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
-          fi
-
   release-prowler-job:
     runs-on: ubuntu-latest
-    needs: repository-check
-    if: needs.repository-check.outputs.is_repo == 'true'
     env:
       POETRY_VIRTUALENVS_CREATE: "false"
     name: Release Prowler to PyPI
     steps:
-      - name: Repository check
-        working-directory: /tmp
-        run: |
-          if [[ "${{ github.repository }}" != "prowler-cloud/prowler" ]]; then
-            echo "This action only runs for prowler-cloud/prowler"
-            exit 1
-          fi
-
       - name: Get Prowler version
         run: |
           PROWLER_VERSION="${{ env.RELEASE_TAG }}"
@@ -55,23 +27,20 @@ jobs:
           4)
             echo "Releasing Prowler v4 with tag ${PROWLER_VERSION}"
             ;;
           5)
             echo "Releasing Prowler v5 with tag ${PROWLER_VERSION}"
             ;;
           *)
             echo "Releasing another Prowler major version, aborting..."
             exit 1
             ;;
           esac
-
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
-
       - name: Install dependencies
         run: |
-          pipx install poetry==2.1.1
+          pipx install poetry
-
       - name: Setup Python
-        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: ${{ env.CACHE }}
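The case statement above dispatches on the release tag's major version. A hedged sketch of how such a dispatch typically extracts the major number; the `%%.*` parameter expansion is an assumption, since the visible hunk starts inside the case arms:

    PROWLER_VERSION="5.5.0"
    case "${PROWLER_VERSION%%.*}" in   # strip everything after the first dot
      5) echo "Releasing Prowler v5 with tag ${PROWLER_VERSION}" ;;
      *) echo "Releasing another Prowler major version, aborting..."; exit 1 ;;
    esac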
@@ -1,6 +1,6 @@
 # This is a basic workflow to help you get started with Actions
 
-name: SDK - Refresh AWS services' regions
+name: Refresh regions of AWS services
 
 on:
   schedule:
@@ -23,12 +23,12 @@ jobs:
     # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
        with:
          ref: ${{ env.GITHUB_BRANCH }}
 
       - name: setup python
-        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+        uses: actions/setup-python@v5
        with:
          python-version: 3.9 #install the python needed
@@ -38,7 +38,7 @@ jobs:
         pip install boto3
 
       - name: Configure AWS Credentials -- DEV
-        uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0
+        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: ${{ env.AWS_REGION_DEV }}
          role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
@@ -50,13 +50,12 @@ jobs:
 
       # Create pull request
       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@v7
        with:
-          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          commit-message: "feat(regions_update): Update regions for AWS services"
          branch: "aws-services-regions-updated-${{ github.sha }}"
-          labels: "status/waiting-for-revision, severity/low, provider/aws"
+          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
          title: "chore(regions_update): Changes in regions for AWS services"
          body: |
            ### Description
.github/workflows/ui-build-lint-push-containers.yml (118 changed lines, vendored, deleted)
@@ -1,118 +0,0 @@
-name: UI - Build and Push containers
-
-on:
-  push:
-    branches:
-      - "master"
-    paths:
-      - "ui/**"
-      - ".github/workflows/ui-build-lint-push-containers.yml"
-
-  # Uncomment the below code to test this action on PRs
-  # pull_request:
-  #   branches:
-  #     - "master"
-  #   paths:
-  #     - "ui/**"
-  #     - ".github/workflows/ui-build-lint-push-containers.yml"
-
-  release:
-    types: [published]
-
-env:
-  # Tags
-  LATEST_TAG: latest
-  RELEASE_TAG: ${{ github.event.release.tag_name }}
-  STABLE_TAG: stable
-
-  WORKING_DIRECTORY: ./ui
-
-  # Container Registries
-  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
-  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
-
-jobs:
-  repository-check:
-    name: Repository check
-    runs-on: ubuntu-latest
-    outputs:
-      is_repo: ${{ steps.repository_check.outputs.is_repo }}
-    steps:
-      - name: Repository check
-        id: repository_check
-        working-directory: /tmp
-        run: |
-          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
-          then
-            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
-          else
-            echo "This action only runs for prowler-cloud/prowler"
-            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
-          fi
-
-  # Build Prowler OSS container
-  container-build-push:
-    needs: repository-check
-    if: needs.repository-check.outputs.is_repo == 'true'
-    runs-on: ubuntu-latest
-    defaults:
-      run:
-        working-directory: ${{ env.WORKING_DIRECTORY }}
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
-      - name: Set short git commit SHA
-        id: vars
-        run: |
-          shortSha=$(git rev-parse --short ${{ github.sha }})
-          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
-
-      - name: Login to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-
-      - name: Build and push container image (latest)
-        # Comment the following line for testing
-        if: github.event_name == 'push'
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
-        with:
-          context: ${{ env.WORKING_DIRECTORY }}
-          build-args: |
-            NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
-          # Set push: false for testing
-          push: true
-          tags: |
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-
-      - name: Build and push container image (release)
-        if: github.event_name == 'release'
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
-        with:
-          context: ${{ env.WORKING_DIRECTORY }}
-          build-args: |
-            NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
-          push: true
-          tags: |
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
-            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-
-      - name: Trigger deployment
-        if: github.event_name == 'push'
-        uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
-        with:
-          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          repository: ${{ secrets.CLOUD_DISPATCH }}
-          event-type: prowler-ui-deploy
-          client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'
.github/workflows/ui-checks.yml (34 changed lines, vendored, new file)
@@ -0,0 +1,34 @@
+name: "UI - Pull Request"
+
+on:
+  pull_request:
+    branches:
+      - master
+    paths:
+      - 'ui/**'
+
+jobs:
+  test-and-coverage:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        node-version: [20.x]
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+      - name: Setup Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v3
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: Install dependencies
+        working-directory: ./ui
+        run: npm install
+      - name: Run Healthcheck
+        working-directory: ./ui
+        run: npm run healthcheck
+      - name: Build the application
+        working-directory: ./ui
+        run: npm run build
.github/workflows/ui-codeql.yml (10 changed lines, vendored)
@@ -9,18 +9,20 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: UI - CodeQL
+name: "UI - CodeQL"
 
 on:
   push:
     branches:
       - "master"
+      - "v4.*"
       - "v5.*"
     paths:
       - "ui/**"
   pull_request:
     branches:
       - "master"
+      - "v4.*"
       - "v5.*"
     paths:
      - "ui/**"
@@ -44,16 +46,16 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13
+        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/ui-codeql-config.yml
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13
+        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/ui-pull-request.yml (62 changed lines, vendored, deleted)
@@ -1,62 +0,0 @@
-name: UI - Pull Request
-
-on:
-  push:
-    branches:
-      - "master"
-      - "v5.*"
-    paths:
-      - ".github/workflows/ui-pull-request.yml"
-      - "ui/**"
-  pull_request:
-    branches:
-      - master
-      - "v5.*"
-    paths:
-      - 'ui/**'
-env:
-  UI_WORKING_DIR: ./ui
-  IMAGE_NAME: prowler-ui
-
-jobs:
-  test-and-coverage:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        os: [ubuntu-latest]
-        node-version: [20.x]
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      - name: Setup Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
-        with:
-          node-version: ${{ matrix.node-version }}
-      - name: Install dependencies
-        working-directory: ./ui
-        run: npm install
-      - name: Run Healthcheck
-        working-directory: ./ui
-        run: npm run healthcheck
-      - name: Build the application
-        working-directory: ./ui
-        run: npm run build
-
-  test-container-build:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-      - name: Build Container
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
-        with:
-          context: ${{ env.UI_WORKING_DIR }}
-          # Always build using `prod` target
-          target: prod
-          push: false
-          tags: ${{ env.IMAGE_NAME }}:latest
-          outputs: type=docker
-          build-args: |
-            NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX
.gitignore (4 changed lines, vendored)
@@ -31,7 +31,7 @@ tags
 *.DS_Store
 
 # Prowler output
-/output
+output/
 
 # Prowler found secrets
 secrets-*/
@@ -45,12 +45,10 @@ junit-reports/
 # Terraform
 .terraform*
 *.tfstate
 *.tfstate.*
 
-# .env
-ui/.env*
-api/.env*
+.env.local
 
 # Coverage
 .coverage*
@@ -27,7 +27,6 @@ repos:
     hooks:
       - id: shellcheck
         exclude: contrib
-
 ## PYTHON
   - repo: https://github.com/myint/autoflake
     rev: v2.3.1
@@ -59,28 +58,11 @@ repos:
         args: ["--ignore=E266,W503,E203,E501,W605"]
 
   - repo: https://github.com/python-poetry/poetry
-    rev: 2.1.1
+    rev: 1.8.0
     hooks:
       - id: poetry-check
-        name: API - poetry-check
-        args: ["--directory=./api"]
-        pass_filenames: false
-
       - id: poetry-lock
-        name: API - poetry-lock
-        args: ["--directory=./api"]
-        pass_filenames: false
-
-      - id: poetry-check
-        name: SDK - poetry-check
-        args: ["--directory=./"]
-        pass_filenames: false
-
-      - id: poetry-lock
-        name: SDK - poetry-lock
-        args: ["--directory=./"]
-        pass_filenames: false
-
+        args: ["--no-update"]
 
   - repo: https://github.com/hadolint/hadolint
     rev: v2.13.0-beta
@@ -108,7 +90,7 @@ repos:
       - id: bandit
        name: bandit
        description: "Bandit is a tool for finding common security issues in Python code"
-        entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
+        entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/' -r .'
        language: system
        files: '.*\.py'
@@ -121,6 +103,7 @@ repos:
      - id: vulture
        name: vulture
        description: "Vulture finds unused code in Python programs."
-        entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/" --min-confidence 100 .'
+        entry: bash -c 'vulture --exclude "contrib" --min-confidence 100 .'
+        exclude: 'api/src/backend/'
        language: system
        files: '.*\.py'
Dockerfile (26 changed lines)
@@ -1,10 +1,10 @@
-FROM python:3.12.9-alpine3.20
+FROM python:3.12-alpine
 
 LABEL maintainer="https://github.com/prowler-cloud/prowler"
 
 # Update system dependencies and install essential tools
 #hadolint ignore=DL3018
-RUN apk --no-cache upgrade && apk --no-cache add curl git gcc python3-dev musl-dev linux-headers
+RUN apk --no-cache upgrade && apk --no-cache add curl git
 
 # Create non-root user
 RUN mkdir -p /home/prowler && \
@@ -18,25 +18,21 @@ WORKDIR /home/prowler
 COPY prowler/ /home/prowler/prowler/
 COPY dashboard/ /home/prowler/dashboard/
 COPY pyproject.toml /home/prowler
-COPY README.md /home/prowler/
+COPY README.md /home/prowler
 
 # Install Python dependencies
 ENV HOME='/home/prowler'
-ENV PATH="${HOME}/.local/bin:${PATH}"
-#hadolint ignore=DL3013
-RUN pip install --no-cache-dir --upgrade pip && \
-    pip install --no-cache-dir poetry
-
-# By default poetry does not compile Python source files to bytecode during installation.
-# This speeds up the installation process, but the first execution may take a little more
-# time because Python then compiles source files to bytecode automatically. If you want to
-# compile source files to bytecode during installation, you can use the --compile option
-RUN poetry install --compile && \
-    rm -rf ~/.cache/pip
+ENV PATH="$HOME/.local/bin:$PATH"
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
+    pip install --no-cache-dir .
 
 # Remove deprecated dash dependencies
 RUN pip uninstall dash-html-components -y && \
     pip uninstall dash-core-components -y
 
+# Remove Prowler directory and build files
+USER 0
+RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
+
 USER prowler
-ENTRYPOINT ["poetry", "run", "prowler"]
+ENTRYPOINT ["prowler"]
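With the entrypoint reduced to a bare `prowler`, running the CLI from the image is a one-liner. A minimal sketch; the local image tag and the mounted AWS credentials are assumptions about a typical setup:

    # Build the image locally, then run a scan; CLI args are passed straight to prowler.
    docker build -t prowler:local .
    docker run --rm -v "$HOME/.aws:/home/prowler/.aws:ro" prowler:local aws --list-checks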
56
README.md
56
README.md
@@ -3,7 +3,7 @@
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
<b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
@@ -29,7 +29,7 @@
<p align="center">
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler?include_prereleases"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>
@@ -43,7 +43,7 @@

# Description

**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.

## Prowler App

@@ -71,13 +71,10 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe

| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 564 | 82 | 33 | 10 |
| GCP | 78 | 13 | 6 | 3 |
| Azure | 140 | 18 | 7 | 3 |
| Kubernetes | 83 | 7 | 4 | 7 |
| Microsoft365 | 5 | 2 | 1 | 0 |

> You can list the checks, services, compliance frameworks and categories with `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
| AWS | 561 | 81 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 3 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories` |
| Azure | 139 | 18 -> `prowler azure --list-services` | 4 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |

# 💻 Installation

@@ -101,7 +98,6 @@ curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/mast
docker compose up -d
```

> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.

### From GitHub
@@ -109,7 +105,7 @@ docker compose up -d
**Requirements**

* `git` installed.
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.

@@ -119,7 +115,7 @@ docker compose up -d
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
poetry shell
set -a
source .env
docker compose up postgres valkey -d
@@ -127,11 +123,6 @@ cd src/backend
python manage.py migrate --database admin
gunicorn -c config/guniconf.py config.wsgi:application
```
> [!IMPORTANT]
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
>
> If your poetry version is below 2.0.0 you must keep using `poetry shell` to activate your environment.
> In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment

> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.

@@ -141,26 +132,13 @@ gunicorn -c config/guniconf.py config.wsgi:application
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
poetry shell
set -a
source .env
cd src/backend
python -m celery -A config.celery worker -l info -E
```

**Commands to run the API Scheduler**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
set -a
source .env
cd src/backend
python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
```

**Commands to run the UI**

``` console
@@ -175,7 +153,7 @@ npm start

## Prowler CLI
### Pip package
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python > 3.9.1, < 3.13:
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:

```console
pip install prowler
@@ -205,21 +183,15 @@ The container images are available here:

### From GitHub

Python > 3.9.1, < 3.13 is required with pip and poetry:
Python >= 3.9, < 3.13 is required with pip and poetry:

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler
eval $(poetry env activate)
poetry shell
poetry install
python prowler-cli.py -v
python prowler.py -v
```
> [!IMPORTANT]
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
>
> If your poetry version is below 2.0.0 you must keep using `poetry shell` to activate your environment.
> In case you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment

> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture
@@ -22,8 +22,6 @@ DJANGO_SECRETS_ENCRYPTION_KEY=""
# Decide whether to allow Django to manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=

# PostgreSQL settings
# If running django and celery on host, use 'localhost', else use 'postgres-db'
@@ -40,19 +38,3 @@ POSTGRES_DB=prowler_db
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0

# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

# Social login credentials
DJANGO_GOOGLE_OAUTH_CLIENT_ID=""
DJANGO_GOOGLE_OAUTH_CLIENT_SECRET=""
DJANGO_GOOGLE_OAUTH_CALLBACK_URL=""

DJANGO_GITHUB_OAUTH_CLIENT_ID=""
DJANGO_GITHUB_OAUTH_CLIENT_SECRET=""
DJANGO_GITHUB_OAUTH_CALLBACK_URL=""

# Deletion Task Batch Size
DJANGO_DELETION_BATCH_SIZE=5000
@@ -1,64 +0,0 @@
# Prowler API Changelog

All notable changes to the **Prowler API** are documented in this file.

---

## [v1.6.0] (Prowler UNRELEASED)

### Added

- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289).
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333).
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378).
- Added missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318).

---

## [v1.5.3] (Prowler v5.4.3)

### Fixed
- Added duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401).
- Added environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423).

---

## [v1.5.2] (Prowler v5.4.2)

### Changed
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349).

---

## [v1.5.1] (Prowler v5.4.1)

### Fixed
- Added a handled response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183).
- Fixed a race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172).
- Handled exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283).

---

## [v1.5.0] (Prowler v5.4.0)

### Added
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
- Add API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878).
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)

### Changed
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019).

---

## [v1.4.0] (Prowler v5.3.0)

### Changed
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).

---
@@ -1,4 +1,4 @@
FROM python:3.12.8-alpine3.20 AS build
FROM python:3.12-alpine AS build

LABEL maintainer="https://github.com/prowler-cloud/api"

@@ -21,8 +21,7 @@ COPY src/backend/ ./backend/

ENV PATH="/home/prowler/.local/bin:$PATH"

# Add `--no-root` to avoid installing the current project as a package
RUN poetry install --no-root && \
RUN poetry install && \
    rm -rf ~/.cache/pip

COPY docker-entrypoint.sh ./docker-entrypoint.sh
@@ -269,66 +269,3 @@ poetry shell
cd src/backend
pytest
```

# Custom commands

Django provides a way to create custom commands that can be run from the command line.

> These commands can be found in: ```prowler/api/src/backend/api/management/commands```

To run a custom command, you need to be in the `prowler/api/src/backend` directory and run:

```console
poetry shell
python manage.py <command_name>
```

A minimal sketch of what such a command looks like appears at the end of this section.

## Generate dummy data

```console
python manage.py findings --tenant <TENANT_ID> --findings <NUM_FINDINGS> --resources <NUM_RESOURCES> --batch <TRANSACTION_BATCH_SIZE> --alias <ALIAS>
```

This command creates, for a given tenant, a provider, a scan, and a set of related findings and resources.

> Scan progress and state are updated in real time.
> - 0-33%: Create resources.
> - 33-66%: Create findings.
> - 66%: Create resource-finding mapping.
>
> The last step is required to access the finding details, since the UI needs that mapping to display all the information.

### Example

```console
~/backend $ poetry run python manage.py findings --tenant fffb1893-3fc7-4623-a5d9-fae47da1c528 --findings 25000 --resources 1000 --batch 5000 --alias test-script

Starting data population
Tenant: fffb1893-3fc7-4623-a5d9-fae47da1c528
Alias: test-script
Resources: 1000
Findings: 25000
Batch size: 5000

Creating resources...
100%|███████████████████████| 1/1 [00:00<00:00, 7.72it/s]
Resources created successfully.

Creating findings...
100%|███████████████████████| 5/5 [00:05<00:00, 1.09s/it]
Findings created successfully.

Creating resource-finding mappings...
100%|███████████████████████| 5/5 [00:02<00:00, 1.81it/s]
Resource-finding mappings created successfully.

Successfully populated test data.
```
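For orientation, here is a minimal sketch of the `BaseCommand` structure Django expects for such commands. The command name, file name, and option are hypothetical illustrations, not the repo's actual `findings` command:

```python
# api/management/commands/hello_tenant.py -- hypothetical command, shown only
# to illustrate the standard Django management-command skeleton.
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Illustrative custom command: greets a tenant by ID."

    def add_arguments(self, parser):
        # Declares the CLI flags parsed into the `options` dict below.
        parser.add_argument("--tenant", required=True, help="Tenant UUID")

    def handle(self, *args, **options):
        # handle() is the command's entry point.
        self.stdout.write(self.style.SUCCESS(f"Hello, tenant {options['tenant']}"))
```

It would run as `python manage.py hello_tenant --tenant <TENANT_ID>` from the backend directory, exactly like the `findings` command documented above.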
@@ -28,7 +28,7 @@ start_prod_server() {

start_worker() {
    echo "Starting the worker..."
    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion -E --max-tasks-per-child 1
    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E
}

start_worker_beat() {
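The worker change above adds dedicated `scan-reports` and `deletion` queues and `--max-tasks-per-child 1`, which recycles each worker process after a single task. A hedged sketch of how a producer would target one of those queues with standard Celery; the task name here is made up for illustration:

```python
# Illustrative only: send_task(..., queue=...) is standard Celery API,
# but "tasks.delete_tenant_data" is a hypothetical task name.
from config.celery import app  # assumed app module, matching the commands above

app.send_task(
    "tasks.delete_tenant_data",
    kwargs={"tenant_id": "<tenant-uuid>"},
    queue="deletion",  # one of the -Q queues the worker consumes
)
```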
3109	api/poetry.lock (generated)
File diff suppressed because it is too large.
@@ -2,48 +2,41 @@
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]

[project]
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
    "celery[pytest] (>=5.4.0,<6.0.0)",
    "dj-rest-auth[with_social,jwt] (==7.0.1)",
    "django==5.1.7",
    "django-allauth==65.4.1",
    "django-celery-beat (>=2.7.0,<3.0.0)",
    "django-celery-results (>=2.5.1,<3.0.0)",
    "django-cors-headers==4.4.0",
    "django-environ==0.11.2",
    "django-filter==24.3",
    "django-guid==3.5.0",
    "django-postgres-extra (>=2.0.8,<3.0.0)",
    "djangorestframework==3.15.2",
    "djangorestframework-jsonapi==7.0.2",
    "djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
    "drf-nested-routers (>=0.94.1,<1.0.0)",
    "drf-spectacular==0.27.2",
    "drf-spectacular-jsonapi==0.5.1",
    "gunicorn==23.0.0",
    "prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
    "psycopg2-binary==2.9.9",
    "pytest-celery[redis] (>=1.0.1,<2.0.0)",
    "sentry-sdk[django] (>=2.20.0,<3.0.0)",
    "uuid6==2024.7.10"
]
[tool.poetry]
authors = ["Prowler Team"]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.6.0"
version = "1.0.0"

[project.scripts]
celery = "src.backend.config.settings.celery"
[tool.poetry.dependencies]
celery = {extras = ["pytest"], version = "^5.4.0"}
django = "5.1.1"
django-celery-beat = "^2.7.0"
django-celery-results = "^2.5.1"
django-cors-headers = "4.4.0"
django-environ = "0.11.2"
django-filter = "24.3"
django-guid = "3.5.0"
django-postgres-extra = "^2.0.8"
djangorestframework = "3.15.2"
djangorestframework-jsonapi = "7.0.2"
djangorestframework-simplejwt = "^5.3.1"
drf-nested-routers = "^0.94.1"
drf-spectacular = "0.27.2"
drf-spectacular-jsonapi = "0.5.1"
gunicorn = "23.0.0"
prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "master"}
psycopg2-binary = "2.9.9"
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
# Needed for prowler compatibility
python = ">=3.11,<3.13"
uuid6 = "2024.7.10"

[tool.poetry.group.dev.dependencies]
bandit = "1.7.9"
coverage = "7.5.4"
django-silk = "5.3.2"
docker = "7.1.0"
freezegun = "1.5.1"
mypy = "1.10.1"
@@ -55,6 +48,8 @@ pytest-env = "1.1.3"
pytest-randomly = "3.15.0"
pytest-xdist = "3.6.1"
ruff = "0.5.0"
safety = "3.2.9"
tqdm = "4.67.1"
vulture = "2.14"
safety = "3.2.3"
vulture = "2.11"

[tool.poetry.scripts]
celery = "src.backend.config.settings.celery"
@@ -1,61 +0,0 @@
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.db import transaction

from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.models import Membership, Role, Tenant, User, UserRoleRelationship


class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
    @staticmethod
    def get_user_by_email(email: str):
        try:
            return User.objects.get(email=email)
        except User.DoesNotExist:
            return None

    def pre_social_login(self, request, sociallogin):
        # Link existing accounts with the same email address
        email = sociallogin.account.extra_data.get("email")
        if email:
            existing_user = self.get_user_by_email(email)
            if existing_user:
                sociallogin.connect(request, existing_user)

    def save_user(self, request, sociallogin, form=None):
        """
        Called after the user data is fully populated from the provider
        and is about to be saved to the DB for the first time.
        """
        with transaction.atomic(using=MainRouter.admin_db):
            user = super().save_user(request, sociallogin, form)
            user.save(using=MainRouter.admin_db)
            social_account_name = sociallogin.account.extra_data.get("name")
            if social_account_name:
                user.name = social_account_name
                user.save(using=MainRouter.admin_db)

            tenant = Tenant.objects.using(MainRouter.admin_db).create(
                name=f"{user.email.split('@')[0]} default tenant"
            )
            with rls_transaction(str(tenant.id)):
                Membership.objects.using(MainRouter.admin_db).create(
                    user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
                )
                role = Role.objects.using(MainRouter.admin_db).create(
                    name="admin",
                    tenant_id=tenant.id,
                    manage_users=True,
                    manage_account=True,
                    manage_billing=True,
                    manage_providers=True,
                    manage_integrations=True,
                    manage_scans=True,
                    unlimited_visibility=True,
                )
                UserRoleRelationship.objects.using(MainRouter.admin_db).create(
                    user=user,
                    role=role,
                    tenant_id=tenant.id,
                )
        return user
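For context, django-allauth only picks up a custom adapter like the one above when it is registered through the `SOCIALACCOUNT_ADAPTER` setting. A minimal sketch; the dotted module path is an assumption about where this file lives, not something the diff confirms:

```python
# settings.py -- SOCIALACCOUNT_ADAPTER is a real django-allauth setting;
# the dotted path below is an assumed location for the adapter module.
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
```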
@@ -1,23 +1,20 @@
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
import uuid

from django.db import connection, transaction
from rest_framework import permissions
from rest_framework.exceptions import NotAuthenticated
from rest_framework.filters import SearchFilter
from rest_framework_json_api import filters
from rest_framework_json_api.serializers import ValidationError
from rest_framework_json_api.views import ModelViewSet
from rest_framework_simplejwt.authentication import JWTAuthentication

from api.db_router import MainRouter
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
from api.filters import CustomDjangoFilterBackend
from api.models import Role, Tenant
from api.rbac.permissions import HasPermissions


class BaseViewSet(ModelViewSet):
    authentication_classes = [JWTAuthentication]
    required_permissions = []
    permission_classes = [permissions.IsAuthenticated, HasPermissions]
    permission_classes = [permissions.IsAuthenticated]
    filter_backends = [
        filters.QueryParameterValidationFilter,
        filters.OrderingFilter,
@@ -31,17 +28,6 @@ class BaseViewSet(ModelViewSet):
    ordering_fields = "__all__"
    ordering = ["id"]

    def initial(self, request, *args, **kwargs):
        """
        Sets required_permissions before permissions are checked.
        """
        self.set_required_permissions()
        super().initial(request, *args, **kwargs)

    def set_required_permissions(self):
        """This is an abstract method that must be implemented by subclasses."""
        NotImplemented

    def get_queryset(self):
        raise NotImplementedError

@@ -61,7 +47,13 @@ class BaseRLSViewSet(BaseViewSet):
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
        try:
            uuid.UUID(tenant_id)
        except ValueError:
            raise ValidationError("Tenant ID must be a valid UUID")

        with connection.cursor() as cursor:
            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)

@@ -74,60 +66,13 @@ class BaseRLSViewSet(BaseViewSet):
class BaseTenantViewset(BaseViewSet):
    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            tenant = super().dispatch(request, *args, **kwargs)

        try:
            # If the request is a POST, create the admin role
            if request.method == "POST":
                isinstance(tenant, dict) and self._create_admin_role(tenant.data["id"])
        except Exception as e:
            self._handle_creation_error(e, tenant)
            raise

        return tenant

    def _create_admin_role(self, tenant_id):
        Role.objects.using(MainRouter.admin_db).create(
            name="admin",
            tenant_id=tenant_id,
            manage_users=True,
            manage_account=True,
            manage_billing=True,
            manage_providers=True,
            manage_integrations=True,
            manage_scans=True,
            unlimited_visibility=True,
        )

    def _handle_creation_error(self, error, tenant):
        if tenant.data.get("id"):
            try:
                Tenant.objects.using(MainRouter.admin_db).filter(
                    id=tenant.data["id"]
                ).delete()
            except ObjectDoesNotExist:
                pass  # Tenant might not exist, handle gracefully
        return super().dispatch(request, *args, **kwargs)

    def initial(self, request, *args, **kwargs):
        if (
            request.resolver_match.url_name != "tenant-detail"
            and request.method != "DELETE"
        ):
            user_id = str(request.user.id)
        user_id = str(request.user.id)

        with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
            return super().initial(request, *args, **kwargs)

        # TODO: DRY this when we have time
        if request.auth is None:
            raise NotAuthenticated

        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
        with connection.cursor() as cursor:
            cursor.execute(f"SELECT set_config('api.user_id', '{user_id}', TRUE);")
            return super().initial(request, *args, **kwargs)


@@ -147,6 +92,12 @@ class BaseUserViewset(BaseViewSet):
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
        try:
            uuid.UUID(tenant_id)
        except ValueError:
            raise ValidationError("Tenant ID must be a valid UUID")

        with connection.cursor() as cursor:
            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
        self.request.tenant_id = tenant_id
        return super().initial(request, *args, **kwargs)
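One detail worth calling out in the hunks above: PostgreSQL's `set_config(name, value, is_local)` with `is_local = TRUE` lasts only until the surrounding transaction ends, which is why both the `rls_transaction` context manager and the inline cursor variants set the tenant variable inside an open transaction. A standalone sketch of the behavior (not code from this diff):

```python
from django.db import connection, transaction

with transaction.atomic():
    with connection.cursor() as cursor:
        # TRUE = transaction-local: the setting vanishes at COMMIT/ROLLBACK,
        # so one request can never leak its tenant ID into another's connection.
        cursor.execute("SELECT set_config('api.tenant_id', %s, TRUE);", ["<tenant-uuid>"])
        cursor.execute("SELECT current_setting('api.tenant_id', TRUE);")
        print(cursor.fetchone()[0])  # -> <tenant-uuid>, until the transaction ends
```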
@@ -1,10 +1,9 @@
from types import MappingProxyType

from api.models import Provider
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata

from api.models import Provider

PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
PROWLER_CHECKS = {}
@@ -1,29 +1,18 @@
ALLOWED_APPS = ("django", "socialaccount", "account", "authtoken", "silk")


class MainRouter:
    default_db = "default"
    admin_db = "admin"

    def db_for_read(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table
        if model_table_name.startswith("django_") or any(
            model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS
        ):
        if model_table_name.startswith("django_"):
            return self.admin_db
        return None

    def db_for_write(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table
        if any(model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS):
        if model_table_name.startswith("django_"):
            return self.admin_db
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):  # noqa: F841
        return db == self.admin_db

    def allow_relation(self, obj1, obj2, **hints):  # noqa: F841
        # Allow relations if both objects are in either "default" or "admin" db connectors
        if {obj1._state.db, obj2._state.db} <= {self.default_db, self.admin_db}:
            return True
        return None
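For context, Django only consults a router like `MainRouter` once it is registered via the standard `DATABASE_ROUTERS` setting; a minimal sketch, with the dotted path and connection details assumed rather than taken from this diff:

```python
# settings.py -- DATABASE_ROUTERS is standard Django; the dotted path is an assumption.
DATABASE_ROUTERS = ["api.db_router.MainRouter"]

DATABASES = {
    # "default" is the RLS-constrained application connection; "admin" is the
    # privileged connection the router steers Django/allauth tables to.
    "default": {"ENGINE": "django.db.backends.postgresql", "NAME": "prowler_db"},
    "admin": {"ENGINE": "django.db.backends.postgresql", "NAME": "prowler_db"},
}
```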
@@ -1,15 +1,13 @@
import secrets
import uuid
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone

from django.conf import settings
from django.contrib.auth.models import BaseUserManager
from django.core.paginator import Paginator
from django.db import connection, models, transaction
from django_celery_beat.models import PeriodicTask
from psycopg2 import connect as psycopg2_connect
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
from rest_framework_json_api.serializers import ValidationError

DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
DB_PASSWORD = (
@@ -25,8 +23,6 @@ TASK_RUNNER_DB_TABLE = "django_celery_results_taskresult"
POSTGRES_TENANT_VAR = "api.tenant_id"
POSTGRES_USER_VAR = "api.user_id"

SET_CONFIG_QUERY = "SELECT set_config(%s, %s::text, TRUE);"


@contextmanager
def psycopg_connection(database_alias: str):
@@ -48,23 +44,10 @@ def psycopg_connection(database_alias: str):


@contextmanager
def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
    """
    Creates a new database transaction setting the given configuration value for Postgres RLS.
    It validates that the value is a valid UUID.

    Args:
        value (str): Database configuration parameter value.
        parameter (str): Database configuration parameter name, by default 'api.tenant_id'.
    """
def tenant_transaction(tenant_id: str):
    with transaction.atomic():
        with connection.cursor() as cursor:
            try:
                # just in case the value is a UUID object
                uuid.UUID(str(value))
            except ValueError:
                raise ValidationError("Must be a valid UUID")
            cursor.execute(SET_CONFIG_QUERY, [parameter, value])
            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
            yield cursor


@@ -106,12 +89,11 @@ def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
    return "".join(secrets.choice(symbols or _symbols) for _ in range(length))


def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_SIZE):
def batch_delete(queryset, batch_size=5000):
    """
    Deletes objects in batches and returns the total number of deletions and a summary.

    Args:
        tenant_id (str): Tenant ID the queryset belongs to.
        queryset (QuerySet): The queryset of objects to delete.
        batch_size (int): The number of objects to delete in each batch.

@@ -121,37 +103,21 @@ def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_
    total_deleted = 0
    deletion_summary = {}

    while True:
        with rls_transaction(tenant_id, POSTGRES_TENANT_VAR):
            # Get a batch of IDs to delete
            batch_ids = set(
                queryset.values_list("id", flat=True).order_by("id")[:batch_size]
            )
        if not batch_ids:
            # No more objects to delete
            break
    paginator = Paginator(queryset.order_by("id").only("id"), batch_size)

        deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
    for page_num in paginator.page_range:
        batch_ids = [obj.id for obj in paginator.page(page_num).object_list]

        deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()

        total_deleted += deleted_count

        for model_label, count in deleted_info.items():
            deletion_summary[model_label] = deletion_summary.get(model_label, 0) + count

    return total_deleted, deletion_summary


def delete_related_daily_task(provider_id: str):
    """
    Deletes the periodic task associated with a specific provider.

    Args:
        provider_id (str): The unique identifier for the provider
            whose related periodic task should be deleted.
    """
    task_name = f"scan-perform-scheduled-{provider_id}"
    PeriodicTask.objects.filter(name=task_name).delete()


# Postgres Enums


@@ -333,15 +299,3 @@ class InvitationStateEnum(EnumType):
class InvitationStateEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("invitation_state", *args, **kwargs)


# Postgres enum definition for Integration type


class IntegrationTypeEnum(EnumType):
    enum_type_name = "integration_type"


class IntegrationTypeEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("integration_type", *args, **kwargs)
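A minimal usage sketch tying together the two helpers changed above, assuming the new signatures shown in this diff (`rls_transaction(value, parameter=POSTGRES_TENANT_VAR)` and `batch_delete(tenant_id, queryset, batch_size=...)`); the call site itself is hypothetical:

```python
from api.db_utils import batch_delete, rls_transaction
from api.models import Finding  # Finding is a real model; this call site is not from the diff

tenant_id = "fffb1893-3fc7-4623-a5d9-fae47da1c528"

# Statements inside the block run with api.tenant_id set, so Postgres RLS
# policies scope the query to that tenant's rows.
with rls_transaction(tenant_id):
    failing = Finding.objects.filter(status="FAIL").count()

# batch_delete re-enters rls_transaction once per batch, deleting at most
# batch_size rows per transaction to keep locks and WAL churn bounded.
total, summary = batch_delete(tenant_id, Finding.objects.filter(delta="new"), batch_size=5000)
print(total, summary)
```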
@@ -1,13 +1,9 @@
import uuid
from functools import wraps

from django.db import connection, transaction
from rest_framework_json_api.serializers import ValidationError

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY


def set_tenant(func=None, *, keep_tenant=False):
def set_tenant(func):
    """
    Decorator to set the tenant context for a Celery task based on the provided tenant_id.

@@ -35,34 +31,22 @@
        pass

    # When calling the task
    some_task.delay(arg1, tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5")
    some_task.delay(arg1, tenant_id="1234-abcd-5678")

    # The tenant context will be set before the task logic executes.
    """

    def decorator(func):
        @wraps(func)
        @transaction.atomic
        def wrapper(*args, **kwargs):
            try:
                if not keep_tenant:
                    tenant_id = kwargs.pop("tenant_id")
                else:
                    tenant_id = kwargs["tenant_id"]
            except KeyError:
                raise KeyError("This task requires the tenant_id")
            try:
                uuid.UUID(tenant_id)
            except ValueError:
                raise ValidationError("Tenant ID must be a valid UUID")
            with connection.cursor() as cursor:
                cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
    @wraps(func)
    @transaction.atomic
    def wrapper(*args, **kwargs):
        try:
            tenant_id = kwargs.pop("tenant_id")
        except KeyError:
            raise KeyError("This task requires the tenant_id")

            return func(*args, **kwargs)
        with connection.cursor() as cursor:
            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")

        return wrapper
        return func(*args, **kwargs)

    if func is None:
        return decorator
    else:
        return decorator(func)
    return wrapper
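The `func=None, *, keep_tenant=False` signature in the new version is the standard pattern for a decorator that works both bare and with keyword arguments. A minimal self-contained sketch of the pattern, with made-up names (not code from this repo):

```python
from functools import wraps


def set_flag(func=None, *, keep=False):
    # Works as @set_flag and as @set_flag(keep=True): when called with
    # arguments, func is None and we return the real decorator instead.
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            flag = kwargs["flag"] if keep else kwargs.pop("flag")
            print(f"flag={flag}")
            return func(*args, **kwargs)
        return wrapper
    return decorator if func is None else decorator(func)


@set_flag
def plain(**kwargs):
    return kwargs  # "flag" was popped before the call


@set_flag(keep=True)
def keeps_kwarg(**kwargs):
    return kwargs  # "flag" is still present
```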
@@ -3,7 +3,7 @@ from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_json_api.exceptions import exception_handler
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError


class ModelValidationError(ValidationError):
@@ -1,4 +1,4 @@
from datetime import date, datetime, timedelta, timezone
from datetime import date, datetime, timezone

from django.conf import settings
from django.db.models import Q
@@ -24,16 +24,13 @@ from api.db_utils import (
from api.models import (
    ComplianceOverview,
    Finding,
    Integration,
    Invitation,
    Membership,
    PermissionChoices,
    Provider,
    ProviderGroup,
    ProviderSecret,
    Resource,
    ResourceTag,
    Role,
    Scan,
    ScanSummary,
    SeverityChoices,
@@ -287,9 +284,6 @@ class FindingFilter(FilterSet):
    status = ChoiceFilter(choices=StatusChoices.choices)
    severity = ChoiceFilter(choices=SeverityChoices)
    impact = ChoiceFilter(choices=SeverityChoices)
    muted = BooleanFilter(
        help_text="If this filter is not provided, muted and non-muted findings will be returned."
    )

    resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")

@@ -323,41 +317,13 @@ class FindingFilter(FilterSet):
        field_name="resources__type", lookup_expr="icontains"
    )

    # Temporarily disabled until we implement tag filtering in the UI
    # resource_tag_key = CharFilter(field_name="resources__tags__key")
    # resource_tag_key__in = CharInFilter(
    #     field_name="resources__tags__key", lookup_expr="in"
    # )
    # resource_tag_key__icontains = CharFilter(
    #     field_name="resources__tags__key", lookup_expr="icontains"
    # )
    # resource_tag_value = CharFilter(field_name="resources__tags__value")
    # resource_tag_value__in = CharInFilter(
    #     field_name="resources__tags__value", lookup_expr="in"
    # )
    # resource_tag_value__icontains = CharFilter(
    #     field_name="resources__tags__value", lookup_expr="icontains"
    # )
    # resource_tags = CharInFilter(
    #     method="filter_resource_tag",
    #     lookup_expr="in",
    #     help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
    #     "separated by commas.",
    # )

    scan = UUIDFilter(method="filter_scan_id")
    scan__in = UUIDInFilter(method="filter_scan_id_in")

    inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__gte = DateFilter(
        method="filter_inserted_at_gte",
        help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
    )
    inserted_at__lte = DateFilter(
        method="filter_inserted_at_lte",
        help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
    )
    inserted_at__gte = DateFilter(method="filter_inserted_at_gte")
    inserted_at__lte = DateFilter(method="filter_inserted_at_lte")

    class Meta:
        model = Finding
@@ -385,52 +351,6 @@ class FindingFilter(FilterSet):
        },
    }

    def filter_queryset(self, queryset):
        if not (self.data.get("scan") or self.data.get("scan__in")) and not (
            self.data.get("inserted_at")
            or self.data.get("inserted_at__date")
            or self.data.get("inserted_at__gte")
            or self.data.get("inserted_at__lte")
        ):
            raise ValidationError(
                [
                    {
                        "detail": "At least one date filter is required: filter[inserted_at], filter[inserted_at.gte], "
                        "or filter[inserted_at.lte].",
                        "status": 400,
                        "source": {"pointer": "/data/attributes/inserted_at"},
                        "code": "required",
                    }
                ]
            )

        gte_date = (
            datetime.strptime(self.data.get("inserted_at__gte"), "%Y-%m-%d").date()
            if self.data.get("inserted_at__gte")
            else datetime.now(timezone.utc).date()
        )
        lte_date = (
            datetime.strptime(self.data.get("inserted_at__lte"), "%Y-%m-%d").date()
            if self.data.get("inserted_at__lte")
            else datetime.now(timezone.utc).date()
        )

        if abs(lte_date - gte_date) > timedelta(
            days=settings.FINDINGS_MAX_DAYS_IN_RANGE
        ):
            raise ValidationError(
                [
                    {
                        "detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
                        "status": 400,
                        "source": {"pointer": "/data/attributes/inserted_at"},
                        "code": "invalid",
                    }
                ]
            )

        return super().filter_queryset(queryset)

    # Convert filter values to UUIDv7 values for use with partitioning
    def filter_scan_id(self, queryset, name, value):
        try:
@@ -451,7 +371,9 @@ class FindingFilter(FilterSet):
        )

        return (
            queryset.filter(id__gte=start).filter(id__lt=end).filter(scan_id=value_uuid)
            queryset.filter(id__gte=start)
            .filter(id__lt=end)
            .filter(scan__id=value_uuid)
        )

    def filter_scan_id_in(self, queryset, name, value):
@@ -476,42 +398,31 @@ class FindingFilter(FilterSet):
            ]
        )
        if start == end:
            return queryset.filter(id__gte=start).filter(scan_id__in=uuid_list)
            return queryset.filter(id__gte=start).filter(scan__id__in=uuid_list)
        else:
            return (
                queryset.filter(id__gte=start)
                .filter(id__lt=end)
                .filter(scan_id__in=uuid_list)
                .filter(scan__id__in=uuid_list)
            )

    def filter_inserted_at(self, queryset, name, value):
        datetime_value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(datetime_value))
        end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
        value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__gte=start, id__lt=end)
        return queryset.filter(id__gte=start).filter(inserted_at__date=value)

    def filter_inserted_at_gte(self, queryset, name, value):
        datetime_value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(datetime_value))
        value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__gte=start)
        return queryset.filter(id__gte=start).filter(inserted_at__gte=value)

    def filter_inserted_at_lte(self, queryset, name, value):
        datetime_value = self.maybe_date_to_datetime(value)
        end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
        value = self.maybe_date_to_datetime(value)
        end = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__lt=end)

    def filter_resource_tag(self, queryset, name, value):
        overall_query = Q()
        for key_value_pair in value:
            tag_key, tag_value = key_value_pair.split(":", 1)
            overall_query |= Q(
                resources__tags__key__icontains=tag_key,
                resources__tags__value__icontains=tag_value,
            )
        return queryset.filter(overall_query).distinct()
        return queryset.filter(id__lte=end).filter(inserted_at__lte=value)

    @staticmethod
    def maybe_date_to_datetime(value):
@@ -570,26 +481,6 @@ class UserFilter(FilterSet):
    }


class RoleFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    permission_state = ChoiceFilter(
        choices=PermissionChoices.choices, method="filter_permission_state"
    )

    def filter_permission_state(self, queryset, name, value):
        return Role.filter_by_permission_state(queryset, value)

    class Meta:
        model = Role
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class ComplianceOverviewFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
@@ -617,6 +508,12 @@ class ScanSummaryFilter(FilterSet):
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    region = CharFilter(field_name="region")
    muted_findings = BooleanFilter(method="filter_muted_findings")

    def filter_muted_findings(self, queryset, name, value):
        if not value:
            return queryset.exclude(muted__gt=0)
        return queryset

    class Meta:
        model = ScanSummary
@@ -624,39 +521,3 @@ class ScanSummaryFilter(FilterSet):
            "inserted_at": ["date", "gte", "lte"],
            "region": ["exact", "icontains", "in"],
        }


class ServiceOverviewFilter(ScanSummaryFilter):
    def is_valid(self):
        # Check if at least one of the inserted_at filters is present
        inserted_at_filters = [
            self.data.get("inserted_at"),
            self.data.get("inserted_at__gte"),
            self.data.get("inserted_at__lte"),
        ]
        if not any(inserted_at_filters):
            raise ValidationError(
                {
                    "inserted_at": [
                        "At least one of filter[inserted_at], filter[inserted_at__gte], or "
                        "filter[inserted_at__lte] is required."
                    ]
                }
            )
        return super().is_valid()


class IntegrationFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
    integration_type__in = ChoiceInFilter(
        choices=Integration.IntegrationChoices.choices,
        field_name="integration_type",
        lookup_expr="in",
    )

    class Meta:
        model = Integration
        fields = {
            "inserted_at": ["date", "gte", "lte"],
        }
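The date filters above lean on UUIDv7's time-ordered layout: because a Finding's primary key embeds its creation time in the top 48 bits, a date filter can be rewritten as an `id` range that prunes partitions. A minimal sketch of the idea behind helpers like `datetime_to_uuid7`/`uuid7_start` (names taken from the diff; the repo's actual implementation may differ, and `uuid7_time_floor` below is a hypothetical name):

```python
import uuid
from datetime import datetime, timezone


def uuid7_time_floor(dt: datetime) -> uuid.UUID:
    """Smallest possible UUIDv7 for a given instant (illustrative helper)."""
    ms = int(dt.timestamp() * 1000)
    value = ms << 80   # RFC 9562: unix_ts_ms occupies the top 48 of 128 bits
    value |= 0x7 << 76  # 4-bit version field = 7
    value |= 0x2 << 62  # 2-bit variant field = 0b10
    return uuid.UUID(int=value)


start = uuid7_time_floor(datetime(2024, 10, 18, tzinfo=timezone.utc))
end = uuid7_time_floor(datetime(2024, 10, 19, tzinfo=timezone.utc))
# Finding.objects.filter(id__gte=start, id__lt=end) then touches only the
# partitions whose UUIDv7 key range overlaps that day.
```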
@@ -122,22 +122,6 @@
            "scanner_args": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:45:26.352Z",
            "updated_at": "2024-10-18T11:16:23.533Z",
            "provider": "kubernetes",
            "uid": "gke_lucky-coast-419309_us-central1_autopilot-cluster-2",
            "alias": "k8s_testing_2",
            "connected": true,
            "connection_last_checked_at": "2024-10-18T11:16:23.503Z",
            "metadata": {},
            "scanner_args": {}
        }
    },
    {
        "model": "api.providersecret",
        "pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",
@@ -11,7 +11,9 @@
            "unique_resource_count": 1,
            "duration": 5,
            "scanner_args": {
                "checks_to_execute": ["accessanalyzer_enabled"]
                "checks_to_execute": [
                    "accessanalyzer_enabled"
                ]
            },
            "inserted_at": "2024-09-01T17:25:27.050Z",
            "started_at": "2024-09-01T17:25:27.050Z",
@@ -31,7 +33,9 @@
            "unique_resource_count": 1,
            "duration": 20,
            "scanner_args": {
                "checks_to_execute": ["accessanalyzer_enabled"]
                "checks_to_execute": [
                    "accessanalyzer_enabled"
                ]
            },
            "inserted_at": "2024-09-02T17:24:27.050Z",
            "started_at": "2024-09-02T17:24:27.050Z",
@@ -51,7 +55,9 @@
            "unique_resource_count": 10,
            "duration": 10,
            "scanner_args": {
                "checks_to_execute": ["cloudsql_instance_automated_backups"]
                "checks_to_execute": [
                    "cloudsql_instance_automated_backups"
                ]
            },
            "inserted_at": "2024-09-02T19:26:27.050Z",
            "started_at": "2024-09-02T19:26:27.050Z",
@@ -71,7 +77,9 @@
            "unique_resource_count": 1,
            "duration": 35,
            "scanner_args": {
                "checks_to_execute": ["accessanalyzer_enabled"]
                "checks_to_execute": [
                    "accessanalyzer_enabled"
                ]
            },
            "inserted_at": "2024-09-02T19:27:27.050Z",
            "started_at": "2024-09-02T19:27:27.050Z",
@@ -89,7 +97,9 @@
            "name": "test scheduled aws scan",
            "state": "available",
            "scanner_args": {
                "checks_to_execute": ["cloudformation_stack_outputs_find_secrets"]
                "checks_to_execute": [
                    "cloudformation_stack_outputs_find_secrets"
                ]
            },
            "scheduled_at": "2030-09-02T19:20:27.050Z",
            "inserted_at": "2024-09-02T19:24:27.050Z",
@@ -168,7 +178,9 @@
            "unique_resource_count": 19,
            "progress": 100,
            "scanner_args": {
                "checks_to_execute": ["accessanalyzer_enabled"]
                "checks_to_execute": [
                    "accessanalyzer_enabled"
                ]
            },
            "duration": 7,
            "scheduled_at": null,
@@ -178,56 +190,6 @@
            "completed_at": "2024-10-18T10:46:05.127Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "6dd8925f-a52d-48de-a546-d2d90db30ab1",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "name": "real scan azure",
            "provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
            "trigger": "manual",
            "state": "completed",
            "unique_resource_count": 20,
            "progress": 100,
            "scanner_args": {
                "checks_to_execute": [
                    "accessanalyzer_enabled",
                    "account_security_contact_information_is_registered"
                ]
            },
            "duration": 4,
            "scheduled_at": null,
            "inserted_at": "2024-10-18T11:16:21.358Z",
            "updated_at": "2024-10-18T11:16:26.060Z",
            "started_at": "2024-10-18T11:16:21.593Z",
            "completed_at": "2024-10-18T11:16:26.060Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "4ca7ce89-3236-41a8-a369-8937bc152af5",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "name": "real scan k8s",
            "provider": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
            "trigger": "manual",
            "state": "completed",
            "unique_resource_count": 20,
            "progress": 100,
            "scanner_args": {
                "checks_to_execute": [
                    "accessanalyzer_enabled",
                    "account_security_contact_information_is_registered"
                ]
            },
            "duration": 4,
            "scheduled_at": null,
            "inserted_at": "2024-10-18T11:16:21.358Z",
            "updated_at": "2024-10-18T11:16:26.060Z",
            "started_at": "2024-10-18T11:16:21.593Z",
            "completed_at": "2024-10-18T11:16:26.060Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",
@@ -6,7 +6,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.823Z",
            "updated_at": "2024-10-18T10:46:04.841Z",
            "first_seen_at": "2024-10-18T10:46:04.823Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -62,7 +61,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.855Z",
            "updated_at": "2024-10-18T10:46:04.858Z",
            "first_seen_at": "2024-10-18T10:46:04.855Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -118,7 +116,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.869Z",
            "updated_at": "2024-10-18T10:46:04.876Z",
            "first_seen_at": "2024-10-18T10:46:04.869Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -174,7 +171,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.888Z",
            "updated_at": "2024-10-18T10:46:04.892Z",
            "first_seen_at": "2024-10-18T10:46:04.888Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -230,7 +226,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.901Z",
            "updated_at": "2024-10-18T10:46:04.905Z",
            "first_seen_at": "2024-10-18T10:46:04.901Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -286,7 +281,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.915Z",
            "updated_at": "2024-10-18T10:46:04.919Z",
            "first_seen_at": "2024-10-18T10:46:04.915Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -342,7 +336,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.929Z",
            "updated_at": "2024-10-18T10:46:04.934Z",
            "first_seen_at": "2024-10-18T10:46:04.929Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -398,7 +391,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.944Z",
            "updated_at": "2024-10-18T10:46:04.947Z",
            "first_seen_at": "2024-10-18T10:46:04.944Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -454,7 +446,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.957Z",
            "updated_at": "2024-10-18T10:46:04.962Z",
            "first_seen_at": "2024-10-18T10:46:04.957Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
            "delta": "new",
            "status": "PASS",
@@ -510,7 +501,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.971Z",
            "updated_at": "2024-10-18T10:46:04.975Z",
            "first_seen_at": "2024-10-18T10:46:04.971Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -566,7 +556,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.984Z",
            "updated_at": "2024-10-18T10:46:04.989Z",
            "first_seen_at": "2024-10-18T10:46:04.984Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -622,7 +611,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.999Z",
            "updated_at": "2024-10-18T10:46:05.003Z",
            "first_seen_at": "2024-10-18T10:46:04.999Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -678,7 +666,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.013Z",
            "updated_at": "2024-10-18T10:46:05.018Z",
            "first_seen_at": "2024-10-18T10:46:05.013Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -734,7 +721,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.029Z",
            "updated_at": "2024-10-18T10:46:05.033Z",
            "first_seen_at": "2024-10-18T10:46:05.029Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -790,7 +776,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.045Z",
            "updated_at": "2024-10-18T10:46:05.050Z",
            "first_seen_at": "2024-10-18T10:46:05.045Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -846,7 +831,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.061Z",
            "updated_at": "2024-10-18T10:46:05.065Z",
            "first_seen_at": "2024-10-18T10:46:05.061Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -902,7 +886,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.080Z",
            "updated_at": "2024-10-18T10:46:05.085Z",
            "first_seen_at": "2024-10-18T10:46:05.080Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -958,7 +941,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.099Z",
            "updated_at": "2024-10-18T10:46:05.104Z",
            "first_seen_at": "2024-10-18T10:46:05.099Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -1014,7 +996,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.115Z",
            "updated_at": "2024-10-18T10:46:05.121Z",
            "first_seen_at": "2024-10-18T10:46:05.115Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
            "delta": "new",
            "status": "FAIL",
@@ -1070,7 +1051,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.489Z",
            "updated_at": "2024-10-18T11:16:24.506Z",
            "first_seen_at": "2024-10-18T10:46:04.823Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1126,7 +1106,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.518Z",
            "updated_at": "2024-10-18T11:16:24.521Z",
            "first_seen_at": "2024-10-18T10:46:04.855Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1182,7 +1161,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.526Z",
            "updated_at": "2024-10-18T11:16:24.529Z",
            "first_seen_at": "2024-10-18T10:46:04.869Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1238,7 +1216,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.535Z",
            "updated_at": "2024-10-18T11:16:24.538Z",
            "first_seen_at": "2024-10-18T10:46:04.888Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1294,7 +1271,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.544Z",
            "updated_at": "2024-10-18T11:16:24.546Z",
            "first_seen_at": "2024-10-18T10:46:04.901Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1350,7 +1326,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.551Z",
            "updated_at": "2024-10-18T11:16:24.554Z",
            "first_seen_at": "2024-10-18T10:46:04.915Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1406,7 +1381,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.560Z",
            "updated_at": "2024-10-18T11:16:24.562Z",
            "first_seen_at": "2024-10-18T10:46:04.929Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1462,7 +1436,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.567Z",
            "updated_at": "2024-10-18T11:16:24.569Z",
            "first_seen_at": "2024-10-18T10:46:04.944Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1518,7 +1491,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.573Z",
            "updated_at": "2024-10-18T11:16:24.575Z",
            "first_seen_at": "2024-10-18T10:46:04.957Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
            "delta": null,
            "status": "PASS",
@@ -1574,7 +1546,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.580Z",
            "updated_at": "2024-10-18T11:16:24.582Z",
            "first_seen_at": "2024-10-18T10:46:04.971Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1630,7 +1601,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.587Z",
            "updated_at": "2024-10-18T11:16:24.589Z",
            "first_seen_at": "2024-10-18T10:46:04.984Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1686,7 +1656,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.595Z",
            "updated_at": "2024-10-18T11:16:24.597Z",
            "first_seen_at": "2024-10-18T10:46:04.999Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1742,7 +1711,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.602Z",
            "updated_at": "2024-10-18T11:16:24.604Z",
            "first_seen_at": "2024-10-18T10:46:05.013Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1798,7 +1766,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.610Z",
            "updated_at": "2024-10-18T11:16:24.612Z",
            "first_seen_at": "2024-10-18T10:46:05.029Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1854,7 +1821,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.617Z",
            "updated_at": "2024-10-18T11:16:24.620Z",
            "first_seen_at": "2024-10-18T10:46:05.045Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1910,7 +1876,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.625Z",
            "updated_at": "2024-10-18T11:16:24.627Z",
            "first_seen_at": "2024-10-18T10:46:05.061Z",
            "uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
            "delta": null,
            "status": "FAIL",
@@ -1966,7 +1931,6 @@
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:24.632Z",
|
||||
"updated_at": "2024-10-18T11:16:24.634Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.080Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -2022,7 +1986,6 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.639Z",
|
||||
"updated_at": "2024-10-18T11:16:24.642Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.099Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -2078,7 +2041,6 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.646Z",
|
||||
"updated_at": "2024-10-18T11:16:24.648Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.115Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -2134,7 +2096,6 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:26.033Z",
|
||||
"updated_at": "2024-10-18T11:16:26.045Z",
|
||||
"first_seen_at": "2024-10-18T11:16:26.033Z",
|
||||
"uid": "prowler-aws-account_security_contact_information_is_registered-112233445566-us-east-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "MANUAL",
|
||||
|
||||
@@ -58,96 +58,5 @@
|
||||
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
|
||||
"inserted_at": "2024-11-13T11:55:41.237Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.role",
|
||||
"pk": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "admin_test",
|
||||
"manage_users": true,
|
||||
"manage_account": true,
|
||||
"manage_billing": true,
|
||||
"manage_providers": true,
|
||||
"manage_integrations": true,
|
||||
"manage_scans": true,
|
||||
"unlimited_visibility": true,
|
||||
"inserted_at": "2024-11-20T15:32:42.402Z",
|
||||
"updated_at": "2024-11-20T15:32:42.402Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.role",
|
||||
"pk": "845ff03a-87ef-42ba-9786-6577c70c4df0",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "first_role",
|
||||
"manage_users": true,
|
||||
"manage_account": true,
|
||||
"manage_billing": true,
|
||||
"manage_providers": true,
|
||||
"manage_integrations": false,
|
||||
"manage_scans": false,
|
||||
"unlimited_visibility": true,
|
||||
"inserted_at": "2024-11-20T15:31:53.239Z",
|
||||
"updated_at": "2024-11-20T15:31:53.239Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.role",
|
||||
"pk": "902d726c-4bd5-413a-a2a4-f7b4754b6b20",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "third_role",
|
||||
"manage_users": false,
|
||||
"manage_account": false,
|
||||
"manage_billing": false,
|
||||
"manage_providers": false,
|
||||
"manage_integrations": false,
|
||||
"manage_scans": true,
|
||||
"unlimited_visibility": false,
|
||||
"inserted_at": "2024-11-20T15:34:05.440Z",
|
||||
"updated_at": "2024-11-20T15:34:05.440Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.roleprovidergrouprelationship",
|
||||
"pk": "57fd024a-0a7f-49b4-a092-fa0979a07aaf",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
|
||||
"inserted_at": "2024-11-20T15:32:42.402Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.roleprovidergrouprelationship",
|
||||
"pk": "a3cd0099-1c13-4df1-a5e5-ecdfec561b35",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"provider_group": "481769f5-db2b-447b-8b00-1dee18db90ec",
|
||||
"inserted_at": "2024-11-20T15:32:42.402Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.roleprovidergrouprelationship",
|
||||
"pk": "cfd84182-a058-40c2-af3c-0189b174940f",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
|
||||
"inserted_at": "2024-11-20T15:32:42.402Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.userrolerelationship",
|
||||
"pk": "92339663-e954-4fd8-98fb-8bfe15949975",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
|
||||
"inserted_at": "2024-11-20T15:36:14.302Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
File diff suppressed because one or more lines are too long
@@ -1,237 +0,0 @@
import random
from datetime import datetime, timezone
from math import ceil
from uuid import uuid4

from django.core.management.base import BaseCommand
from tqdm import tqdm

from api.db_utils import rls_transaction
from api.models import (
    Finding,
    Provider,
    Resource,
    ResourceFindingMapping,
    Scan,
    StatusChoices,
)
from prowler.lib.check.models import CheckMetadata


class Command(BaseCommand):
    help = "Populates the database with test data for performance testing."

    def add_arguments(self, parser):
        parser.add_argument(
            "--tenant",
            type=str,
            required=True,
            help="Tenant id for which the data will be populated.",
        )
        parser.add_argument(
            "--resources",
            type=int,
            required=True,
            help="The number of resources to create.",
        )
        parser.add_argument(
            "--findings",
            type=int,
            required=True,
            help="The number of findings to create.",
        )
        parser.add_argument(
            "--batch", type=int, required=True, help="The batch size for bulk creation."
        )
        parser.add_argument(
            "--alias",
            type=str,
            required=False,
            help="Optional alias for the provider and scan",
        )

    def handle(self, *args, **options):
        tenant_id = options["tenant"]
        num_resources = options["resources"]
        num_findings = options["findings"]
        batch_size = options["batch"]
        alias = options["alias"] or "Testing"
        uid_token = str(uuid4())

        self.stdout.write(self.style.NOTICE("Starting data population"))
        self.stdout.write(self.style.NOTICE(f"\tTenant: {tenant_id}"))
        self.stdout.write(self.style.NOTICE(f"\tAlias: {alias}"))
        self.stdout.write(self.style.NOTICE(f"\tResources: {num_resources}"))
        self.stdout.write(self.style.NOTICE(f"\tFindings: {num_findings}"))
        self.stdout.write(self.style.NOTICE(f"\tBatch size: {batch_size}\n\n"))

        # Resource metadata
        possible_regions = [
            "us-east-1",
            "us-east-2",
            "us-west-1",
            "us-west-2",
            "ca-central-1",
            "eu-central-1",
            "eu-west-1",
            "eu-west-2",
            "eu-west-3",
            "ap-southeast-1",
            "ap-southeast-2",
            "ap-northeast-1",
            "ap-northeast-2",
            "ap-south-1",
            "sa-east-1",
        ]
        possible_services = []
        possible_types = []

        bulk_check_metadata = CheckMetadata.get_bulk(provider="aws")
        for check_metadata in bulk_check_metadata.values():
            if check_metadata.ServiceName not in possible_services:
                possible_services.append(check_metadata.ServiceName)
            if (
                check_metadata.ResourceType
                and check_metadata.ResourceType not in possible_types
            ):
                possible_types.append(check_metadata.ResourceType)

        with rls_transaction(tenant_id):
            provider, _ = Provider.all_objects.get_or_create(
                tenant_id=tenant_id,
                provider="aws",
                connected=True,
                uid=str(random.randint(100000000000, 999999999999)),
                defaults={
                    "alias": alias,
                },
            )

        with rls_transaction(tenant_id):
            scan = Scan.all_objects.create(
                tenant_id=tenant_id,
                provider=provider,
                name=alias,
                trigger="manual",
                state="executing",
                progress=0,
                started_at=datetime.now(timezone.utc),
            )
        scan_state = "completed"

        try:
            # Create resources
            resources = []

            for i in range(num_resources):
                resources.append(
                    Resource(
                        tenant_id=tenant_id,
                        provider_id=provider.id,
                        uid=f"testing-{uid_token}-{i}",
                        name=f"Testing {uid_token}-{i}",
                        region=random.choice(possible_regions),
                        service=random.choice(possible_services),
                        type=random.choice(possible_types),
                    )
                )

            num_batches = ceil(len(resources) / batch_size)
            self.stdout.write(self.style.WARNING("Creating resources..."))
            for i in tqdm(range(0, len(resources), batch_size), total=num_batches):
                with rls_transaction(tenant_id):
                    Resource.all_objects.bulk_create(resources[i : i + batch_size])
            self.stdout.write(self.style.SUCCESS("Resources created successfully.\n\n"))

            with rls_transaction(tenant_id):
                scan.progress = 33
                scan.save()

            # Create Findings
            findings = []
            possible_deltas = ["new", "changed", None]
            possible_severities = ["critical", "high", "medium", "low"]
            findings_resources_mapping = []

            for i in range(num_findings):
                severity = random.choice(possible_severities)
                check_id = random.randint(1, 1000)
                assigned_resource_num = random.randint(0, len(resources) - 1)
                assigned_resource = resources[assigned_resource_num]
                findings_resources_mapping.append(assigned_resource_num)

                findings.append(
                    Finding(
                        tenant_id=tenant_id,
                        scan=scan,
                        uid=f"testing-{uid_token}-{i}",
                        delta=random.choice(possible_deltas),
                        check_id=f"check-{check_id}",
                        status=random.choice(list(StatusChoices)),
                        severity=severity,
                        impact=severity,
                        raw_result={},
                        check_metadata={
                            "checktitle": f"Test title for check {check_id}",
                            "risk": f"Testing risk {uid_token}-{i}",
                            "provider": "aws",
                            "severity": severity,
                            "categories": ["category1", "category2", "category3"],
                            "description": "This is a random description that should not matter for testing purposes.",
                            "servicename": assigned_resource.service,
                            "resourcetype": assigned_resource.type,
                        },
                    )
                )

            num_batches = ceil(len(findings) / batch_size)
            self.stdout.write(self.style.WARNING("Creating findings..."))
            for i in tqdm(range(0, len(findings), batch_size), total=num_batches):
                with rls_transaction(tenant_id):
                    Finding.all_objects.bulk_create(findings[i : i + batch_size])
            self.stdout.write(self.style.SUCCESS("Findings created successfully.\n\n"))

            with rls_transaction(tenant_id):
                scan.progress = 66
                scan.save()

            # Create ResourceFindingMapping
            mappings = []
            for index, f in enumerate(findings):
                mappings.append(
                    ResourceFindingMapping(
                        tenant_id=tenant_id,
                        resource=resources[findings_resources_mapping[index]],
                        finding=f,
                    )
                )

            num_batches = ceil(len(mappings) / batch_size)
            self.stdout.write(
                self.style.WARNING("Creating resource-finding mappings...")
            )
            for i in tqdm(range(0, len(mappings), batch_size), total=num_batches):
                with rls_transaction(tenant_id):
                    ResourceFindingMapping.objects.bulk_create(
                        mappings[i : i + batch_size]
                    )
            self.stdout.write(
                self.style.SUCCESS(
                    "Resource-finding mappings created successfully.\n\n"
                )
            )
        except Exception as e:
            self.stdout.write(self.style.ERROR(f"Failed to populate test data: {e}"))
            scan_state = "failed"
        finally:
            scan.completed_at = datetime.now(timezone.utc)
            scan.duration = int(
                (datetime.now(timezone.utc) - scan.started_at).total_seconds()
            )
            scan.progress = 100
            scan.state = scan_state
            scan.unique_resource_count = num_resources
            with rls_transaction(tenant_id):
                scan.save()

        self.stdout.write(self.style.NOTICE("Successfully populated test data."))
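For reference, a minimal sketch of how the removed management command above could be invoked from Python. The command name ("populate_test_data") is an assumption, since the file path is not shown in this compare view; the tenant id is the one used throughout the fixtures above.

    # Sketch only: the command module name is assumed, not taken from this diff.
    from django.core.management import call_command

    call_command(
        "populate_test_data",                            # hypothetical command name
        tenant="12646005-9067-4d2a-a098-8bb378604362",   # tenant id from the fixtures above
        resources=1000,                                  # illustrative sizes
        findings=5000,
        batch=500,
        alias="Performance test",
    )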
@@ -1,23 +0,0 @@
# Generated by Django 5.1.1 on 2024-12-20 13:16

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0002_token_migrations"),
    ]

    operations = [
        migrations.RemoveConstraint(
            model_name="provider",
            name="unique_provider_uids",
        ),
        migrations.AddConstraint(
            model_name="provider",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "provider", "uid", "is_deleted"),
                name="unique_provider_uids",
            ),
        ),
    ]
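A note on the constraint change above (intent inferred from the field list, not stated in the compare view): including is_deleted in the unique constraint lets a tenant re-register a provider with the same uid after the previous record was soft-deleted, since the live row and the soft-deleted row no longer collide.

    # Sketch only: "t" is a hypothetical tenant id. With ("tenant_id", "provider",
    # "uid", "is_deleted") unique, these two rows can coexist; under the old
    # three-field constraint the second insert would raise an IntegrityError.
    Provider.all_objects.create(tenant_id=t, provider="aws", uid="112233445566", is_deleted=True)
    Provider.all_objects.create(tenant_id=t, provider="aws", uid="112233445566", is_deleted=False)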
@@ -1,248 +0,0 @@
# Generated by Django 5.1.1 on 2024-12-05 12:29

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models

import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0003_update_provider_unique_constraint_with_is_deleted"),
    ]

    operations = [
        migrations.CreateModel(
            name="Role",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("name", models.CharField(max_length=255)),
                ("manage_users", models.BooleanField(default=False)),
                ("manage_account", models.BooleanField(default=False)),
                ("manage_billing", models.BooleanField(default=False)),
                ("manage_providers", models.BooleanField(default=False)),
                ("manage_integrations", models.BooleanField(default=False)),
                ("manage_scans", models.BooleanField(default=False)),
                ("unlimited_visibility", models.BooleanField(default=False)),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "roles",
            },
        ),
        migrations.CreateModel(
            name="RoleProviderGroupRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_provider_group_relationship",
            },
        ),
        migrations.CreateModel(
            name="UserRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_user_relationship",
            },
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="provider_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.providergroup"
            ),
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="provider_groups",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.RoleProviderGroupRelationship",
                to="api.providergroup",
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="users",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.UserRoleRelationship",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "provider_group_id"),
                name="unique_role_provider_group_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_roleprovidergrouprelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "user_id"), name="unique_role_user_relationship"
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_userrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "name"), name="unique_role_per_tenant"
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_role",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.CreateModel(
            name="InvitationRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "invitation",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.invitation"
                    ),
                ),
                (
                    "role",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.role"
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_invitation_relationship",
            },
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "invitation_id"),
                name="unique_role_invitation_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_invitationrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="invitations",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.InvitationRoleRelationship",
                to="api.invitation",
            ),
        ),
    ]
@@ -1,44 +0,0 @@
from django.db import migrations

from api.db_router import MainRouter


def create_admin_role(apps, schema_editor):
    Tenant = apps.get_model("api", "Tenant")
    Role = apps.get_model("api", "Role")
    User = apps.get_model("api", "User")
    UserRoleRelationship = apps.get_model("api", "UserRoleRelationship")

    for tenant in Tenant.objects.using(MainRouter.admin_db).all():
        admin_role, _ = Role.objects.using(MainRouter.admin_db).get_or_create(
            name="admin",
            tenant=tenant,
            defaults={
                "manage_users": True,
                "manage_account": True,
                "manage_billing": True,
                "manage_providers": True,
                "manage_integrations": True,
                "manage_scans": True,
                "unlimited_visibility": True,
            },
        )
        users = User.objects.using(MainRouter.admin_db).filter(
            membership__tenant=tenant
        )
        for user in users:
            UserRoleRelationship.objects.using(MainRouter.admin_db).get_or_create(
                user=user,
                role=admin_role,
                tenant=tenant,
            )


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0004_rbac"),
    ]

    operations = [
        migrations.RunPython(create_admin_role),
    ]
@@ -1,15 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0005_rbac_missing_admin_roles"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="first_seen_at",
            field=models.DateTimeField(editable=False, null=True),
        ),
    ]
@@ -1,25 +0,0 @@
# Generated by Django 5.1.5 on 2025-01-28 15:03

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0006_findings_first_seen"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="scan",
            index=models.Index(
                fields=["tenant_id", "provider_id", "state", "inserted_at"],
                name="scans_prov_state_insert_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="scansummary",
            index=models.Index(
                fields=["tenant_id", "scan_id"], name="scan_summaries_tenant_scan_idx"
            ),
        ),
    ]
@@ -1,64 +0,0 @@
import json
from datetime import datetime, timedelta, timezone

import django.db.models.deletion
from django.db import migrations, models
from django_celery_beat.models import PeriodicTask

from api.db_utils import rls_transaction
from api.models import Scan, StateChoices


def migrate_daily_scheduled_scan_tasks(apps, schema_editor):
    for daily_scheduled_scan_task in PeriodicTask.objects.filter(
        task="scan-perform-scheduled"
    ):
        task_kwargs = json.loads(daily_scheduled_scan_task.kwargs)
        tenant_id = task_kwargs["tenant_id"]
        provider_id = task_kwargs["provider_id"]

        current_time = datetime.now(timezone.utc)
        scheduled_time_today = datetime.combine(
            current_time.date(),
            daily_scheduled_scan_task.start_time.time(),
            tzinfo=timezone.utc,
        )

        if current_time < scheduled_time_today:
            next_scan_date = scheduled_time_today
        else:
            next_scan_date = scheduled_time_today + timedelta(days=1)

        with rls_transaction(tenant_id):
            Scan.objects.create(
                tenant_id=tenant_id,
                name="Daily scheduled scan",
                provider_id=provider_id,
                trigger=Scan.TriggerChoices.SCHEDULED,
                state=StateChoices.SCHEDULED,
                scheduled_at=next_scan_date,
                scheduler_task_id=daily_scheduled_scan_task.id,
            )


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0007_scan_and_scan_summaries_indexes"),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        migrations.AddField(
            model_name="scan",
            name="scheduler_task",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="django_celery_beat.periodictask",
            ),
        ),
        migrations.RunPython(migrate_daily_scheduled_scan_tasks),
    ]
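To make the rescheduling branch in migrate_daily_scheduled_scan_tasks above concrete, a worked example with made-up timestamps (illustrative values, not taken from this diff):

    from datetime import datetime, timedelta, timezone

    current_time = datetime(2025, 1, 10, 14, 0, tzinfo=timezone.utc)          # "now" (hypothetical)
    scheduled_time_today = datetime(2025, 1, 10, 9, 30, tzinfo=timezone.utc)  # today's start_time
    # 14:00 UTC is already past 09:30 UTC, so the next scan rolls over to tomorrow.
    if current_time < scheduled_time_today:
        next_scan_date = scheduled_time_today
    else:
        next_scan_date = scheduled_time_today + timedelta(days=1)
    assert next_scan_date == datetime(2025, 1, 11, 9, 30, tzinfo=timezone.utc)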
@@ -1,22 +0,0 @@
# Generated by Django 5.1.5 on 2025-02-07 09:42

import django.core.validators
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0008_daily_scheduled_tasks_update"),
    ]

    operations = [
        migrations.AlterField(
            model_name="provider",
            name="uid",
            field=models.CharField(
                max_length=250,
                validators=[django.core.validators.MinLengthValidator(3)],
                verbose_name="Unique identifier for the provider, set by the provider",
            ),
        ),
    ]
@@ -1,109 +0,0 @@
from functools import partial

from django.db import connection, migrations


def create_index_on_partitions(
    apps, schema_editor, parent_table: str, index_name: str, index_details: str
):
    with connection.cursor() as cursor:
        cursor.execute(
            """
            SELECT inhrelid::regclass::text
            FROM pg_inherits
            WHERE inhparent = %s::regclass;
            """,
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]
    # Iterate over partitions and create index concurrently.
    # Note: PostgreSQL does not allow CONCURRENTLY inside a transaction,
    # so we need atomic = False for this migration.
    for partition in partitions:
        sql = (
            f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} ON {partition} "
            f"{index_details};"
        )
        schema_editor.execute(sql)


def drop_index_on_partitions(apps, schema_editor, parent_table: str, index_name: str):
    with schema_editor.connection.cursor() as cursor:
        cursor.execute(
            """
            SELECT inhrelid::regclass::text
            FROM pg_inherits
            WHERE inhparent = %s::regclass;
            """,
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]

    # Iterate over partitions and drop index concurrently.
    for partition in partitions:
        partition_index = f"{partition.replace('.', '_')}_{index_name}"
        sql = f"DROP INDEX CONCURRENTLY IF EXISTS {partition_index};"
        schema_editor.execute(sql)


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0009_increase_provider_uid_maximum_length"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="findings_tenant_and_id_idx",
                index_details="(tenant_id, id)",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="findings_tenant_and_id_idx",
            ),
        ),
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_scan_idx",
                index_details="(tenant_id, scan_id)",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_scan_idx",
            ),
        ),
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_scan_id_idx",
                index_details="(tenant_id, scan_id, id)",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_scan_id_idx",
            ),
        ),
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="find_delta_new_idx",
                index_details="(tenant_id, id) where delta = 'new'",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="find_delta_new_idx",
            ),
        ),
    ]
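For reference, the per-partition statement that create_index_on_partitions builds, evaluated for a hypothetical partition name (real partition names come from pg_inherits and are not shown in this diff):

    partition, index_name = "findings_2025_jan", "find_delta_new_idx"   # hypothetical values
    sql = (
        f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} "
        f"ON {partition} (tenant_id, id) where delta = 'new';"
    )
    # -> CREATE INDEX CONCURRENTLY IF NOT EXISTS findings_2025_jan_find_delta_new_idx
    #    ON findings_2025_jan (tenant_id, id) where delta = 'new';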
@@ -1,49 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0010_findings_performance_indexes_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "scan_id", "id"], name="find_tenant_scan_id_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                condition=models.Q(("delta", "new")),
                fields=["tenant_id", "id"],
                name="find_delta_new_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="resourcetagmapping",
            index=models.Index(
                fields=["tenant_id", "resource_id"], name="resource_tag_tenant_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="resource",
            index=models.Index(
                fields=["tenant_id", "service", "region", "type"],
                name="resource_tenant_metadata_idx",
            ),
        ),
    ]
@@ -1,15 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0011_findings_performance_indexes_parent"),
    ]

    operations = [
        migrations.AddField(
            model_name="scan",
            name="output_location",
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
    ]
@@ -1,35 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46

from functools import partial

from django.db import migrations

from api.db_utils import IntegrationTypeEnum, PostgresEnumMigration, register_enum
from api.models import Integration

IntegrationTypeEnumMigration = PostgresEnumMigration(
    enum_name="integration_type",
    enum_values=tuple(
        integration_type[0]
        for integration_type in Integration.IntegrationChoices.choices
    ),
)


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0012_scan_report_output"),
    ]

    operations = [
        migrations.RunPython(
            IntegrationTypeEnumMigration.create_enum_type,
            reverse_code=IntegrationTypeEnumMigration.drop_enum_type,
        ),
        migrations.RunPython(
            partial(register_enum, enum_class=IntegrationTypeEnum),
            reverse_code=migrations.RunPython.noop,
        ),
    ]
@@ -1,131 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46

import uuid

import django.db.models.deletion
from django.db import migrations, models

import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0013_integrations_enum"),
    ]

    operations = [
        migrations.CreateModel(
            name="Integration",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("enabled", models.BooleanField(default=False)),
                ("connected", models.BooleanField(blank=True, null=True)),
                (
                    "connection_last_checked_at",
                    models.DateTimeField(blank=True, null=True),
                ),
                (
                    "integration_type",
                    api.db_utils.IntegrationTypeEnumField(
                        choices=[
                            ("amazon_s3", "Amazon S3"),
                            ("saml", "SAML"),
                            ("aws_security_hub", "AWS Security Hub"),
                            ("jira", "JIRA"),
                            ("slack", "Slack"),
                        ]
                    ),
                ),
                ("configuration", models.JSONField(default=dict)),
                ("_credentials", models.BinaryField(db_column="credentials")),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={"db_table": "integrations", "abstract": False},
        ),
        migrations.AddConstraint(
            model_name="integration",
            constraint=RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_integration",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.CreateModel(
            name="IntegrationProviderRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "integration",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="api.integration",
                    ),
                ),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.provider"
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "integration_provider_mappings",
                "constraints": [
                    models.UniqueConstraint(
                        fields=("integration_id", "provider_id"),
                        name="unique_integration_provider_rel",
                    ),
                ],
            },
        ),
        migrations.AddConstraint(
            model_name="IntegrationProviderRelationship",
            constraint=RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_integrationproviderrelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="integration",
            name="providers",
            field=models.ManyToManyField(
                blank=True,
                related_name="integrations",
                through="api.IntegrationProviderRelationship",
                to="api.provider",
            ),
        ),
    ]
@@ -1,26 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-25 11:29

from django.db import migrations, models

import api.db_utils


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0014_integrations"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="muted",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="finding",
            name="status",
            field=api.db_utils.StatusEnumField(
                choices=[("FAIL", "Fail"), ("PASS", "Pass"), ("MANUAL", "Manual")]
            ),
        ),
    ]
@@ -1,32 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-31 10:46

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0015_finding_muted"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="compliance",
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
        migrations.AddField(
            model_name="resource",
            name="details",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="resource",
            name="metadata",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="resource",
            name="partition",
            field=models.TextField(blank=True, null=True),
        ),
    ]
||||
@@ -11,7 +11,6 @@ from django.core.validators import MinLengthValidator
|
||||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from django_celery_results.models import TaskResult
|
||||
from psqlextra.manager import PostgresManager
|
||||
from psqlextra.models import PostgresPartitionedModel
|
||||
@@ -21,7 +20,6 @@ from uuid6 import uuid7
|
||||
from api.db_utils import (
|
||||
CustomUserManager,
|
||||
FindingDeltaEnumField,
|
||||
IntegrationTypeEnumField,
|
||||
InvitationStateEnumField,
|
||||
MemberRoleEnumField,
|
||||
ProviderEnumField,
|
||||
@@ -59,6 +57,7 @@ class StatusChoices(models.TextChoices):
|
||||
FAIL = "FAIL", _("Fail")
|
||||
PASS = "PASS", _("Pass")
|
||||
MANUAL = "MANUAL", _("Manual")
|
||||
MUTED = "MUTED", _("Muted")
|
||||
|
||||
|
||||
class StateChoices(models.TextChoices):
|
||||
@@ -70,21 +69,6 @@ class StateChoices(models.TextChoices):
|
||||
CANCELLED = "cancelled", _("Cancelled")
|
||||
|
||||
|
||||
class PermissionChoices(models.TextChoices):
|
||||
"""
|
||||
Represents the different permission states that a role can have.
|
||||
|
||||
Attributes:
|
||||
UNLIMITED: Indicates that the role possesses all permissions.
|
||||
LIMITED: Indicates that the role has some permissions but not all.
|
||||
NONE: Indicates that the role does not have any permissions.
|
||||
"""
|
||||
|
||||
UNLIMITED = "unlimited", _("Unlimited permissions")
|
||||
LIMITED = "limited", _("Limited permissions")
|
||||
NONE = "none", _("No permissions")
|
||||
|
||||
|
||||
class ActiveProviderManager(models.Manager):
|
||||
def get_queryset(self):
|
||||
return super().get_queryset().filter(self.active_provider_filter())
|
||||
@@ -227,13 +211,13 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
@staticmethod
|
||||
def validate_kubernetes_uid(value):
|
||||
if not re.match(
|
||||
r"^[a-z0-9][A-Za-z0-9_.:\/-]{1,250}$",
|
||||
r"(^[a-z0-9]([-a-z0-9]{1,61}[a-z0-9])?$)|(^arn:aws(-cn|-us-gov|-iso|-iso-b)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/:\.\*]+(:\d+)?$)",
|
||||
value,
|
||||
):
|
||||
raise ModelValidationError(
|
||||
detail="The value must either be a valid Kubernetes UID (up to 63 characters, "
|
||||
"starting and ending with a lowercase letter or number, containing only "
|
||||
"lowercase alphanumeric characters and hyphens) or a valid AWS EKS Cluster ARN, GCP GKE Context Name or Azure AKS Cluster Name.",
|
||||
"lowercase alphanumeric characters and hyphens) or a valid EKS ARN.",
|
||||
code="kubernetes-uid",
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
@@ -247,7 +231,7 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
)
|
||||
uid = models.CharField(
|
||||
"Unique identifier for the provider, set by the provider",
|
||||
max_length=250,
|
||||
max_length=63,
|
||||
blank=False,
|
||||
validators=[MinLengthValidator(3)],
|
||||
)
|
||||
@@ -272,7 +256,7 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "provider", "uid", "is_deleted"),
|
||||
fields=("tenant_id", "provider", "uid"),
|
||||
name="unique_provider_uids",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
@@ -314,10 +298,19 @@ class ProviderGroup(RowLevelSecurityProtectedModel):
|
||||
|
||||
|
||||
class ProviderGroupMembership(RowLevelSecurityProtectedModel):
|
||||
objects = ActiveProviderManager()
|
||||
all_objects = models.Manager()
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
provider_group = models.ForeignKey(ProviderGroup, on_delete=models.CASCADE)
|
||||
provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True)
|
||||
provider = models.ForeignKey(
|
||||
Provider,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
provider_group = models.ForeignKey(
|
||||
ProviderGroup,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
|
||||
class Meta:
|
||||
db_table = "provider_group_memberships"
|
||||
@@ -334,7 +327,7 @@ class ProviderGroupMembership(RowLevelSecurityProtectedModel):
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "provider_groups-provider"
|
||||
resource_name = "provider-group-memberships"
|
||||
|
||||
|
||||
class Task(RowLevelSecurityProtectedModel):
|
||||
@@ -411,10 +404,6 @@ class Scan(RowLevelSecurityProtectedModel):
|
||||
started_at = models.DateTimeField(null=True, blank=True)
|
||||
completed_at = models.DateTimeField(null=True, blank=True)
|
||||
next_scan_at = models.DateTimeField(null=True, blank=True)
|
||||
scheduler_task = models.ForeignKey(
|
||||
PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
|
||||
)
|
||||
output_location = models.CharField(blank=True, null=True, max_length=200)
|
||||
# TODO: mutelist foreign key
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
@@ -433,10 +422,6 @@ class Scan(RowLevelSecurityProtectedModel):
|
||||
fields=["provider", "state", "trigger", "scheduled_at"],
|
||||
name="scans_prov_state_trig_sche_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "provider_id", "state", "inserted_at"],
|
||||
name="scans_prov_state_insert_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
@@ -518,19 +503,14 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
editable=False,
|
||||
)
|
||||
|
||||
metadata = models.TextField(blank=True, null=True)
|
||||
details = models.TextField(blank=True, null=True)
|
||||
partition = models.TextField(blank=True, null=True)
|
||||
|
||||
# Relationships
|
||||
tags = models.ManyToManyField(
|
||||
ResourceTag,
|
||||
verbose_name="Tags associated with the resource, by provider",
|
||||
through="ResourceTagMapping",
|
||||
)
|
||||
|
||||
def get_tags(self, tenant_id: str) -> dict:
|
||||
return {tag.key: tag.value for tag in self.tags.filter(tenant_id=tenant_id)}
|
||||
def get_tags(self) -> dict:
|
||||
return {tag.key: tag.value for tag in self.tags.all()}
|
||||
|
||||
def clear_tags(self):
|
||||
self.tags.clear()
|
||||
@@ -558,10 +538,6 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
fields=["uid", "region", "service", "name"],
|
||||
name="resource_uid_reg_serv_name_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "service", "region", "type"],
|
||||
name="resource_tenant_metadata_idx",
|
||||
),
|
||||
GinIndex(fields=["text_search"], name="gin_resources_search_idx"),
|
||||
]
|
||||
|
||||
@@ -609,12 +585,6 @@ class ResourceTagMapping(RowLevelSecurityProtectedModel):
|
||||
),
|
||||
]
|
||||
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["tenant_id", "resource_id"], name="resource_tag_tenant_idx"
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
"""
|
||||
@@ -639,7 +609,6 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid7, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
first_seen_at = models.DateTimeField(editable=False, null=True)
|
||||
|
||||
uid = models.CharField(max_length=300)
|
||||
delta = FindingDeltaEnumField(
|
||||
@@ -660,8 +629,6 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
tags = models.JSONField(default=dict, null=True, blank=True)
|
||||
check_id = models.CharField(max_length=100, blank=False, null=False)
|
||||
check_metadata = models.JSONField(default=dict, null=False)
|
||||
muted = models.BooleanField(default=False, null=False)
|
||||
compliance = models.JSONField(default=dict, null=True, blank=True)
|
||||
|
||||
# Relationships
|
||||
scan = models.ForeignKey(to=Scan, related_name="findings", on_delete=models.CASCADE)
|
||||
@@ -715,17 +682,7 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
],
|
||||
name="findings_filter_idx",
|
||||
),
|
||||
models.Index(fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"),
|
||||
GinIndex(fields=["text_search"], name="gin_findings_search_idx"),
|
||||
models.Index(fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "id"], name="find_tenant_scan_id_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "id"],
|
||||
condition=Q(delta="new"),
|
||||
name="find_delta_new_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
@@ -894,150 +851,6 @@ class Invitation(RowLevelSecurityProtectedModel):
|
||||
resource_name = "invitations"
|
||||
|
||||
|
||||
class Role(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
name = models.CharField(max_length=255)
|
||||
manage_users = models.BooleanField(default=False)
|
||||
manage_account = models.BooleanField(default=False)
|
||||
manage_billing = models.BooleanField(default=False)
|
||||
manage_providers = models.BooleanField(default=False)
|
||||
manage_integrations = models.BooleanField(default=False)
|
||||
manage_scans = models.BooleanField(default=False)
|
||||
unlimited_visibility = models.BooleanField(default=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
provider_groups = models.ManyToManyField(
|
||||
ProviderGroup, through="RoleProviderGroupRelationship", related_name="roles"
|
||||
)
|
||||
users = models.ManyToManyField(
|
||||
User, through="UserRoleRelationship", related_name="roles"
|
||||
)
|
||||
invitations = models.ManyToManyField(
|
||||
Invitation, through="InvitationRoleRelationship", related_name="roles"
|
||||
)
|
||||
|
||||
# Filter permission_state
|
||||
PERMISSION_FIELDS = [
|
||||
"manage_users",
|
||||
"manage_account",
|
||||
"manage_billing",
|
||||
"manage_providers",
|
||||
"manage_integrations",
|
||||
"manage_scans",
|
||||
]
|
||||
|
||||
@property
|
||||
def permission_state(self):
|
||||
values = [getattr(self, field) for field in self.PERMISSION_FIELDS]
|
||||
if all(values):
|
||||
return PermissionChoices.UNLIMITED
|
||||
elif not any(values):
|
||||
return PermissionChoices.NONE
|
||||
else:
|
||||
return PermissionChoices.LIMITED
|
||||
|
||||
@classmethod
|
||||
def filter_by_permission_state(cls, queryset, value):
|
||||
q_all_true = Q(**{field: True for field in cls.PERMISSION_FIELDS})
|
||||
q_all_false = Q(**{field: False for field in cls.PERMISSION_FIELDS})
|
||||
|
||||
if value == PermissionChoices.UNLIMITED:
|
||||
return queryset.filter(q_all_true)
|
||||
elif value == PermissionChoices.NONE:
|
||||
return queryset.filter(q_all_false)
|
||||
else:
|
||||
return queryset.exclude(q_all_true | q_all_false)
|
||||
|
||||
class Meta:
|
||||
db_table = "roles"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["tenant_id", "name"],
|
||||
name="unique_role_per_tenant",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "roles"
|
||||
|
||||
|
||||
class RoleProviderGroupRelationship(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
role = models.ForeignKey(Role, on_delete=models.CASCADE)
|
||||
provider_group = models.ForeignKey(ProviderGroup, on_delete=models.CASCADE)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
db_table = "role_provider_group_relationship"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["role_id", "provider_group_id"],
|
||||
name="unique_role_provider_group_relationship",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "role-provider_groups"
|
||||
|
||||
|
||||
class UserRoleRelationship(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
role = models.ForeignKey(Role, on_delete=models.CASCADE)
|
||||
user = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
db_table = "role_user_relationship"
|
||||
constraints = [
|
||||
            models.UniqueConstraint(
                fields=["role_id", "user_id"],
                name="unique_role_user_relationship",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "user-roles"


class InvitationRoleRelationship(RowLevelSecurityProtectedModel):
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    invitation = models.ForeignKey(Invitation, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "role_invitation_relationship"
        constraints = [
            models.UniqueConstraint(
                fields=["role_id", "invitation_id"],
                name="unique_role_invitation_relationship",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "invitation-roles"


class ComplianceOverview(RowLevelSecurityProtectedModel):
    objects = ActiveProviderManager()
    all_objects = models.Manager()
@@ -1136,89 +949,6 @@ class ScanSummary(RowLevelSecurityProtectedModel):
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]
        indexes = [
            models.Index(
                fields=["tenant_id", "scan_id"],
                name="scan_summaries_tenant_scan_idx",
            )
        ]

    class JSONAPIMeta:
        resource_name = "scan-summaries"


class Integration(RowLevelSecurityProtectedModel):
    class IntegrationChoices(models.TextChoices):
        S3 = "amazon_s3", _("Amazon S3")
        SAML = "saml", _("SAML")
        AWS_SECURITY_HUB = "aws_security_hub", _("AWS Security Hub")
        JIRA = "jira", _("JIRA")
        SLACK = "slack", _("Slack")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    enabled = models.BooleanField(default=False)
    connected = models.BooleanField(null=True, blank=True)
    connection_last_checked_at = models.DateTimeField(null=True, blank=True)
    integration_type = IntegrationTypeEnumField(choices=IntegrationChoices.choices)
    configuration = models.JSONField(default=dict)
    _credentials = models.BinaryField(db_column="credentials")

    providers = models.ManyToManyField(
        Provider,
        related_name="integrations",
        through="IntegrationProviderRelationship",
        blank=True,
    )

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "integrations"

        constraints = [
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

    class JSONAPIMeta:
        resource_name = "integrations"

    @property
    def credentials(self):
        if isinstance(self._credentials, memoryview):
            encrypted_bytes = self._credentials.tobytes()
        elif isinstance(self._credentials, str):
            encrypted_bytes = self._credentials.encode()
        else:
            encrypted_bytes = self._credentials
        decrypted_data = fernet.decrypt(encrypted_bytes)
        return json.loads(decrypted_data.decode())

    @credentials.setter
    def credentials(self, value):
        encrypted_data = fernet.encrypt(json.dumps(value).encode())
        self._credentials = encrypted_data
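For readers unfamiliar with the pattern above: the property stores credentials as Fernet-encrypted bytes and transparently decrypts them on access. A minimal, self-contained sketch of the same round-trip, assuming the `cryptography` package (in the model, `fernet` is a module-level instance built from a configured key, not a freshly generated one):

import json

from cryptography.fernet import Fernet

fernet = Fernet(Fernet.generate_key())  # illustrative; the API derives this key from configuration

# Encrypt on write, exactly as the setter does
plaintext = {"aws_access_key_id": "AKIA...", "aws_secret_access_key": "..."}
stored = fernet.encrypt(json.dumps(plaintext).encode())

# Decrypt on read, as the property does
assert json.loads(fernet.decrypt(stored).decode()) == plaintext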


class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    integration = models.ForeignKey(Integration, on_delete=models.CASCADE)
    provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
    inserted_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = "integration_provider_mappings"
        constraints = [
            models.UniqueConstraint(
                fields=["integration_id", "provider_id"],
                name="unique_integration_provider_rel",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

@@ -4,11 +4,11 @@ from typing import Generator, Optional
from dateutil.relativedelta import relativedelta
from django.conf import settings
from psqlextra.partitioning import (
    PostgresPartitioningError,
    PostgresPartitioningManager,
    PostgresRangePartition,
    PostgresRangePartitioningStrategy,
    PostgresTimePartitionSize,
    PostgresPartitioningError,
)
from psqlextra.partitioning.config import PostgresPartitioningConfig
from uuid6 import UUID

@@ -1,75 +0,0 @@
from enum import Enum
from typing import Optional

from django.db.models import QuerySet
from rest_framework.permissions import BasePermission

from api.db_router import MainRouter
from api.models import Provider, Role, User


class Permissions(Enum):
    MANAGE_USERS = "manage_users"
    MANAGE_ACCOUNT = "manage_account"
    MANAGE_BILLING = "manage_billing"
    MANAGE_PROVIDERS = "manage_providers"
    MANAGE_INTEGRATIONS = "manage_integrations"
    MANAGE_SCANS = "manage_scans"
    UNLIMITED_VISIBILITY = "unlimited_visibility"


class HasPermissions(BasePermission):
    """
    Custom permission to check if the user's role has the required permissions.
    The required permissions should be specified in the view as a list in `required_permissions`.
    """

    def has_permission(self, request, view):
        required_permissions = getattr(view, "required_permissions", [])
        if not required_permissions:
            return True

        user_roles = (
            User.objects.using(MainRouter.admin_db).get(id=request.user.id).roles.all()
        )
        if not user_roles:
            return False

        for perm in required_permissions:
            if not getattr(user_roles[0], perm.value, False):
                return False

        return True
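Usage sketch (hypothetical view, not part of this diff): a DRF view opts into the check by declaring `required_permissions`, and `HasPermissions` then looks those flags up on the user's first role.

from rest_framework.response import Response
from rest_framework.views import APIView


class ExampleProviderAdminView(APIView):  # hypothetical example view
    permission_classes = [HasPermissions]
    required_permissions = [Permissions.MANAGE_PROVIDERS]

    def get(self, request):
        # Only reached when the role has manage_providers=True
        return Response({"ok": True})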


def get_role(user: User) -> Optional[Role]:
    """
    Retrieve the first role assigned to the given user.

    Returns:
        The user's first Role instance if the user has any roles, otherwise None.
    """
    return user.roles.first()


def get_providers(role: Role) -> QuerySet[Provider]:
    """
    Return a distinct queryset of Providers accessible by the given role.

    If the role has no associated provider groups, an empty queryset is returned.

    Args:
        role: A Role instance.

    Returns:
        A QuerySet of Provider objects filtered by the role's provider groups.
        If the role has no provider groups, returns an empty queryset.
    """
    tenant = role.tenant
    provider_groups = role.provider_groups.all()
    if not provider_groups.exists():
        return Provider.objects.none()

    return Provider.objects.filter(
        tenant=tenant, provider_groups__in=provider_groups
    ).distinct()
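Taken together, the two helpers resolve the visibility set for a request. A short sketch, assuming a request object with an authenticated user:

# Sketch: what the current user may see
role = get_role(request.user)
visible_providers = get_providers(role) if role else Provider.objects.none()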
@@ -2,7 +2,7 @@ from contextlib import nullcontext

from rest_framework_json_api.renderers import JSONRenderer

from api.db_utils import rls_transaction
from api.db_utils import tenant_transaction


class APIJSONRenderer(JSONRenderer):
@@ -13,9 +13,9 @@ class APIJSONRenderer(JSONRenderer):
        tenant_id = getattr(request, "tenant_id", None) if request else None
        include_param_present = "include" in request.query_params if request else False

        # Use rls_transaction if needed for included resources, otherwise do nothing
        # Use tenant_transaction if needed for included resources, otherwise do nothing
        context_manager = (
            rls_transaction(tenant_id)
            tenant_transaction(tenant_id)
            if tenant_id and include_param_present
            else nullcontext()
        )
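The conditional context manager above is the standard `contextlib.nullcontext` idiom: pick a real context manager when one is needed, otherwise a no-op object with the same interface, so the `with` block stays unconditional. A standalone sketch with a stand-in for the transaction helper:

from contextlib import contextmanager, nullcontext


@contextmanager
def tenant_scope(tenant_id):  # stand-in for the real transaction helper
    print(f"set tenant {tenant_id}")
    yield


def render(tenant_id, include_param_present):
    cm = tenant_scope(tenant_id) if tenant_id and include_param_present else nullcontext()
    with cm:
        pass  # serialize included resources here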
@@ -58,11 +58,11 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
    drop_sql_query = """
        ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
        ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
        REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
        REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
    """

    drop_policy_sql_query = """
        DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} ON %(table_name)s;
        DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
    """

    def __init__(
@@ -87,7 +87,9 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
                f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
            )

        full_create_sql_query = f"{self.rls_sql_query}{policy_queries}{grant_queries}"
        full_create_sql_query = (
            f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
        )

        table_name = model._meta.db_table
        if self.partition_name:
@@ -104,20 +106,16 @@ class RowLevelSecurityConstraint(models.BaseConstraint):

    def remove_sql(self, model: Any, schema_editor: Any) -> Any:
        field_column = schema_editor.quote_name(self.target_field)
        raw_table_name = model._meta.db_table
        table_name = raw_table_name
        if self.partition_name:
            raw_table_name = f"{raw_table_name}_{self.partition_name}"
            table_name = raw_table_name

        full_drop_sql_query = (
            f"{self.drop_sql_query}"
            f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
            f"{''.join([self.drop_policy_sql_query.format(statement) for statement in self.statements])}"
        )
        table_name = model._meta.db_table
        if self.partition_name:
            table_name = f"{table_name}_{self.partition_name}"
        return Statement(
            full_drop_sql_query,
            table_name=Table(table_name, schema_editor.quote_name),
            raw_table_name=raw_table_name,
            field_column=field_column,
            db_user=DB_USER,
            partition_name=self.partition_name,
@@ -132,7 +130,9 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
        path, _, kwargs = super().deconstruct()
        return (path, (self.target_field,), kwargs)

    def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS):  # noqa: F841
    def validate(
        self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS
    ):  # noqa: F841
        if not hasattr(instance, "tenant_id"):
            raise ValidationError(f"{model.__name__} does not have a tenant_id field.")


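Note the two interpolation stages in these SQL templates: `{statement}` is filled first via `str.format` (one policy per entry in `statements`), while the `%(...)s` placeholders are left for Django's `Statement`/`Table` machinery to substitute at schema-editor time. A toy reproduction of the mechanics, with illustrative names:

drop_policy_sql_query = (
    "DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} "
    "ON %(table_name)s;"
)
statements = ["SELECT", "INSERT"]

# Stage 1: str.format expands {statement}; the %(...)s markers survive untouched
stage_one = "".join(drop_policy_sql_query.format(statement=s) for s in statements)

# Stage 2 (normally performed by Django's Statement): %-style fills the identifiers
print(stage_one % {"db_user": "api", "raw_table_name": "findings", "table_name": '"findings"'})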
@@ -3,9 +3,9 @@ from celery.signals import before_task_publish
from config.celery import celery_app
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django_celery_beat.models import PeriodicTask
from django_celery_results.backends.database import DatabaseBackend

from api.db_utils import delete_related_daily_task
from api.models import Provider


@@ -31,4 +31,5 @@ before_task_publish.connect(
@receiver(post_delete, sender=Provider)
def delete_provider_scan_task(sender, instance, **kwargs):  # noqa: F841
    # Delete the associated periodic task when the provider is deleted
    delete_related_daily_task(instance.id)
    task_name = f"scan-perform-scheduled-{instance.id}"
    PeriodicTask.objects.filter(name=task_name).delete()

File diff suppressed because it is too large
@@ -1,11 +1,12 @@
from unittest.mock import patch

import pytest
from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
from django.urls import reverse
from rest_framework.test import APIClient

from api.models import Membership, User


@patch("api.v1.views.MainRouter.admin_db", new="default")
@pytest.mark.django_db
def test_basic_authentication():
    client = APIClient()
@@ -97,204 +98,3 @@ def test_refresh_token(create_test_user, tenants_fixture):
        format="vnd.api+json",
    )
    assert new_refresh_response.status_code == 200


@pytest.mark.django_db
def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fixture):
    client = APIClient()

    role = roles_fixture[0]

    user1_email = "user1@testing.com"
    user2_email = "user2@testing.com"

    password = "thisisapassword123"

    user1_response = client.post(
        reverse("user-list"),
        data={
            "data": {
                "type": "users",
                "attributes": {
                    "name": "user1",
                    "email": user1_email,
                    "password": password,
                },
            }
        },
        format="vnd.api+json",
    )
    assert user1_response.status_code == 201

    user1_access_token, _ = get_api_tokens(client, user1_email, password)
    user1_headers = get_authorization_header(user1_access_token)

    user2_invitation = client.post(
        reverse("invitation-list"),
        data={
            "data": {
                "type": "invitations",
                "attributes": {"email": user2_email},
                "relationships": {
                    "roles": {
                        "data": [
                            {
                                "type": "roles",
                                "id": str(role.id),
                            }
                        ]
                    }
                },
            }
        },
        format="vnd.api+json",
        headers=user1_headers,
    )
    assert user2_invitation.status_code == 201
    invitation_token = user2_invitation.json()["data"]["attributes"]["token"]

    user2_response = client.post(
        reverse("user-list") + f"?invitation_token={invitation_token}",
        data={
            "data": {
                "type": "users",
                "attributes": {
                    "name": "user2",
                    "email": user2_email,
                    "password": password,
                },
            }
        },
        format="vnd.api+json",
    )
    assert user2_response.status_code == 201

    user2_access_token, _ = get_api_tokens(client, user2_email, password)
    user2_headers = get_authorization_header(user2_access_token)

    user1_me = client.get(reverse("user-me"), headers=user1_headers)
    assert user1_me.status_code == 200
    assert user1_me.json()["data"]["attributes"]["email"] == user1_email

    user2_me = client.get(reverse("user-me"), headers=user2_headers)
    assert user2_me.status_code == 200
    assert user2_me.json()["data"]["attributes"]["email"] == user2_email


@pytest.mark.django_db
class TestTokenSwitchTenant:
    def test_switch_tenant_with_valid_token(self, tenants_fixture, providers_fixture):
        client = APIClient()

        test_user = "test_email@prowler.com"
        test_password = "test_password"

        # Check that we can create a new user without any kind of authentication
        user_creation_response = client.post(
            reverse("user-list"),
            data={
                "data": {
                    "type": "users",
                    "attributes": {
                        "name": "test",
                        "email": test_user,
                        "password": test_password,
                    },
                }
            },
            format="vnd.api+json",
        )
        assert user_creation_response.status_code == 201

        # Create a new relationship between this user and another tenant
        tenant_id = tenants_fixture[0].id
        user_instance = User.objects.get(email=test_user)
        Membership.objects.create(user=user_instance, tenant_id=tenant_id)

        # Check that using our new user's credentials we can authenticate and get the providers
        access_token, _ = get_api_tokens(client, test_user, test_password)
        auth_headers = get_authorization_header(access_token)

        user_me_response = client.get(
            reverse("user-me"),
            headers=auth_headers,
        )
        assert user_me_response.status_code == 200
        # Assert this user belongs to two tenants
        assert (
            user_me_response.json()["data"]["relationships"]["memberships"]["meta"][
                "count"
            ]
            == 2
        )

        provider_response = client.get(
            reverse("provider-list"),
            headers=auth_headers,
        )
        assert provider_response.status_code == 200
        # Empty response since there are no providers in this tenant
        assert not provider_response.json()["data"]

        switch_tenant_response = client.post(
            reverse("token-switch"),
            data={
                "data": {
                    "type": "tokens-switch-tenant",
                    "attributes": {"tenant_id": tenant_id},
                }
            },
            headers=auth_headers,
        )
        assert switch_tenant_response.status_code == 200
        new_access_token = switch_tenant_response.json()["data"]["attributes"]["access"]
        new_auth_headers = get_authorization_header(new_access_token)

        provider_response = client.get(
            reverse("provider-list"),
            headers=new_auth_headers,
        )
        assert provider_response.status_code == 200
        # Now it must be data because we switched to another tenant with providers
        assert provider_response.json()["data"]

    def test_switch_tenant_with_invalid_token(self, create_test_user, tenants_fixture):
        client = APIClient()

        access_token, refresh_token = get_api_tokens(
            client, create_test_user.email, TEST_PASSWORD
        )
        auth_headers = get_authorization_header(access_token)

        invalid_token_response = client.post(
            reverse("token-switch"),
            data={
                "data": {
                    "type": "tokens-switch-tenant",
                    "attributes": {"tenant_id": "invalid_tenant_id"},
                }
            },
            headers=auth_headers,
        )
        assert invalid_token_response.status_code == 400
        assert invalid_token_response.json()["errors"][0]["code"] == "invalid"
        assert (
            invalid_token_response.json()["errors"][0]["detail"]
            == "Must be a valid UUID."
        )

        invalid_tenant_response = client.post(
            reverse("token-switch"),
            data={
                "data": {
                    "type": "tokens-switch-tenant",
                    "attributes": {"tenant_id": tenants_fixture[-1].id},
                }
            },
            headers=auth_headers,
        )
        assert invalid_tenant_response.status_code == 400
        assert invalid_tenant_response.json()["errors"][0]["code"] == "invalid"
        assert invalid_tenant_response.json()["errors"][0]["detail"] == (
            "Tenant does not exist or user is not a " "member."
        )

@@ -1,85 +0,0 @@
from unittest.mock import Mock, patch

import pytest
from conftest import get_api_tokens, get_authorization_header
from django.urls import reverse
from rest_framework.test import APIClient

from api.models import Provider


@patch("api.v1.views.Task.objects.get")
@patch("api.v1.views.delete_provider_task.delay")
@pytest.mark.django_db
def test_delete_provider_without_executing_task(
    mock_delete_task, mock_task_get, create_test_user, tenants_fixture, tasks_fixture
):
    client = APIClient()

    test_user = "test_email@prowler.com"
    test_password = "test_password"

    prowler_task = tasks_fixture[0]
    task_mock = Mock()
    task_mock.id = prowler_task.id
    mock_delete_task.return_value = task_mock
    mock_task_get.return_value = prowler_task

    user_creation_response = client.post(
        reverse("user-list"),
        data={
            "data": {
                "type": "users",
                "attributes": {
                    "name": "test",
                    "email": test_user,
                    "password": test_password,
                },
            }
        },
        format="vnd.api+json",
    )
    assert user_creation_response.status_code == 201

    access_token, _ = get_api_tokens(client, test_user, test_password)
    auth_headers = get_authorization_header(access_token)

    create_provider_response = client.post(
        reverse("provider-list"),
        data={
            "data": {
                "type": "providers",
                "attributes": {
                    "provider": Provider.ProviderChoices.AWS,
                    "uid": "123456789012",
                },
            }
        },
        format="vnd.api+json",
        headers=auth_headers,
    )
    assert create_provider_response.status_code == 201
    provider_id = create_provider_response.json()["data"]["id"]
    provider_uid = create_provider_response.json()["data"]["attributes"]["uid"]

    remove_provider = client.delete(
        reverse("provider-detail", kwargs={"pk": provider_id}),
        headers=auth_headers,
    )
    assert remove_provider.status_code == 202

    recreate_provider_response = client.post(
        reverse("provider-list"),
        data={
            "data": {
                "type": "providers",
                "attributes": {
                    "provider": Provider.ProviderChoices.AWS,
                    "uid": provider_uid,
                },
            }
        },
        format="vnd.api+json",
        headers=auth_headers,
    )
    assert recreate_provider_response.status_code == 201
@@ -1,10 +1,9 @@
from unittest.mock import patch

import pytest
from conftest import TEST_PASSWORD, TEST_USER, get_api_tokens, get_authorization_header
from django.urls import reverse

from conftest import TEST_USER, TEST_PASSWORD, get_api_tokens, get_authorization_header


@patch("api.v1.views.schedule_provider_scan")
@pytest.mark.django_db
@@ -13,7 +12,6 @@ def test_check_resources_between_different_tenants(
    enforce_test_user_db_connection,
    authenticated_api_client,
    tenants_fixture,
    set_user_admin_roles_fixture,
):
    client = authenticated_api_client


@@ -1,12 +1,12 @@
from unittest.mock import patch, MagicMock
from unittest.mock import MagicMock, patch

from api.compliance import (
    generate_compliance_overview_template,
    generate_scan_compliance,
    get_prowler_provider_checks,
    get_prowler_provider_compliance,
    load_prowler_compliance,
    load_prowler_checks,
    generate_scan_compliance,
    generate_compliance_overview_template,
    load_prowler_compliance,
)
from api.models import Provider

@@ -69,7 +69,7 @@ class TestCompliance:

        load_prowler_compliance()

        from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS
        from api.compliance import PROWLER_CHECKS, PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE

        assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
            "template_key": "template_value"

@@ -1,15 +1,13 @@
import pytest
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
from django.conf import settings
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import ConnectionRouter

from api.db_router import MainRouter
from api.rls import Tenant
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
from unittest.mock import patch


@patch("api.db_router.MainRouter.admin_db", new="admin")
class TestMainDatabaseRouter:
    @pytest.fixture(scope="module")
    def router(self):

@@ -2,15 +2,7 @@ from datetime import datetime, timezone
from enum import Enum
from unittest.mock import patch

import pytest

from api.db_utils import (
    batch_delete,
    enum_to_choices,
    generate_random_token,
    one_week_from_now,
)
from api.models import Provider
from api.db_utils import enum_to_choices, generate_random_token, one_week_from_now


class TestEnumToChoices:
@@ -114,27 +106,3 @@ class TestGenerateRandomToken:
        token = generate_random_token(length=5, symbols="")
        # Default symbols
        assert len(token) == 5


class TestBatchDelete:
    @pytest.fixture
    def create_test_providers(self, tenants_fixture):
        tenant = tenants_fixture[0]
        provider_id = 123456789012
        provider_count = 10
        for i in range(provider_count):
            Provider.objects.create(
                tenant=tenant,
                uid=f"{provider_id + i}",
                provider=Provider.ProviderChoices.AWS,
            )
        return provider_count

    @pytest.mark.django_db
    def test_batch_delete(self, tenants_fixture, create_test_providers):
        tenant_id = str(tenants_fixture[0].id)
        _, summary = batch_delete(
            tenant_id, Provider.objects.all(), batch_size=create_test_providers // 2
        )
        assert Provider.objects.all().count() == 0
        assert summary == {"api.Provider": create_test_providers}

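The `batch_delete` helper exercised by this test is not shown in the diff; a minimal sketch of the general technique it names — deleting a queryset in primary-key chunks to bound memory use and transaction size — under the assumption that the project's real implementation differs in detail:

def batch_delete_sketch(queryset, batch_size=5000):
    """Delete `queryset` in chunks; returns the total number of rows removed."""
    total = 0
    while True:
        pks = list(queryset.values_list("pk", flat=True)[:batch_size])
        if not pks:
            return total
        deleted, _ = queryset.model.objects.filter(pk__in=pks).delete()
        total += deleted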
@@ -1,9 +1,7 @@
import uuid
from unittest.mock import call, patch

import pytest

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
from api.decorators import set_tenant


@@ -17,12 +15,12 @@ class TestSetTenantDecorator:
        def random_func(arg):
            return arg

        tenant_id = str(uuid.uuid4())
        tenant_id = "1234-abcd-5678"

        result = random_func("test_arg", tenant_id=tenant_id)

        assert (
            call(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
            call(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
            in mock_cursor.execute.mock_calls
        )
        assert result == "test_arg"

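Both asserted call shapes boil down to the same PostgreSQL mechanism: a per-transaction session variable that the row-level-security policies read. A sketch with a raw Django cursor — the `api.tenant_id` GUC name matches the constant used above, while the `findings` table is purely illustrative:

from django.db import connection


def run_scoped(tenant_id):
    with connection.cursor() as cursor:
        # TRUE => the setting lasts only until the end of the current transaction
        cursor.execute("SELECT set_config('api.tenant_id', %s, TRUE);", [tenant_id])
        cursor.execute("SELECT count(*) FROM findings;")  # now filtered by RLS
        return cursor.fetchone()[0]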
@@ -7,10 +7,9 @@ from api.models import Resource, ResourceTag
class TestResourceModel:
    def test_setting_tags(self, providers_fixture):
        provider, *_ = providers_fixture
        tenant_id = provider.tenant_id

        resource = Resource.objects.create(
            tenant_id=tenant_id,
            tenant_id=provider.tenant_id,
            provider=provider,
            uid="arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0",
            name="My Instance 1",
@@ -21,12 +20,12 @@ class TestResourceModel:

        tags = [
            ResourceTag.objects.create(
                tenant_id=tenant_id,
                tenant_id=provider.tenant_id,
                key="key",
                value="value",
            ),
            ResourceTag.objects.create(
                tenant_id=tenant_id,
                tenant_id=provider.tenant_id,
                key="key2",
                value="value2",
            ),
@@ -34,9 +33,9 @@ class TestResourceModel:

        resource.upsert_or_delete_tags(tags)

        assert len(tags) == len(resource.tags.filter(tenant_id=tenant_id))
        assert len(tags) == len(resource.tags.all())

        tags_dict = resource.get_tags(tenant_id=tenant_id)
        tags_dict = resource.get_tags()

        for tag in tags:
            assert tag.key in tags_dict
@@ -44,51 +43,47 @@ class TestResourceModel:

    def test_adding_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)

        tags = [
            ResourceTag.objects.create(
                tenant_id=tenant_id,
                tenant_id=resource.tenant_id,
                key="env",
                value="test",
            ),
        ]
        before_count = len(resource.tags.filter(tenant_id=tenant_id))
        before_count = len(resource.tags.all())

        resource.upsert_or_delete_tags(tags)

        assert before_count + 1 == len(resource.tags.filter(tenant_id=tenant_id))
        assert before_count + 1 == len(resource.tags.all())

        tags_dict = resource.get_tags(tenant_id=tenant_id)
        tags_dict = resource.get_tags()

        assert "env" in tags_dict
        assert tags_dict["env"] == "test"

    def test_adding_duplicate_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)

        tags = resource.tags.filter(tenant_id=tenant_id)
        tags = resource.tags.all()

        before_count = len(resource.tags.filter(tenant_id=tenant_id))
        before_count = len(resource.tags.all())

        resource.upsert_or_delete_tags(tags)

        # should be the same number of tags
        assert before_count == len(resource.tags.filter(tenant_id=tenant_id))
        assert before_count == len(resource.tags.all())

    def test_add_tags_none(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)
        resource.upsert_or_delete_tags(None)

        assert len(resource.tags.filter(tenant_id=tenant_id)) == 0
        assert resource.get_tags(tenant_id=tenant_id) == {}
        assert len(resource.tags.all()) == 0
        assert resource.get_tags() == {}

    def test_clear_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        tenant_id = str(resource.tenant_id)
        resource.clear_tags()

        assert len(resource.tags.filter(tenant_id=tenant_id)) == 0
        assert resource.get_tags(tenant_id=tenant_id) == {}
        assert len(resource.tags.all()) == 0
        assert resource.get_tags() == {}

@@ -1,411 +0,0 @@
from unittest.mock import ANY, Mock, patch

import pytest
from django.urls import reverse
from rest_framework import status

from api.models import (
    Membership,
    ProviderGroup,
    ProviderGroupMembership,
    Role,
    RoleProviderGroupRelationship,
    User,
    UserRoleRelationship,
)
from api.v1.serializers import TokenSerializer


@pytest.mark.django_db
class TestUserViewSet:
    def test_list_users_with_all_permissions(self, authenticated_client_rbac):
        response = authenticated_client_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_200_OK
        assert isinstance(response.json()["data"], list)

    def test_list_users_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_retrieve_user_with_no_roles(
        self, authenticated_client_rbac_noroles, create_test_user_rbac_no_roles
    ):
        response = authenticated_client_rbac_noroles.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac_no_roles.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_create_user_with_all_permissions(self, authenticated_client_rbac):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_create_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_no_permissions_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_partial_update_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        updated_data = {
            "data": {
                "type": "users",
                "id": str(create_test_user_rbac.id),
                "attributes": {"name": "Updated Name"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id}),
            data=updated_data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["name"] == "Updated Name"

    def test_partial_update_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        updated_data = {
            "data": {
                "type": "users",
                "attributes": {"name": "Updated Name"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user.id}),
            data=updated_data,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_delete_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT

    def test_delete_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_me_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_me_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["email"] == "rbac_limited@rbac.com"


@pytest.mark.django_db
class TestProviderViewSet:
    def test_list_providers_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        response = authenticated_client_rbac.get(reverse("provider-list"))
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == len(providers_fixture)

    def test_list_providers_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-list")
        )
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

    def test_retrieve_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == provider.alias

    def test_retrieve_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_create_provider_with_all_permissions(self, authenticated_client_rbac):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["alias"] == "new_alias"

    def test_create_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_partial_update_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        payload = {
            "data": {
                "type": "providers",
                "id": provider.id,
                "attributes": {"alias": "updated_alias"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=payload,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == "updated_alias"

    def test_partial_update_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        update_payload = {
            "data": {
                "type": "providers",
                "attributes": {"alias": "updated_alias"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=update_payload,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.delete_provider_task.delay")
    def test_delete_provider_with_all_permissions(
        self,
        mock_delete_task,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        mock_delete_task.return_value = task_mock
        mock_task_get.return_value = prowler_task

        provider1, *_ = providers_fixture
        response = authenticated_client_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_delete_task.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_delete_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.check_provider_connection_task.delay")
    def test_connection_with_all_permissions(
        self,
        mock_provider_connection,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        task_mock.status = "PENDING"
        mock_provider_connection.return_value = task_mock
        mock_task_get.return_value = prowler_task

        provider1, *_ = providers_fixture
        assert provider1.connected is None
        assert provider1.connection_last_checked_at is None

        response = authenticated_client_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_provider_connection.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_connection_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN


@pytest.mark.django_db
class TestLimitedVisibility:
    TEST_EMAIL = "rbac@rbac.com"
    TEST_PASSWORD = "thisisapassword123"

    @pytest.fixture
    def limited_admin_user(
        self, django_db_setup, django_db_blocker, tenants_fixture, providers_fixture
    ):
        with django_db_blocker.unblock():
            tenant = tenants_fixture[0]
            provider = providers_fixture[0]
            user = User.objects.create_user(
                name="testing",
                email=self.TEST_EMAIL,
                password=self.TEST_PASSWORD,
            )
            Membership.objects.create(
                user=user,
                tenant=tenant,
                role=Membership.RoleChoices.OWNER,
            )

            role = Role.objects.create(
                name="limited_visibility",
                tenant=tenant,
                manage_users=True,
                manage_account=True,
                manage_billing=True,
                manage_providers=True,
                manage_integrations=True,
                manage_scans=True,
                unlimited_visibility=False,
            )
            UserRoleRelationship.objects.create(
                user=user,
                role=role,
                tenant=tenant,
            )

            provider_group = ProviderGroup.objects.create(
                name="limited_visibility_group",
                tenant=tenant,
            )
            ProviderGroupMembership.objects.create(
                tenant=tenant,
                provider=provider,
                provider_group=provider_group,
            )

            RoleProviderGroupRelationship.objects.create(
                tenant=tenant, role=role, provider_group=provider_group
            )

            return user

    @pytest.fixture
    def authenticated_client_rbac_limited(
        self, limited_admin_user, tenants_fixture, client
    ):
        client.user = limited_admin_user
        tenant_id = tenants_fixture[0].id
        serializer = TokenSerializer(
            data={
                "type": "tokens",
                "email": self.TEST_EMAIL,
                "password": self.TEST_PASSWORD,
                "tenant_id": tenant_id,
            }
        )
        serializer.is_valid(raise_exception=True)
        access_token = serializer.validated_data["access"]
        client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
        return client

    def test_integrations(
        self, authenticated_client_rbac_limited, integrations_fixture, providers_fixture
    ):
        # Integration 2 is related to provider1 and provider 2
        # This user cannot see provider 2
        integration = integrations_fixture[1]

        response = authenticated_client_rbac_limited.get(
            reverse("integration-detail", kwargs={"pk": integration.id})
        )

        assert response.status_code == status.HTTP_200_OK
        assert integration.providers.count() == 2
        assert (
            response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
        )
@@ -143,18 +143,6 @@ class TestProwlerProviderConnectionTest:
            key="value", provider_id="1234567890", raise_on_exception=False
        )

    @pytest.mark.django_db
    @patch("api.utils.return_prowler_provider")
    def test_prowler_provider_connection_test_without_secret(
        self, mock_return_prowler_provider, providers_fixture
    ):
        mock_return_prowler_provider.return_value = MagicMock()
        connection = prowler_provider_connection_test(providers_fixture[0])

        assert connection.is_connected is False
        assert isinstance(connection.error, Provider.secret.RelatedObjectDoesNotExist)
        assert str(connection.error) == "Provider has no secret."


class TestGetProwlerProviderKwargs:
    @pytest.mark.parametrize(
@@ -285,10 +273,9 @@ class TestValidateInvitation:
        expired_time = datetime.now(timezone.utc) - timedelta(days=1)
        invitation.expires_at = expired_time

        with (
            patch("api.utils.Invitation.objects.using") as mock_using,
            patch("api.utils.datetime") as mock_datetime,
        ):
        with patch("api.utils.Invitation.objects.using") as mock_using, patch(
            "api.utils.datetime"
        ) as mock_datetime:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation
            mock_datetime.now.return_value = datetime.now(timezone.utc)

@@ -7,12 +7,12 @@ from rest_framework_json_api.serializers import ValidationError
from uuid6 import UUID

from api.uuid_utils import (
    transform_into_uuid7,
    datetime_to_uuid7,
    datetime_from_uuid7,
    uuid7_start,
    datetime_to_uuid7,
    transform_into_uuid7,
    uuid7_end,
    uuid7_range,
    uuid7_start,
)


File diff suppressed because it is too large
@@ -1,6 +1,5 @@
from datetime import datetime, timezone

from allauth.socialaccount.providers.oauth2.client import OAuth2Client
from rest_framework.exceptions import NotFound, ValidationError

from api.db_router import MainRouter
@@ -13,15 +12,6 @@ from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider


class CustomOAuth2Client(OAuth2Client):
    def __init__(self, client_id, secret, *args, **kwargs):
        # Remove any duplicate "scope_delimiter" from kwargs
        # Bug present in dj-rest-auth after version v7.0.1
        # https://github.com/iMerica/dj-rest-auth/issues/673
        kwargs.pop("scope_delimiter", None)
        super().__init__(client_id, secret, *args, **kwargs)


def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
    """
    Recursively merge two dictionaries, using `default_dict` as the base and `replacement_dict` for overriding values.
@@ -130,10 +120,7 @@ def prowler_provider_connection_test(provider: Provider) -> Connection:
        Connection: A connection object representing the result of the connection test for the specified provider.
    """
    prowler_provider = return_prowler_provider(provider)
    try:
        prowler_provider_kwargs = provider.secret.secret
    except Provider.secret.RelatedObjectDoesNotExist as secret_error:
        return Connection(is_connected=False, error=secret_error)
    prowler_provider_kwargs = provider.secret.secret
    return prowler_provider.test_connection(
        **prowler_provider_kwargs, provider_id=provider.uid, raise_on_exception=False
    )

@@ -106,7 +106,7 @@ def uuid7_end(uuid_obj: UUID, offset_months: int = 1) -> UUID:

    Args:
        uuid_obj: A UUIDv7 object.
        offset_months: Number of months to offset from the given UUID's date. Defaults to 1 to handle if
        offset_days: Number of months to offset from the given UUID's date. Defaults to 1 to handle if
            partitions are not being used, if so the value will be the one set at FINDINGS_TABLE_PARTITION_MONTHS.

    Returns:

@@ -1,122 +0,0 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError


class BaseValidateSerializer(serializers.Serializer):
    def validate(self, data):
        if hasattr(self, "initial_data"):
            initial_data = set(self.initial_data.keys()) - {"id", "type"}
            unknown_keys = initial_data - set(self.fields.keys())
            if unknown_keys:
                raise ValidationError(f"Invalid fields: {unknown_keys}")
        return data


# Integrations


class S3ConfigSerializer(BaseValidateSerializer):
    bucket_name = serializers.CharField()
    output_directory = serializers.CharField()

    class Meta:
        resource_name = "integrations"


class AWSCredentialSerializer(BaseValidateSerializer):
    role_arn = serializers.CharField(required=False)
    external_id = serializers.CharField(required=False)
    role_session_name = serializers.CharField(required=False)
    session_duration = serializers.IntegerField(
        required=False, min_value=900, max_value=43200
    )
    aws_access_key_id = serializers.CharField(required=False)
    aws_secret_access_key = serializers.CharField(required=False)
    aws_session_token = serializers.CharField(required=False)

    class Meta:
        resource_name = "integrations"


@extend_schema_field(
    {
        "oneOf": [
            {
                "type": "object",
                "title": "AWS Credentials",
                "properties": {
                    "role_arn": {
                        "type": "string",
                        "description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
                        "assumption.",
                    },
                    "external_id": {
                        "type": "string",
                        "description": "An identifier to enhance security for role assumption.",
                    },
                    "aws_access_key_id": {
                        "type": "string",
                        "description": "The AWS access key ID. Only required if the environment lacks pre-configured "
                        "AWS credentials.",
                    },
                    "aws_secret_access_key": {
                        "type": "string",
                        "description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
                        "no AWS credentials are pre-configured.",
                    },
                    "aws_session_token": {
                        "type": "string",
                        "description": "The session token for temporary credentials, if applicable.",
                    },
                    "session_duration": {
                        "type": "integer",
                        "minimum": 900,
                        "maximum": 43200,
                        "default": 3600,
                        "description": "The duration (in seconds) for the role session.",
                    },
                    "role_session_name": {
                        "type": "string",
                        "description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
                        "The regex used to validate this parameter is a string of characters consisting of "
                        "upper- and lower-case alphanumeric characters with no spaces. You can also include "
                        "underscores or any of the following characters: =,.@-\n\n"
                        "Examples:\n"
                        "- MySession123\n"
                        "- User_Session-1\n"
                        "- Test.Session@2",
                        "pattern": "^[a-zA-Z0-9=,.@_-]+$",
                    },
                },
            },
        ]
    }
)
class IntegrationCredentialField(serializers.JSONField):
    pass


@extend_schema_field(
    {
        "oneOf": [
            {
                "type": "object",
                "title": "Amazon S3",
                "properties": {
                    "bucket_name": {
                        "type": "string",
                        "description": "The name of the S3 bucket where files will be stored.",
                    },
                    "output_directory": {
                        "type": "string",
                        "description": "The directory path within the bucket where files will be saved.",
                    },
                },
                "required": ["bucket_name", "output_directory"],
            },
        ]
    }
)
class IntegrationConfigField(serializers.JSONField):
    pass
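Usage sketch for the configuration serializer above, with illustrative input values: valid payloads pass, and unknown keys are rejected by BaseValidateSerializer.validate.

serializer = S3ConfigSerializer(
    data={"bucket_name": "my-findings-bucket", "output_directory": "prowler/output"}
)
assert serializer.is_valid(), serializer.errors

# Unknown keys make validation fail
bad = S3ConfigSerializer(data={"bucket_name": "b", "output_directory": "o", "extra": 1})
assert not bad.is_valid()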
File diff suppressed because it is too large
@@ -6,29 +6,21 @@ from api.v1.views import (
    ComplianceOverviewViewSet,
    CustomTokenObtainView,
    CustomTokenRefreshView,
    CustomTokenSwitchTenantView,
    FindingViewSet,
    GithubSocialLoginView,
    GoogleSocialLoginView,
    IntegrationViewSet,
    InvitationAcceptViewSet,
    InvitationViewSet,
    MembershipViewSet,
    OverviewViewSet,
    ProviderGroupProvidersRelationshipView,
    ProviderGroupViewSet,
    ProviderSecretViewSet,
    ProviderViewSet,
    ResourceViewSet,
    RoleProviderGroupRelationshipView,
    RoleViewSet,
    ScanViewSet,
    ScheduleViewSet,
    SchemaView,
    TaskViewSet,
    TenantMembersViewSet,
    TenantViewSet,
    UserRoleRelationshipView,
    UserViewSet,
)

@@ -37,18 +29,16 @@ router = routers.DefaultRouter(trailing_slash=False)
router.register(r"users", UserViewSet, basename="user")
router.register(r"tenants", TenantViewSet, basename="tenant")
router.register(r"providers", ProviderViewSet, basename="provider")
router.register(r"provider-groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"provider_groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"scans", ScanViewSet, basename="scan")
router.register(r"tasks", TaskViewSet, basename="task")
router.register(r"resources", ResourceViewSet, basename="resource")
router.register(r"findings", FindingViewSet, basename="finding")
router.register(r"roles", RoleViewSet, basename="role")
router.register(
    r"compliance-overviews", ComplianceOverviewViewSet, basename="complianceoverview"
)
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")
router.register(r"integrations", IntegrationViewSet, basename="integration")

tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(
@@ -61,7 +51,6 @@ users_router.register(r"memberships", MembershipViewSet, basename="user-membersh
urlpatterns = [
    path("tokens", CustomTokenObtainView.as_view(), name="token-obtain"),
    path("tokens/refresh", CustomTokenRefreshView.as_view(), name="token-refresh"),
    path("tokens/switch", CustomTokenSwitchTenantView.as_view(), name="token-switch"),
    path(
        "providers/secrets",
        ProviderSecretViewSet.as_view({"get": "list", "post": "create"}),
@@ -91,29 +80,6 @@ urlpatterns = [
        InvitationAcceptViewSet.as_view({"post": "accept"}),
        name="invitation-accept",
    ),
    path(
        "roles/<uuid:pk>/relationships/provider_groups",
        RoleProviderGroupRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="role-provider-groups-relationship",
    ),
    path(
        "users/<uuid:pk>/relationships/roles",
        UserRoleRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="user-roles-relationship",
    ),
    path(
        "provider-groups/<uuid:pk>/relationships/providers",
        ProviderGroupProvidersRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="provider_group-providers-relationship",
    ),
    path("tokens/google", GoogleSocialLoginView.as_view(), name="token-google"),
    path("tokens/github", GithubSocialLoginView.as_view(), name="token-github"),
    path("", include(router.urls)),
    path("", include(tenants_router.urls)),
    path("", include(users_router.urls)),

File diff suppressed because it is too large
@@ -1,21 +1,10 @@
from celery import Celery, Task
from config.env import env

BROKER_VISIBILITY_TIMEOUT = env.int("DJANGO_BROKER_VISIBILITY_TIMEOUT", default=86400)

celery_app = Celery("tasks")

celery_app.config_from_object("django.conf:settings", namespace="CELERY")
celery_app.conf.update(result_extended=True, result_expires=None)

celery_app.conf.broker_transport_options = {
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
celery_app.conf.result_backend_transport_options = {
    "visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
celery_app.conf.visibility_timeout = BROKER_VISIBILITY_TIMEOUT

celery_app.autodiscover_tasks(["api"])


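Context on the block above: with a Redis/Valkey-style broker, a task that is not acknowledged within visibility_timeout is re-delivered to another worker, so the value must exceed the longest expected scan (the default here is 86400 seconds, one day). A sketch of checking the effective setting, assuming the app is importable:

from config.celery import celery_app

# Re-delivery window must cover the longest-running task
assert celery_app.conf.broker_transport_options["visibility_timeout"] >= 86400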
@@ -46,13 +35,13 @@ class RLSTask(Task):
            **options,
        )
        task_result_instance = TaskResult.objects.get(task_id=result.task_id)
        from api.db_utils import rls_transaction
        from api.db_utils import tenant_transaction

        tenant_id = kwargs.get("tenant_id")
        with rls_transaction(tenant_id):
            APITask.objects.update_or_create(
        with tenant_transaction(tenant_id):
            APITask.objects.create(
                id=task_result_instance.task_id,
                tenant_id=tenant_id,
                defaults={"task_runner_task": task_result_instance},
                task_runner_task=task_result_instance,
            )
        return result

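The update_or_create side of this hunk is the idempotent variant: if the same task id is published twice (for example on a retry), create would raise an IntegrityError on the primary key, while update_or_create quietly refreshes the row. A generic sketch of the difference, with task_id and tr as placeholder values:

# create: a second call with the same pk raises IntegrityError
# APITask.objects.create(id=task_id, tenant_id=tenant_id, task_runner_task=tr)

# update_or_create: safe to repeat; matches on the lookup kwargs and
# applies `defaults` whether the row is new or already exists
obj, created = APITask.objects.update_or_create(
    id=task_id,
    tenant_id=tenant_id,
    defaults={"task_runner_task": tr},
)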
@@ -4,8 +4,6 @@ from config.custom_logging import LOGGING # noqa
|
||||
from config.env import BASE_DIR, env # noqa
|
||||
from config.settings.celery import * # noqa
|
||||
from config.settings.partitions import * # noqa
|
||||
from config.settings.sentry import * # noqa
|
||||
from config.settings.social_login import * # noqa
|
||||
|
||||
SECRET_KEY = env("SECRET_KEY", default="secret")
|
||||
DEBUG = env.bool("DJANGO_DEBUG", default=False)
|
||||
@@ -31,13 +29,6 @@ INSTALLED_APPS = [
|
||||
"django_celery_results",
|
||||
"django_celery_beat",
|
||||
"rest_framework_simplejwt.token_blacklist",
|
||||
"allauth",
|
||||
"allauth.account",
|
||||
"allauth.socialaccount",
|
||||
"allauth.socialaccount.providers.google",
|
||||
"allauth.socialaccount.providers.github",
|
||||
"dj_rest_auth.registration",
|
||||
"rest_framework.authtoken",
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
@@ -51,11 +42,8 @@ MIDDLEWARE = [
|
||||
"django.contrib.messages.middleware.MessageMiddleware",
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
||||
"api.middleware.APILoggingMiddleware",
|
||||
"allauth.account.middleware.AccountMiddleware",
|
||||
]
|
||||
|
||||
SITE_ID = 1
|
||||
|
||||
CORS_ALLOWED_ORIGINS = ["http://localhost", "http://127.0.0.1"]
|
||||
|
||||
ROOT_URLCONF = "config.urls"
|
||||
@@ -219,27 +207,3 @@ CACHE_STALE_WHILE_REVALIDATE = env.int("DJANGO_STALE_WHILE_REVALIDATE", 60)

TESTING = False

FINDINGS_MAX_DAYS_IN_RANGE = env.int("DJANGO_FINDINGS_MAX_DAYS_IN_RANGE", 7)

# API export settings
DJANGO_TMP_OUTPUT_DIRECTORY = env.str(
    "DJANGO_TMP_OUTPUT_DIRECTORY", "/tmp/prowler_api_output"
)
DJANGO_FINDINGS_BATCH_SIZE = env.str("DJANGO_FINDINGS_BATCH_SIZE", 1000)

DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", "")
DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID = env.str("DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID", "")
DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY = env.str(
    "DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY", ""
)
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN = env.str("DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN", "")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION = env.str("DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION", "")

# HTTP Security Headers
SECURE_CONTENT_TYPE_NOSNIFF = True
X_FRAME_OPTIONS = "DENY"
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"

DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)
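All of these settings resolve through the shared env helper. Assuming config.env follows the django-environ pattern (the exact wrapper is not shown in this compare), overriding any of them is just a matter of exporting the variable before the process starts:

import os

import environ  # django-environ; assumed here to be what backs config.env

os.environ.setdefault("DJANGO_FINDINGS_BATCH_SIZE", "500")

env = environ.Env()
# Each lookup falls back to its default when the variable is unset.
batch_size = env.int("DJANGO_FINDINGS_BATCH_SIZE", default=1000)
bucket = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", default="")
assert batch_size == 500 and bucket == ""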
@@ -9,8 +9,8 @@ DATABASES = {
    "default": {
        "ENGINE": "psqlextra.backend",
        "NAME": "prowler_db_test",
        "USER": env("POSTGRES_USER", default="prowler_admin"),
        "PASSWORD": env("POSTGRES_PASSWORD", default="postgres"),
        "USER": env("POSTGRES_USER", default="prowler"),
        "PASSWORD": env("POSTGRES_PASSWORD", default="S3cret"),
        "HOST": env("POSTGRES_HOST", default="localhost"),
        "PORT": env("POSTGRES_PORT", default="5432"),
    },
@@ -11,8 +11,9 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")
|
||||
import django # noqa: E402
|
||||
|
||||
django.setup()
|
||||
from config.django.production import LOGGING as DJANGO_LOGGERS, DEBUG # noqa: E402
|
||||
from config.custom_logging import BackendLogger # noqa: E402
|
||||
from config.django.production import DEBUG
|
||||
from config.django.production import LOGGING as DJANGO_LOGGERS # noqa: E402
|
||||
|
||||
BIND_ADDRESS = env("DJANGO_BIND_ADDRESS", default="127.0.0.1")
|
||||
PORT = env("DJANGO_PORT", default=8000)
|
||||
|
||||
@@ -1,90 +0,0 @@
import sentry_sdk
from config.env import env

IGNORED_EXCEPTIONS = [
    # Provider is not connected due to credentials errors
    "is not connected",
    # Authentication Errors from AWS
    "InvalidToken",
    "AccessDeniedException",
    "AuthorizationErrorException",
    "UnrecognizedClientException",
    "UnauthorizedOperation",
    "AuthFailure",
    "InvalidClientTokenId",
    "AccessDenied",
    "No Shodan API Key",  # Shodan Check
    "RequestLimitExceeded",  # For now we don't want to log the RequestLimitExceeded errors
    "ThrottlingException",
    "Rate exceeded",
    "SubscriptionRequiredException",
    "UnknownOperationException",
    "OptInRequired",
    "ReadTimeout",
    "LimitExceeded",
    "ConnectTimeoutError",
    "ExpiredToken",
    "IncompleteSignature",
    "RegionDisabledException",
    "TooManyRequestsException",
    "SignatureDoesNotMatch",
    "InvalidParameterValueException",
    "InvalidInputException",
    "ValidationException",
    "AWSSecretAccessKeyInvalidError",
    "InvalidAction",
    "Pool is closed",  # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
    # Authentication Errors from GCP
    "ClientAuthenticationError",
    "AuthorizationFailed",
    "Reauthentication is needed",
    "Permission denied to get service",
    "API has not been used in project",
    "HttpError 404 when requesting",
    "GCPNoAccesibleProjectsError",
    # Authentication Errors from Azure
    "ClientAuthenticationError",
    "AuthorizationFailed",
    "Subscription Not Registered",
    "AzureNotValidClientIdError",
    "AzureNotValidClientSecretError",
    "AzureNotValidTenantIdError",
    "AzureTenantIdAndClientSecretNotBelongingToClientIdError",
    "AzureTenantIdAndClientIdNotBelongingToClientSecretError",
    "AzureClientIdAndClientSecretNotBelongingToTenantIdError",
    "AzureHTTPResponseError",
    "Error with credentials provided",
    # AWS Service is not available in a region
    "EndpointConnectionError",
]


def before_send(event, hint):
    """
    before_send decides whether each Sentry event is sent or dropped
    """
    # Ignore log records whose message matches one of the ignored exceptions
    # https://docs.python.org/3/library/logging.html#logrecord-objects
    if "log_record" in hint:
        log_msg = hint["log_record"].msg
        log_lvl = hint["log_record"].levelno

        # Inspect ERROR events (levelno 40) and discard the ones that match
        if log_lvl == 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
            return
    return event


sentry_sdk.init(
    dsn=env.str("DJANGO_SENTRY_DSN", ""),
    # Add data like request headers and IP for users,
    # see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
    before_send=before_send,
    send_default_pii=True,
    _experiments={
        # Set continuous_profiling_auto_start to True
        # to automatically start the profiler when possible.
        "continuous_profiling_auto_start": True,
    },
)
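The filter only looks at ERROR-level log records (levelno 40) whose message contains one of the ignored substrings; everything else passes through. A quick standalone check of that logic, assuming the hint payload shape documented above:

import logging

IGNORED_EXCEPTIONS = ["AccessDenied", "RequestLimitExceeded"]


def before_send(event, hint):
    if "log_record" in hint:
        record = hint["log_record"]
        if record.levelno == logging.ERROR and any(
            ignored in record.msg for ignored in IGNORED_EXCEPTIONS
        ):
            return None  # returning None drops the event
    return event


record = logging.LogRecord(
    "api", logging.ERROR, __file__, 1,
    "AccessDenied when calling ListBuckets", None, None,
)
assert before_send({"event_id": "abc"}, {"log_record": record}) is None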
@@ -1,53 +0,0 @@
from config.env import env

# Provider Oauth settings
GOOGLE_OAUTH_CLIENT_ID = env("SOCIAL_GOOGLE_OAUTH_CLIENT_ID", default="")
GOOGLE_OAUTH_CLIENT_SECRET = env("SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET", default="")
GOOGLE_OAUTH_CALLBACK_URL = env("SOCIAL_GOOGLE_OAUTH_CALLBACK_URL", default="")

GITHUB_OAUTH_CLIENT_ID = env("SOCIAL_GITHUB_OAUTH_CLIENT_ID", default="")
GITHUB_OAUTH_CLIENT_SECRET = env("SOCIAL_GITHUB_OAUTH_CLIENT_SECRET", default="")
GITHUB_OAUTH_CALLBACK_URL = env("SOCIAL_GITHUB_OAUTH_CALLBACK_URL", default="")

# Allauth settings
ACCOUNT_LOGIN_METHODS = {"email"}  # Use Email / Password authentication
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "none"  # Do not require email confirmation
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
REST_AUTH = {
    "TOKEN_MODEL": None,
    "REST_USE_JWT": True,
}
# django-allauth (social)
# Authenticate if a local account with this email address already exists
SOCIALACCOUNT_EMAIL_AUTHENTICATION = True
# Connect the social account to the local account if one with that email address already exists
SOCIALACCOUNT_EMAIL_AUTHENTICATION_AUTO_CONNECT = True
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
SOCIALACCOUNT_PROVIDERS = {
    "google": {
        "APP": {
            "client_id": GOOGLE_OAUTH_CLIENT_ID,
            "secret": GOOGLE_OAUTH_CLIENT_SECRET,
            "key": "",
        },
        "SCOPE": [
            "email",
            "profile",
        ],
        "AUTH_PARAMS": {
            "access_type": "online",
        },
    },
    "github": {
        "APP": {
            "client_id": GITHUB_OAUTH_CLIENT_ID,
            "secret": GITHUB_OAUTH_CLIENT_SECRET,
        },
        "SCOPE": [
            "user",
            "read:org",
        ],
    },
}
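With these providers wired up, the tokens/google and tokens/github routes added in this compare exchange an OAuth result for the API's JWT pair. A hypothetical client-side exchange (the request field name and response shape are assumptions, not confirmed by this diff):

import requests

# Hypothetical exchange: "code" as the payload field is an assumption.
response = requests.post(
    "http://localhost:8080/api/v1/tokens/google",
    json={"code": "<authorization-code-from-google>"},
    timeout=10,
)
response.raise_for_status()
tokens = response.json()  # expected to carry the access/refresh JWT pair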
@@ -1,6 +1,5 @@
import logging
from datetime import datetime, timedelta, timezone
from unittest.mock import patch

import pytest
from django.conf import settings
@@ -11,12 +10,9 @@ from django_celery_results.models import TaskResult
from rest_framework import status
from rest_framework.test import APIClient

from api.db_utils import rls_transaction
from api.models import (
    ComplianceOverview,
    Finding,
    Integration,
    IntegrationProviderRelationship,
    Invitation,
    Membership,
    Provider,
@@ -24,13 +20,10 @@ from api.models import (
    ProviderSecret,
    Resource,
    ResourceTag,
    Role,
    Scan,
    ScanSummary,
    StateChoices,
    Task,
    User,
    UserRoleRelationship,
)
from api.rls import Tenant
from api.v1.serializers import TokenSerializer
@@ -89,150 +82,8 @@ def create_test_user(django_db_setup, django_db_blocker):
        return user


@pytest.fixture(scope="function")
def create_test_user_rbac(django_db_setup, django_db_blocker, tenants_fixture):
    with django_db_blocker.unblock():
        user = User.objects.create_user(
            name="testing",
            email="rbac@rbac.com",
            password=TEST_PASSWORD,
        )
        tenant = tenants_fixture[0]
        Membership.objects.create(
            user=user,
            tenant=tenant,
            role=Membership.RoleChoices.OWNER,
        )
        Role.objects.create(
            name="admin",
            tenant_id=tenant.id,
            manage_users=True,
            manage_account=True,
            manage_billing=True,
            manage_providers=True,
            manage_integrations=True,
            manage_scans=True,
            unlimited_visibility=True,
        )
        UserRoleRelationship.objects.create(
            user=user,
            role=Role.objects.get(name="admin"),
            tenant_id=tenant.id,
        )
        return user


@pytest.fixture(scope="function")
def create_test_user_rbac_no_roles(django_db_setup, django_db_blocker, tenants_fixture):
    with django_db_blocker.unblock():
        user = User.objects.create_user(
            name="testing",
            email="rbac_noroles@rbac.com",
            password=TEST_PASSWORD,
        )
        tenant = tenants_fixture[0]
        Membership.objects.create(
            user=user,
            tenant=tenant,
            role=Membership.RoleChoices.OWNER,
        )

        return user


@pytest.fixture(scope="function")
def create_test_user_rbac_limited(django_db_setup, django_db_blocker):
    with django_db_blocker.unblock():
        user = User.objects.create_user(
            name="testing_limited",
            email="rbac_limited@rbac.com",
            password=TEST_PASSWORD,
        )
        tenant = Tenant.objects.create(
            name="Tenant Test",
        )
        Membership.objects.create(
            user=user,
            tenant=tenant,
            role=Membership.RoleChoices.OWNER,
        )
        Role.objects.create(
            name="limited",
            tenant_id=tenant.id,
            manage_users=False,
            manage_account=False,
            manage_billing=False,
            manage_providers=False,
            manage_integrations=False,
            manage_scans=False,
            unlimited_visibility=False,
        )
        UserRoleRelationship.objects.create(
            user=user,
            role=Role.objects.get(name="limited"),
            tenant_id=tenant.id,
        )
        return user


@pytest.fixture
def authenticated_client_rbac(create_test_user_rbac, tenants_fixture, client):
    client.user = create_test_user_rbac
    tenant_id = tenants_fixture[0].id
    serializer = TokenSerializer(
        data={
            "type": "tokens",
            "email": "rbac@rbac.com",
            "password": TEST_PASSWORD,
            "tenant_id": tenant_id,
        }
    )
    serializer.is_valid(raise_exception=True)
    access_token = serializer.validated_data["access"]
    client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
    return client


@pytest.fixture
def authenticated_client_rbac_noroles(
    create_test_user_rbac_no_roles, tenants_fixture, client
):
    client.user = create_test_user_rbac_no_roles
    serializer = TokenSerializer(
        data={
            "type": "tokens",
            "email": "rbac_noroles@rbac.com",
            "password": TEST_PASSWORD,
        }
    )
    serializer.is_valid()
    access_token = serializer.validated_data["access"]
    client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
    return client


@pytest.fixture
def authenticated_client_no_permissions_rbac(
    create_test_user_rbac_limited, tenants_fixture, client
):
    client.user = create_test_user_rbac_limited
    serializer = TokenSerializer(
        data={
            "type": "tokens",
            "email": "rbac_limited@rbac.com",
            "password": TEST_PASSWORD,
        }
    )
    serializer.is_valid()
    access_token = serializer.validated_data["access"]
    client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
    return client


@pytest.fixture
def authenticated_client(
    create_test_user, tenants_fixture, set_user_admin_roles_fixture, client
):
def authenticated_client(create_test_user, tenants_fixture, client):
    client.user = create_test_user
    serializer = TokenSerializer(
        data={"type": "tokens", "email": TEST_USER, "password": TEST_PASSWORD}
@@ -252,7 +103,6 @@ def authenticated_api_client(create_test_user, tenants_fixture):
    serializer.is_valid()
    access_token = serializer.validated_data["access"]
    client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"

    return client
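Taken together, these fixtures let a test run as a fully privileged or a restricted user just by choosing a client. A sketch of typical consumption (the endpoint path and expected statuses are illustrative):

import pytest


@pytest.mark.django_db
def test_limited_user_sees_no_providers(authenticated_client_no_permissions_rbac):
    # Illustrative endpoint: the limited role above has no permissions and no
    # unlimited_visibility, so listings come back empty or are rejected.
    response = authenticated_client_no_permissions_rbac.get("/api/v1/providers")
    assert response.status_code in (200, 403)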
@@ -277,37 +127,13 @@ def tenants_fixture(create_test_user):
    tenant3 = Tenant.objects.create(
        name="Tenant Three",
    )

    return tenant1, tenant2, tenant3


@pytest.fixture
def set_user_admin_roles_fixture(create_test_user, tenants_fixture):
    user = create_test_user
    for tenant in tenants_fixture[:2]:
        with rls_transaction(str(tenant.id)):
            role = Role.objects.create(
                name="admin",
                tenant_id=tenant.id,
                manage_users=True,
                manage_account=True,
                manage_billing=True,
                manage_providers=True,
                manage_integrations=True,
                manage_scans=True,
                unlimited_visibility=True,
            )
            UserRoleRelationship.objects.create(
                user=user,
                role=role,
                tenant_id=tenant.id,
            )


@pytest.fixture
def invitations_fixture(create_test_user, tenants_fixture):
    user = create_test_user
    tenant = tenants_fixture[0]
    *_, tenant = tenants_fixture
    valid_invitation = Invitation.objects.create(
        email="testing@prowler.com",
        state=Invitation.State.PENDING,
@@ -326,20 +152,6 @@ def invitations_fixture(create_test_user, tenants_fixture):
    return valid_invitation, expired_invitation


@pytest.fixture
def users_fixture(django_user_model):
    user1 = User.objects.create_user(
        name="user1", email="test_unit0@prowler.com", password="S3cret"
    )
    user2 = User.objects.create_user(
        name="user2", email="test_unit1@prowler.com", password="S3cret"
    )
    user3 = User.objects.create_user(
        name="user3", email="test_unit2@prowler.com", password="S3cret"
    )
    return user1, user2, user3


@pytest.fixture
def providers_fixture(tenants_fixture):
    tenant, *_ = tenants_fixture
@@ -397,74 +209,6 @@ def provider_groups_fixture(tenants_fixture):
    return pgroup1, pgroup2, pgroup3


@pytest.fixture
def admin_role_fixture(tenants_fixture):
    tenant, *_ = tenants_fixture

    return Role.objects.get_or_create(
        name="admin",
        tenant_id=tenant.id,
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )[0]


@pytest.fixture
def roles_fixture(tenants_fixture):
    tenant, *_ = tenants_fixture
    role1 = Role.objects.create(
        name="Role One",
        tenant_id=tenant.id,
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=False,
        manage_scans=True,
        unlimited_visibility=False,
    )
    role2 = Role.objects.create(
        name="Role Two",
        tenant_id=tenant.id,
        manage_users=False,
        manage_account=False,
        manage_billing=False,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )
    role3 = Role.objects.create(
        name="Role Three",
        tenant_id=tenant.id,
        manage_users=True,
        manage_account=True,
        manage_billing=True,
        manage_providers=True,
        manage_integrations=True,
        manage_scans=True,
        unlimited_visibility=True,
    )
    role4 = Role.objects.create(
        name="Role Four",
        tenant_id=tenant.id,
        manage_users=False,
        manage_account=False,
        manage_billing=False,
        manage_providers=False,
        manage_integrations=False,
        manage_scans=False,
        unlimited_visibility=False,
    )

    return role1, role2, role3, role4


@pytest.fixture
def provider_secret_fixture(providers_fixture):
    return tuple(
@@ -488,7 +232,7 @@ def scans_fixture(tenants_fixture, providers_fixture):
        name="Scan 1",
        provider=provider,
        trigger=Scan.TriggerChoices.MANUAL,
        state=StateChoices.COMPLETED,
        state=StateChoices.AVAILABLE,
        tenant_id=tenant.id,
        started_at="2024-01-02T00:00:00Z",
    )
@@ -628,7 +372,6 @@ def findings_fixture(scans_fixture, resources_fixture):
            "CheckId": "test_check_id",
            "Description": "test description apple sauce",
        },
        first_seen_at="2024-01-02T00:00:00Z",
    )

    finding1.add_resources([resource1])
@@ -654,8 +397,6 @@
            "CheckId": "test_check_id",
            "Description": "test description orange juice",
        },
        first_seen_at="2024-01-02T00:00:00Z",
        muted=True,
    )

    finding2.add_resources([resource2])
@@ -801,141 +542,5 @@ def get_api_tokens(
    )


@pytest.fixture
def scan_summaries_fixture(tenants_fixture, providers_fixture):
    tenant = tenants_fixture[0]
    provider = providers_fixture[0]
    scan = Scan.objects.create(
        name="overview scan",
        provider=provider,
        trigger=Scan.TriggerChoices.MANUAL,
        state=StateChoices.COMPLETED,
        tenant=tenant,
    )

    ScanSummary.objects.create(
        tenant=tenant,
        check_id="check1",
        service="service1",
        severity="high",
        region="region1",
        _pass=1,
        fail=0,
        muted=0,
        total=1,
        new=1,
        changed=0,
        unchanged=0,
        fail_new=0,
        fail_changed=0,
        pass_new=1,
        pass_changed=0,
        muted_new=0,
        muted_changed=0,
        scan=scan,
    )

    ScanSummary.objects.create(
        tenant=tenant,
        check_id="check1",
        service="service1",
        severity="high",
        region="region2",
        _pass=0,
        fail=1,
        muted=1,
        total=2,
        new=2,
        changed=0,
        unchanged=0,
        fail_new=1,
        fail_changed=0,
        pass_new=0,
        pass_changed=0,
        muted_new=1,
        muted_changed=0,
        scan=scan,
    )

    ScanSummary.objects.create(
        tenant=tenant,
        check_id="check2",
        service="service2",
        severity="critical",
        region="region1",
        _pass=1,
        fail=0,
        muted=0,
        total=1,
        new=1,
        changed=0,
        unchanged=0,
        fail_new=0,
        fail_changed=0,
        pass_new=1,
        pass_changed=0,
        muted_new=0,
        muted_changed=0,
        scan=scan,
    )


@pytest.fixture
def integrations_fixture(providers_fixture):
    provider1, provider2, *_ = providers_fixture
    tenant_id = provider1.tenant_id
    integration1 = Integration.objects.create(
        tenant_id=tenant_id,
        enabled=True,
        connected=True,
        integration_type="amazon_s3",
        configuration={"key": "value"},
        credentials={"psswd": "1234"},
    )
    IntegrationProviderRelationship.objects.create(
        tenant_id=tenant_id,
        integration=integration1,
        provider=provider1,
    )

    integration2 = Integration.objects.create(
        tenant_id=tenant_id,
        enabled=True,
        connected=True,
        integration_type="amazon_s3",
        configuration={"key": "value"},
        credentials={"psswd": "1234"},
    )
    IntegrationProviderRelationship.objects.create(
        tenant_id=tenant_id,
        integration=integration2,
        provider=provider1,
    )
    IntegrationProviderRelationship.objects.create(
        tenant_id=tenant_id,
        integration=integration2,
        provider=provider2,
    )

    return integration1, integration2


def get_authorization_header(access_token: str) -> dict:
    return {"Authorization": f"Bearer {access_token}"}


def pytest_collection_modifyitems(items):
    """Ensure test_rbac.py is executed first."""
    items.sort(key=lambda item: 0 if "test_rbac.py" in item.nodeid else 1)


def pytest_configure(config):
    # Apply the mock before the test session starts. This is necessary to avoid an
    # admin database error when running the 0004_rbac_missing_admin_roles migration
    patch("api.db_router.MainRouter.admin_db", new="default").start()


def pytest_unconfigure(config):
    # Stop all patches after the test session ends. This is necessary to avoid an
    # admin database error when running the 0004_rbac_missing_admin_roles migration
    patch.stopall()
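Outside the fixtures, get_authorization_header covers tests that build their own client. A sketch pairing the same Bearer token with DRF's APIClient (credentials() is the standard DRF way to set default request headers):

from rest_framework.test import APIClient


def client_with_token(access_token: str) -> APIClient:
    # Equivalent to the authenticated_* fixtures above, for ad-hoc clients.
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {access_token}")
    return client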
@@ -1,18 +1,13 @@
import json
from datetime import datetime, timedelta, timezone

from django_celery_beat.models import IntervalSchedule, PeriodicTask
from rest_framework_json_api.serializers import ValidationError
from tasks.tasks import perform_scheduled_scan_task

from api.db_utils import rls_transaction
from api.models import Provider, Scan, StateChoices
from api.models import Provider


def schedule_provider_scan(provider_instance: Provider):
    tenant_id = str(provider_instance.tenant_id)
    provider_id = str(provider_instance.id)

    schedule, _ = IntervalSchedule.objects.get_or_create(
        every=24,
        period=IntervalSchedule.HOURS,
@@ -21,9 +16,20 @@ def schedule_provider_scan(provider_instance: Provider):
    # Create a unique name for the periodic task
    task_name = f"scan-perform-scheduled-{provider_instance.id}"

    if PeriodicTask.objects.filter(
        interval=schedule, name=task_name, task="scan-perform-scheduled"
    ).exists():
    # Schedule the task
    _, created = PeriodicTask.objects.get_or_create(
        interval=schedule,
        name=task_name,
        task="scan-perform-scheduled",
        kwargs=json.dumps(
            {
                "tenant_id": str(provider_instance.tenant_id),
                "provider_id": str(provider_instance.id),
            }
        ),
        one_off=False,
    )
    if not created:
        raise ValidationError(
            [
                {
@@ -35,36 +41,9 @@ def schedule_provider_scan(provider_instance: Provider):
            ]
        )

    with rls_transaction(tenant_id):
        scheduled_scan = Scan.objects.create(
            tenant_id=tenant_id,
            name="Daily scheduled scan",
            provider_id=provider_id,
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduled_at=datetime.now(timezone.utc),
        )

    # Schedule the task
    periodic_task_instance = PeriodicTask.objects.create(
        interval=schedule,
        name=task_name,
        task="scan-perform-scheduled",
        kwargs=json.dumps(
            {
                "tenant_id": tenant_id,
                "provider_id": provider_id,
            }
        ),
        one_off=False,
        start_time=datetime.now(timezone.utc) + timedelta(hours=24),
    )
    scheduled_scan.scheduler_task_id = periodic_task_instance.id
    scheduled_scan.save()

    return perform_scheduled_scan_task.apply_async(
        kwargs={
            "tenant_id": str(provider_instance.tenant_id),
            "provider_id": provider_id,
            "provider_id": str(provider_instance.id),
        },
    )
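One side of this hunk replaces the exists-then-create sequence with a single get_or_create keyed on the unique task name, raising when the schedule already exists. Condensed to just that guard, under the same django-celery-beat models (ValueError stands in for the JSON:API ValidationError above):

import json

from django_celery_beat.models import IntervalSchedule, PeriodicTask


def ensure_daily_scan(tenant_id: str, provider_id: str) -> PeriodicTask:
    schedule, _ = IntervalSchedule.objects.get_or_create(
        every=24, period=IntervalSchedule.HOURS
    )
    task, created = PeriodicTask.objects.get_or_create(
        interval=schedule,
        name=f"scan-perform-scheduled-{provider_id}",  # unique per provider
        task="scan-perform-scheduled",
        kwargs=json.dumps({"tenant_id": tenant_id, "provider_id": provider_id}),
        one_off=False,
    )
    if not created:
        # get_or_create makes the uniqueness check and the insert one step,
        # avoiding the race in a separate .exists() check.
        raise ValueError("Provider already has a scheduled scan")
    return task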
@@ -1,19 +1,17 @@
from celery.utils.log import get_task_logger
from django.db import DatabaseError
from django.db import transaction

from api.db_router import MainRouter
from api.db_utils import batch_delete, rls_transaction
from api.models import Finding, Provider, Resource, Scan, ScanSummary, Tenant
from api.db_utils import batch_delete
from api.models import Finding, Provider, Resource, Scan

logger = get_task_logger(__name__)


def delete_provider(tenant_id: str, pk: str):
def delete_provider(pk: str):
    """
    Gracefully deletes an instance of a provider along with its related data.

    Args:
        tenant_id (str): Tenant ID the resources belong to.
        pk (str): The primary key of the Provider instance to delete.

    Returns:
@@ -23,51 +21,26 @@ def delete_provider(tenant_id: str, pk: str):
    Raises:
        Provider.DoesNotExist: If no instance with the provided primary key exists.
    """
    with rls_transaction(tenant_id):
        instance = Provider.all_objects.get(pk=pk)
    deletion_summary = {}
    deletion_steps = [
        ("Scan Summaries", ScanSummary.all_objects.filter(scan__provider=instance)),
        ("Findings", Finding.all_objects.filter(scan__provider=instance)),
        ("Resources", Resource.all_objects.filter(provider=instance)),
        ("Scans", Scan.all_objects.filter(provider=instance)),
    ]

    for step_name, queryset in deletion_steps:
        try:
            _, step_summary = batch_delete(tenant_id, queryset)
            deletion_summary.update(step_summary)
        except DatabaseError as db_error:
            logger.error(f"Error deleting {step_name}: {db_error}")
            raise

    try:
        with rls_transaction(tenant_id):
            _, provider_summary = instance.delete()
            deletion_summary.update(provider_summary)
    except DatabaseError as db_error:
        logger.error(f"Error deleting Provider: {db_error}")
        raise
    return deletion_summary


def delete_tenant(pk: str):
    """
    Gracefully deletes an instance of a tenant along with its related data.

    Args:
        pk (str): The primary key of the Tenant instance to delete.

    Returns:
        dict: A dictionary with the count of deleted objects per model,
            including related models.
    """
    instance = Provider.all_objects.get(pk=pk)
    deletion_summary = {}

    for provider in Provider.objects.using(MainRouter.admin_db).filter(tenant_id=pk):
        summary = delete_provider(pk, provider.id)
        deletion_summary.update(summary)
    with transaction.atomic():
        # Delete Findings
        findings_qs = Finding.all_objects.filter(scan__provider=instance)
        _, findings_summary = batch_delete(findings_qs)
        deletion_summary.update(findings_summary)

    Tenant.objects.using(MainRouter.admin_db).filter(id=pk).delete()
        # Delete Resources
        resources_qs = Resource.all_objects.filter(provider=instance)
        _, resources_summary = batch_delete(resources_qs)
        deletion_summary.update(resources_summary)

        # Delete Scans
        scans_qs = Scan.all_objects.filter(provider=instance)
        _, scans_summary = batch_delete(scans_qs)
        deletion_summary.update(scans_summary)

        provider_deleted_count, provider_summary = instance.delete()
        deletion_summary.update(provider_summary)

    return deletion_summary
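batch_delete itself is not shown in this compare. A plausible shape, deleting one primary-key slice at a time so a large tenant never holds a single giant transaction (the DJANGO_DELETION_BATCH_SIZE setting above would feed batch_size; the tenant_id argument seen in the newer call sites is omitted here):

def batch_delete(queryset, batch_size: int = 5000):
    # Sketch only: the real api.db_utils.batch_delete may differ in signature
    # and in how it scopes each batch to the tenant.
    total, summary = 0, {}
    while True:
        pks = list(queryset.values_list("pk", flat=True)[:batch_size])
        if not pks:
            break
        deleted, per_model = queryset.model.objects.filter(pk__in=pks).delete()
        total += deleted
        for model_label, count in per_model.items():
            summary[model_label] = summary.get(model_label, 0) + count
    return total, summary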
@@ -1,156 +0,0 @@
import os
import zipfile

import boto3
import config.django.base as base
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
from celery.utils.log import get_task_logger
from django.conf import settings

from prowler.config.config import (
    csv_file_suffix,
    html_file_suffix,
    json_ocsf_file_suffix,
    output_file_timestamp,
)
from prowler.lib.outputs.csv.csv import CSV
from prowler.lib.outputs.html.html import HTML
from prowler.lib.outputs.ocsf.ocsf import OCSF

logger = get_task_logger(__name__)


# Predefined mapping for output formats and their configurations
OUTPUT_FORMATS_MAPPING = {
    "csv": {
        "class": CSV,
        "suffix": csv_file_suffix,
        "kwargs": {},
    },
    "json-ocsf": {"class": OCSF, "suffix": json_ocsf_file_suffix, "kwargs": {}},
    "html": {"class": HTML, "suffix": html_file_suffix, "kwargs": {"stats": {}}},
}
def _compress_output_files(output_directory: str) -> str:
    """
    Compress output files from all configured output formats into a ZIP archive.

    Args:
        output_directory (str): The directory where the output files are located.
            The function looks up all known suffixes in OUTPUT_FORMATS_MAPPING
            and compresses those files into a single ZIP.

    Returns:
        str: The full path to the newly created ZIP archive.
    """
    zip_path = f"{output_directory}.zip"

    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
        for suffix in [config["suffix"] for config in OUTPUT_FORMATS_MAPPING.values()]:
            zipf.write(
                f"{output_directory}{suffix}",
                f"output/{output_directory.split('/')[-1]}{suffix}",
            )

    return zip_path
def get_s3_client():
    """
    Create and return a boto3 S3 client using AWS credentials from environment variables.

    This function attempts to initialize an S3 client by reading the AWS access key, secret key,
    session token, and region from environment variables. It then validates the client by listing
    available S3 buckets. If an error occurs during this process (for example, due to missing or
    invalid credentials), it falls back to creating an S3 client without explicitly provided credentials,
    which may rely on other configuration sources (e.g., IAM roles).

    Returns:
        boto3.client: A configured S3 client instance.

    Raises:
        ClientError, NoCredentialsError, or ParamValidationError if both attempts to create a client fail.
    """
    s3_client = None
    try:
        s3_client = boto3.client(
            "s3",
            aws_access_key_id=settings.DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY,
            aws_session_token=settings.DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN,
            region_name=settings.DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION,
        )
        s3_client.list_buckets()
    except (ClientError, NoCredentialsError, ParamValidationError, ValueError):
        s3_client = boto3.client("s3")
        s3_client.list_buckets()

    return s3_client
def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str:
    """
    Upload the specified ZIP file to an S3 bucket.

    If the S3 bucket environment variables are not configured,
    the function returns None without performing an upload.

    Args:
        tenant_id (str): The tenant identifier, used as part of the S3 key prefix.
        zip_path (str): The local file system path to the ZIP file to be uploaded.
        scan_id (str): The scan identifier, used as part of the S3 key prefix.

    Returns:
        str: The S3 URI of the uploaded file (e.g., "s3://<bucket>/<key>") if successful.
        None: If the required environment variables for the S3 bucket are not set.

    Raises:
        botocore.exceptions.ClientError: If the upload attempt to S3 fails for any reason.
    """
    if not base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET:
        return

    try:
        s3 = get_s3_client()
        s3_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
        s3.upload_file(
            Filename=zip_path,
            Bucket=base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET,
            Key=s3_key,
        )
        return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{s3_key}"
    except (ClientError, NoCredentialsError, ParamValidationError, ValueError) as e:
        logger.error(f"S3 upload failed: {str(e)}")
def _generate_output_directory(
    output_directory, prowler_provider: object, tenant_id: str, scan_id: str
) -> str:
    """
    Generate a file system path for the output directory of a prowler scan.

    This function constructs the output directory path by combining a base
    temporary output directory, the tenant ID, the scan ID, and details about
    the prowler provider along with a timestamp. The resulting path is used to
    store the output files of a prowler scan.

    Note:
        This function depends on one external variable:
        - `output_file_timestamp`: A timestamp (as a string) used to uniquely identify the output.

    Args:
        output_directory (str): The base output directory.
        prowler_provider (object): An identifier or descriptor for the prowler provider.
            Typically, this is a string indicating the provider (e.g., "aws").
        tenant_id (str): The unique identifier for the tenant.
        scan_id (str): The unique identifier for the scan.

    Returns:
        str: The constructed file system path for the prowler scan output directory.

    Example:
        >>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
        '/tmp/tenant-1234/scan-5678/prowler-output-aws-2023-02-15T12:34:56'
    """
    path = (
        f"{output_directory}/{tenant_id}/{scan_id}/prowler-output-"
        f"{prowler_provider}-{output_file_timestamp}"
    )
    os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)

    return path
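End to end, the removed module wires these helpers into one export path. A usage sketch under the same assumptions (the writer step is elided, since the CSV/OCSF/HTML classes in OUTPUT_FORMATS_MAPPING produce the files the compressor expects):

def export_scan_outputs(tenant_id: str, scan_id: str, provider: str = "aws"):
    # Sketch of the implied flow: build the per-scan directory, let the
    # format writers emit their files next to it, then zip and upload.
    output_directory = _generate_output_directory(
        "/tmp/prowler_api_output", provider, tenant_id, scan_id
    )
    # ... writers from OUTPUT_FORMATS_MAPPING write <output_directory><suffix> ...
    zip_path = _compress_output_files(output_directory)
    return _upload_to_s3(tenant_id, zip_path, scan_id)  # None if no bucket set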
@@ -1,4 +1,3 @@
import json
import time
from copy import deepcopy
from datetime import datetime, timezone
@@ -7,13 +6,12 @@ from celery.utils.log import get_task_logger
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
from django.db import IntegrityError, OperationalError
from django.db.models import Case, Count, IntegerField, Sum, When
from tasks.utils import CustomEncoder

from api.compliance import (
    PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
    generate_scan_compliance,
)
from api.db_utils import rls_transaction
from api.db_utils import tenant_transaction
from api.models import (
    ComplianceOverview,
    Finding,
@@ -71,7 +69,7 @@ def _store_resources(
        - tuple[str, str]: A tuple containing the resource UID and region.

    """
    with rls_transaction(tenant_id):
    with tenant_transaction(tenant_id):
        resource_instance, created = Resource.objects.get_or_create(
            tenant_id=tenant_id,
            provider=provider_instance,
@@ -88,7 +86,7 @@ def _store_resources(
        resource_instance.service = finding.service_name
        resource_instance.type = finding.resource_type
        resource_instance.save()
    with rls_transaction(tenant_id):
    with tenant_transaction(tenant_id):
        tags = [
            ResourceTag.objects.get_or_create(
                tenant_id=tenant_id, key=key, value=value
@@ -118,12 +116,13 @@ def perform_prowler_scan(
        ValueError: If the provider cannot be connected.

    """
    generate_compliance = False
    check_status_by_region = {}
    exception = None
    unique_resources = set()
    start_time = time.time()

    with rls_transaction(tenant_id):
    with tenant_transaction(tenant_id):
        provider_instance = Provider.objects.get(pk=provider_id)
        scan_instance = Scan.objects.get(pk=scan_id)
        scan_instance.state = StateChoices.EXECUTING
@@ -131,7 +130,7 @@ def perform_prowler_scan(
        scan_instance.save()

    try:
        with rls_transaction(tenant_id):
        with tenant_transaction(tenant_id):
            try:
                prowler_provider = initialize_prowler_provider(provider_instance)
                provider_instance.connected = True
@@ -146,6 +145,7 @@ def perform_prowler_scan(
        )
        provider_instance.save()

    generate_compliance = provider_instance.provider != Provider.ProviderChoices.GCP
    prowler_scan = ProwlerScan(provider=prowler_provider, checks=checks_to_execute)

    resource_cache = {}
@@ -154,12 +154,9 @@ def perform_prowler_scan(

    for progress, findings in prowler_scan.scan():
        for finding in findings:
            if finding is None:
                logger.error(f"None finding detected on scan {scan_id}.")
                continue
            for attempt in range(CELERY_DEADLOCK_ATTEMPTS):
                try:
                    with rls_transaction(tenant_id):
                    with tenant_transaction(tenant_id):
                        # Process resource
                        resource_uid = finding.resource_uid
                        if resource_uid not in resource_cache:
@@ -181,10 +178,7 @@ def perform_prowler_scan(

                        # Update resource fields if necessary
                        updated_fields = []
                        if (
                            finding.region
                            and resource_instance.region != finding.region
                        ):
                        if resource_instance.region != finding.region:
                            resource_instance.region = finding.region
                            updated_fields.append("region")
                        if resource_instance.service != finding.service_name:
@@ -193,19 +187,8 @@ def perform_prowler_scan(
                        if resource_instance.type != finding.resource_type:
                            resource_instance.type = finding.resource_type
                            updated_fields.append("type")
                        if resource_instance.metadata != finding.resource_metadata:
                            resource_instance.metadata = json.dumps(
                                finding.resource_metadata, cls=CustomEncoder
                            )
                            updated_fields.append("metadata")
                        if resource_instance.details != finding.resource_details:
                            resource_instance.details = finding.resource_details
                            updated_fields.append("details")
                        if resource_instance.partition != finding.partition:
                            resource_instance.partition = finding.partition
                            updated_fields.append("partition")
                        if updated_fields:
                            with rls_transaction(tenant_id):
                            with tenant_transaction(tenant_id):
                                resource_instance.save(update_fields=updated_fields)
                except (OperationalError, IntegrityError) as db_err:
                    if attempt < CELERY_DEADLOCK_ATTEMPTS - 1:
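The retry loop around the resource upsert is the standard bounded-attempts pattern for Postgres deadlocks: swallow the error while attempts remain, re-raise on the last one. Stripped of the scan context it reduces to:

from django.db import IntegrityError, OperationalError

CELERY_DEADLOCK_ATTEMPTS = 5  # stand-in; the real value comes from config.settings.celery


def upsert_with_retries(do_upsert):
    for attempt in range(CELERY_DEADLOCK_ATTEMPTS):
        try:
            return do_upsert()
        except (OperationalError, IntegrityError):
            if attempt == CELERY_DEADLOCK_ATTEMPTS - 1:
                raise  # retries exhausted, surface the database error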
@@ -220,7 +203,7 @@ def perform_prowler_scan(

            # Update tags
            tags = []
            with rls_transaction(tenant_id):
            with tenant_transaction(tenant_id):
                for key, value in finding.resource_tags.items():
                    tag_key = (key, value)
                    if tag_key not in tag_cache:
@@ -236,35 +219,26 @@ def perform_prowler_scan(
            unique_resources.add((resource_instance.uid, resource_instance.region))

            # Process finding
            with rls_transaction(tenant_id):
            with tenant_transaction(tenant_id):
                finding_uid = finding.uid
                last_first_seen_at = None
                if finding_uid not in last_status_cache:
                    most_recent_finding = (
                        Finding.all_objects.filter(
                            tenant_id=tenant_id, uid=finding_uid
                        )
                        .order_by("-inserted_at")
                        .values("status", "first_seen_at")
                        Finding.objects.filter(uid=finding_uid)
                        .order_by("-id")
                        .values("status")
                        .first()
                    )
                    last_status = None
                    if most_recent_finding:
                        last_status = most_recent_finding["status"]
                        last_first_seen_at = most_recent_finding["first_seen_at"]
                    last_status_cache[finding_uid] = last_status, last_first_seen_at
                    last_status = (
                        most_recent_finding["status"]
                        if most_recent_finding
                        else None
                    )
                    last_status_cache[finding_uid] = last_status
                else:
                    last_status, last_first_seen_at = last_status_cache[finding_uid]
                    last_status = last_status_cache[finding_uid]

                status = FindingStatus[finding.status]
                delta = _create_finding_delta(last_status, status)
                # For findings that predate this change: the first finding seen with
                # delta != "new" is assigned the current date as first_seen_at, and
                # successive findings with the same UID inherit the previous finding's date.
                # For new findings (delta="new"): the first occurrence gets the current
                # date as first_seen_at, and the following findings reuse that date.
                if not last_first_seen_at:
                    last_first_seen_at = datetime.now(tz=timezone.utc)
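The comment block above compresses to one rule: first_seen_at is inherited from the most recent finding with the same UID, and minted fresh only when there is none. As a pure function:

from datetime import datetime, timezone


def resolve_first_seen_at(previous_first_seen_at):
    # Inherit the date from the previous finding with this UID; if this is
    # the first occurrence (or older rows predate the field), mint it now.
    return previous_first_seen_at or datetime.now(tz=timezone.utc)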
                # Create the finding
                finding_instance = Finding.objects.create(
@@ -279,14 +253,11 @@ def perform_prowler_scan(
                    raw_result=finding.raw,
                    check_id=finding.check_id,
                    scan=scan_instance,
                    first_seen_at=last_first_seen_at,
                    muted=finding.muted,
                    compliance=finding.compliance,
                )
                finding_instance.add_resources([resource_instance])

            # Update compliance data if applicable
            if finding.status.value == "MUTED":
            if not generate_compliance or finding.status.value == "MUTED":
                continue

            region_dict = check_status_by_region.setdefault(finding.region, {})
@@ -296,7 +267,7 @@ def perform_prowler_scan(
            region_dict[finding.check_id] = finding.status.value

        # Update scan progress
        with rls_transaction(tenant_id):
        with tenant_transaction(tenant_id):
            scan_instance.progress = progress
            scan_instance.save()

@@ -308,13 +279,13 @@ def perform_prowler_scan(
        scan_instance.state = StateChoices.FAILED

    finally:
        with rls_transaction(tenant_id):
        with tenant_transaction(tenant_id):
            scan_instance.duration = time.time() - start_time
            scan_instance.completed_at = datetime.now(tz=timezone.utc)
            scan_instance.unique_resource_count = len(unique_resources)
            scan_instance.save()

    if exception is None:
    if exception is None and generate_compliance:
        try:
            regions = prowler_provider.get_regions()
        except AttributeError:
@@ -359,18 +330,9 @@ def perform_prowler_scan(
                    total_requirements=compliance["total_requirements"],
                )
            )
    try:
        with rls_transaction(tenant_id):
            ComplianceOverview.objects.bulk_create(
                compliance_overview_objects, batch_size=100
            )
    except Exception as overview_exception:
        import sentry_sdk
        with tenant_transaction(tenant_id):
            ComplianceOverview.objects.bulk_create(compliance_overview_objects)

        sentry_sdk.capture_exception(overview_exception)
        logger.error(
            f"Error storing compliance overview for scan {scan_id}: {overview_exception}"
        )
    if exception is not None:
        raise exception
@@ -406,8 +368,8 @@ def aggregate_findings(tenant_id: str, scan_id: str):
        - muted_new: Muted findings with a delta of 'new'.
        - muted_changed: Muted findings with a delta of 'changed'.
    """
    with rls_transaction(tenant_id):
        findings = Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)
    with tenant_transaction(tenant_id):
        findings = Finding.objects.filter(scan_id=scan_id)

        aggregation = findings.values(
            "check_id",
@@ -417,21 +379,21 @@ def aggregate_findings(tenant_id: str, scan_id: str):
        ).annotate(
            fail=Sum(
                Case(
                    When(status="FAIL", muted=False, then=1),
                    When(status="FAIL", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            _pass=Sum(
                Case(
                    When(status="PASS", muted=False, then=1),
                    When(status="PASS", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            muted_count=Sum(
            muted=Sum(
                Case(
                    When(muted=True, then=1),
                    When(status="MUTED", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
@@ -439,70 +401,70 @@ def aggregate_findings(tenant_id: str, scan_id: str):
            total=Count("id"),
            new=Sum(
                Case(
                    When(delta="new", muted=False, then=1),
                    When(delta="new", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            changed=Sum(
                Case(
                    When(delta="changed", muted=False, then=1),
                    When(delta="changed", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            unchanged=Sum(
                Case(
                    When(delta__isnull=True, muted=False, then=1),
                    When(delta__isnull=True, then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            fail_new=Sum(
                Case(
                    When(delta="new", status="FAIL", muted=False, then=1),
                    When(delta="new", status="FAIL", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            fail_changed=Sum(
                Case(
                    When(delta="changed", status="FAIL", muted=False, then=1),
                    When(delta="changed", status="FAIL", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            pass_new=Sum(
                Case(
                    When(delta="new", status="PASS", muted=False, then=1),
                    When(delta="new", status="PASS", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            pass_changed=Sum(
                Case(
                    When(delta="changed", status="PASS", muted=False, then=1),
                    When(delta="changed", status="PASS", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            muted_new=Sum(
                Case(
                    When(delta="new", muted=True, then=1),
                    When(delta="new", status="MUTED", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            muted_changed=Sum(
                Case(
                    When(delta="changed", muted=True, then=1),
                    When(delta="changed", status="MUTED", then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
        )

    with rls_transaction(tenant_id):
    with tenant_transaction(tenant_id):
        scan_aggregations = {
            ScanSummary(
                tenant_id=tenant_id,
@@ -513,7 +475,7 @@ def aggregate_findings(tenant_id: str, scan_id: str):
                region=agg["resources__region"],
                fail=agg["fail"],
                _pass=agg["_pass"],
                muted=agg["muted_count"],
                muted=agg["muted"],
                total=agg["total"],
                new=agg["new"],
                changed=agg["changed"],
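The whole summary is one conditional-aggregation query; the muted=False guard on one side of each pair is what keeps muted findings out of the pass/fail/delta buckets while still counting them under muted. In isolation the pattern is:

from django.db.models import Case, Count, IntegerField, Sum, When

from api.models import Finding  # model from the module above


def summarize(scan_id: str):
    # One round trip: COUNT plus SUM(CASE ...) per check_id, with muted
    # findings excluded from fail via the muted=False condition.
    return (
        Finding.objects.filter(scan_id=scan_id)
        .values("check_id")
        .annotate(
            total=Count("id"),
            fail=Sum(
                Case(
                    When(status="FAIL", muted=False, then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
            muted=Sum(
                Case(
                    When(muted=True, then=1),
                    default=0,
                    output_field=IntegerField(),
                )
            ),
        )
    )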
Some files were not shown because too many files have changed in this diff